4949use core:: mem;
5050use stack:: Stack ;
5151use arch:: StackPointer ;
52+ use unwind;
5253
/// Alignment (in bytes) that context stacks must satisfy. The value 16
/// matches the x86-64 call-site stack alignment convention (the assembly
/// below targets x86-64 registers such as %rsp/%rbp).
pub const STACK_ALIGNMENT: usize = 16;
5455
@@ -127,8 +128,9 @@ pub unsafe fn init(stack: &Stack, f: unsafe extern "C" fn(usize, StackPointer))
127128 # trampoline_2.
128129 nop
129130
130- # Call the provided function.
131- callq *16(%rsp)
131+ # Call unwind_wrapper with the provided function.
132+ movq 16(%rsp), %rdx
133+ call ${0:c}
132134
133135 # Clear the stack pointer. We can't call into this context any more once
134136 # the function has returned.
@@ -150,7 +152,7 @@ pub unsafe fn init(stack: &Stack, f: unsafe extern "C" fn(usize, StackPointer))
150152 .cfi_register %rip, %rax
151153 jmpq *%rax
152154 "#
153- : : : : "volatile" )
155+ : : "s" ( unwind :: unwind_wrapper as usize ) : : "volatile" )
154156 }
155157
156158 // We set up the stack in a somewhat special way so that to the unwinder it
@@ -315,3 +317,66 @@ pub unsafe fn swap(arg: usize, new_sp: StackPointer) -> (usize, StackPointer) {
315317 : "volatile" , "alignstack" ) ;
316318 ( ret, mem:: transmute ( ret_sp) )
317319}
320+
321+
/// Switch to the context whose saved stack pointer is `new_sp` (running on
/// `new_stack`) and immediately jump to `unwind::force_unwind` there, forcing
/// that context's stack to unwind. Control eventually returns to our caller
/// through the stack-link slot written into the new stack's CFA slot.
///
/// # Safety
///
/// NOTE(review): `new_sp` must be a valid saved stack pointer for a suspended
/// context, and `new_stack` must be the stack it points into — neither
/// invariant can be checked here; confirm at call sites.
#[inline(always)]
pub unsafe fn unwind(new_sp: StackPointer, new_stack: &Stack) {
  // Address of the topmost CFA stack slot.
  // NOTE(review): the -4 (usize units) offset presumably mirrors the slot
  // layout established by init() above — confirm against the trampolines
  // there if that layout ever changes.
  let new_cfa = (new_stack.base() as *mut usize).offset(-4);

  // Naked shim: runs with %rdx = new stack pointer, %rcx = new CFA slot
  // (placed there by the asm! constraints in the outer call below).
  #[naked]
  unsafe extern "C" fn trampoline() {
    asm!(
      r#"
        # Save frame pointer explicitly; the unwinder uses it to find CFA of
        # the caller, and so it has to have the correct value immediately after
        # the call instruction that invoked the trampoline.
        pushq %rbp
        .cfi_adjust_cfa_offset 8
        .cfi_rel_offset %rbp, 0

        # Link the call stacks together by writing the current stack bottom
        # address to the CFA slot in the new stack.
        movq %rsp, (%rcx)

        # Load stack pointer of the new context.
        movq %rdx, %rsp

        # Restore frame pointer of the new context.
        popq %rbp
        .cfi_adjust_cfa_offset -8
        .cfi_restore %rbp

        # Jump to the unwind function, which will force a stack unwind in the
        # target context. This will eventually return to our caller through the
        # stack link.
        jmp ${0:c}
      "#
      : : "s" (unwind::force_unwind as usize) : : "volatile")
  }

  asm!(
    r#"
      # Push instruction pointer of the old context and switch to
      # the new context.
      call ${0:c}
    "#
    :
    : "s" (trampoline as usize)
      "{rdx}" (new_sp.0)
      "{rcx}" (new_cfa)
    // Declare everything except %rbp/%rsp clobbered: the other context may
    // have modified any register before control returns here.
    : "rax",  "rbx",  "rcx",  "rdx",  "rsi",  "rdi",  /*"rbp",  "rsp",*/
      "r8",   "r9",   "r10",  "r11",  "r12",  "r13",  "r14",  "r15",
      "mm0",  "mm1",  "mm2",  "mm3",  "mm4",  "mm5",  "mm6",  "mm7",
      "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7",
      "xmm8", "xmm9", "xmm10","xmm11","xmm12","xmm13","xmm14","xmm15",
      "xmm16","xmm17","xmm18","xmm19","xmm20","xmm21","xmm22","xmm23",
      "xmm24","xmm25","xmm26","xmm27","xmm28","xmm29","xmm30","xmm31",
      "cc", "dirflag", "fpsr", "flags", "memory"
    // Ideally, we would set the LLVM "noredzone" attribute on this function
    // (and it would be propagated to the call site). Unfortunately, rustc
    // provides no such functionality. Fortunately, by a lucky coincidence,
    // the "alignstack" LLVM inline assembly option does exactly the same
    // thing on x86_64.
    : "volatile", "alignstack");
}