|
43 | 43 |
|
44 | 44 | #![cfg(not(windows))] // Windows already has builtins to do this
|
45 | 45 |
|
// Declaration of the stack-probe entry point, which is defined in inline
// assembly further down in this file for non-Apple x86/x86_64 targets.
// Apple targets are excluded here because their assembler does not support
// the CFI directives used by that definition (see the comment on the Apple
// variants below); those targets get a separate Rust-level definition of
// `__rust_probestack` instead, so this extern declaration must not collide
// with it.
#[cfg(all(any(target_arch = "x86_64", target_arch = "x86"), not(target_vendor = "apple")))]
extern "C" {
    pub fn __rust_probestack();
}
|
49 | 50 |
|
50 | 51 | #[naked]
|
51 | 52 | #[no_mangle]
|
52 |
| -#[cfg(all(target_arch = "x86_64", not(feature = "mangled-names")))] |
| 53 | +#[cfg(all(target_arch = "x86_64", not(target_vendor = "apple"), not(feature = "mangled-names")))] |
53 | 54 | pub unsafe extern "C" fn __rust_probestack_wrapper() {
|
54 | 55 | // Our goal here is to touch each page between %rsp+8 and %rsp+8-%rax,
|
55 | 56 | // ensuring that if any pages are unmapped we'll make a page fault.
|
@@ -128,7 +129,7 @@ pub unsafe extern "C" fn __rust_probestack_wrapper() {
|
128 | 129 |
|
129 | 130 | #[naked]
|
130 | 131 | #[no_mangle]
|
131 |
| -#[cfg(all(target_arch = "x86", not(feature = "mangled-names")))] |
| 132 | +#[cfg(all(target_arch = "x86", not(target_vendor = "apple"), not(feature = "mangled-names")))] |
132 | 133 | pub unsafe extern "C" fn __rust_probestack_wrapper() {
|
133 | 134 | // This is the same as x86_64 above, only translated for 32-bit sizes. Note
|
134 | 135 | // that on Unix we're expected to restore everything as it was, this
|
@@ -179,3 +180,73 @@ pub unsafe extern "C" fn __rust_probestack_wrapper() {
|
179 | 180 | " ::: "memory" : "volatile");
|
180 | 181 | ::core::intrinsics::unreachable();
|
181 | 182 | }
|
| 183 | + |
/// Stack-probe routine for x86_64 Apple targets.
///
/// Same as above, but without the CFI tricks: the assembler for Apple
/// devices doesn't support the directives we were using to define
/// `__rust_probestack`, so this target gets a plain `#[naked]` Rust
/// definition instead of the asm-directive one declared via `extern "C"`.
///
/// Contract (matching the non-Apple wrapper above): `%rax` holds the size
/// of the stack allocation the caller is about to perform, and every page
/// between the current stack pointer and `%rsp - %rax` is touched with a
/// `test` so that unmapped guard pages fault here rather than in the
/// caller. On return `%rsp` is restored to its entry value (`sub %r11` /
/// `sub` loop, then `add %rax`), so the caller sees no net stack change.
/// `%r11` is used as scratch and the CPU flags are clobbered by the
/// `cmp`/`test`/`sub` sequence — callers of the probestack ABI tolerate
/// that. NOTE(review): probe offset `8(%rsp)` presumably accounts for the
/// return address pushed by `call`, mirroring the wrapper's
/// "%rsp+8 .. %rsp+8-%rax" comment — confirm against the ABI docs.
#[naked]
#[no_mangle]
#[cfg(all(target_arch = "x86_64", target_vendor = "apple", not(feature = "mangled-names")))]
pub unsafe extern "C" fn __rust_probestack() {
    // Same as above, but without the CFI tricks. The assembler for Apple
    // devices doesn't support the directives we were using to define
    // __rust_probestack.
    //
    // Layout of the asm below: set up a frame (pushq/movq), then probe one
    // 4096-byte page per iteration of loop `2:` while more than a page
    // remains, then probe the final partial page at `3:`, restore %rsp,
    // and tear the frame down with `leave`. `$$` is the old asm! macro's
    // escape for a literal `$` immediate.
    asm!("
    pushq  %rbp
    movq   %rsp, %rbp

    mov    %rax,%r11

    cmp    $$0x1000,%r11
    jna    3f
2:
    sub    $$0x1000,%rsp
    test   %rsp,8(%rsp)
    sub    $$0x1000,%r11
    cmp    $$0x1000,%r11
    ja     2b

3:
    sub    %r11,%rsp
    test   %rsp,8(%rsp)

    add    %rax,%rsp

    leave
    ret
    " ::: "memory" : "volatile");
    // The `ret` above is the real exit; control can never reach this point.
    ::core::intrinsics::unreachable();
}
| 217 | + |
/// Stack-probe routine for 32-bit x86 Apple targets.
///
/// This is the same as x86_64 above, only translated for 32-bit sizes:
/// `%eax` carries the requested allocation size and `%ecx` is the scratch
/// counter. Note that on Unix we're expected to restore everything as it
/// was — this function basically can't tamper with anything — so unlike
/// the 64-bit version (which may freely use the caller-clobbered `%r11`),
/// `%ecx` is explicitly saved with `push`/`pop` around the probe loop.
/// The ABI here is the same as x86_64, except everything is 32-bits large.
/// CPU flags are clobbered by the `cmp`/`test`/`sub` sequence.
/// NOTE(review): as in the 64-bit variant, the `8(%esp)` probe offset
/// presumably skips past data at the top of the frame — confirm it matches
/// the non-Apple wrapper's probing scheme.
#[naked]
#[no_mangle]
#[cfg(all(target_arch = "x86", target_vendor = "apple", not(feature = "mangled-names")))]
pub unsafe extern "C" fn __rust_probestack() {
    // This is the same as x86_64 above, only translated for 32-bit sizes. Note
    // that on Unix we're expected to restore everything as it was, this
    // function basically can't tamper with anything.
    //
    // The ABI here is the same as x86_64, except everything is 32-bits large.
    //
    // Structure: frame setup, save %ecx, probe one 4096-byte page per trip
    // through loop `2:`, probe the final partial page at `3:`, then restore
    // %esp, %ecx, and the frame. `$$` escapes a literal `$` immediate in
    // the old asm! macro.
    asm!("
    push   %ebp
    mov    %esp, %ebp
    push   %ecx
    mov    %eax,%ecx

    cmp    $$0x1000,%ecx
    jna    3f
2:
    sub    $$0x1000,%esp
    test   %esp,8(%esp)
    sub    $$0x1000,%ecx
    cmp    $$0x1000,%ecx
    ja     2b

3:
    sub    %ecx,%esp
    test   %esp,8(%esp)

    add    %eax,%esp
    pop    %ecx
    leave
    ret
    " ::: "memory" : "volatile");
    // Unreachable: the inline asm returns via `ret`.
    ::core::intrinsics::unreachable();
}
0 commit comments