diff --git a/src/doc/unstable-book/src/language-features/global-allocator.md b/src/doc/unstable-book/src/language-features/global-allocator.md index 8f1ba22de8cb1..57bcaf14beb3c 100644 --- a/src/doc/unstable-book/src/language-features/global-allocator.md +++ b/src/doc/unstable-book/src/language-features/global-allocator.md @@ -29,17 +29,17 @@ looks like: ```rust #![feature(global_allocator, allocator_api, heap_api)] -use std::alloc::{GlobalAlloc, System, Layout, Opaque}; +use std::alloc::{AllocErr, GlobalAlloc, System, Layout, Opaque}; use std::ptr::NonNull; struct MyAllocator; unsafe impl GlobalAlloc for MyAllocator { - unsafe fn alloc(&self, layout: Layout) -> *mut Opaque { + unsafe fn alloc(&self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> { System.alloc(layout) } - unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) { + unsafe fn dealloc(&self, ptr: NonNull<Opaque>, layout: Layout) { System.dealloc(ptr, layout) } } @@ -55,18 +55,15 @@ fn main() { ``` And that's it! The `#[global_allocator]` attribute is applied to a `static` -which implements the `Alloc` trait in the `std::alloc` module. Note, though, -that the implementation is defined for `&MyAllocator`, not just `MyAllocator`. -You may wish, however, to also provide `Alloc for MyAllocator` for other use -cases. +which implements the `GlobalAlloc` trait in the `std::alloc` module. A crate can only have one instance of `#[global_allocator]` and this instance may be loaded through a dependency. For example `#[global_allocator]` above could have been placed in one of the dependencies loaded through `extern crate`. -Note that `Alloc` itself is an `unsafe` trait, with much documentation on the -trait itself about usage and for implementors. Extra care should be taken when -implementing a global allocator as well as the allocator may be called from many -portions of the standard library, such as the panicking routine. 
As a result it -is highly recommended to not panic during allocation and work in as many -situations with as few dependencies as possible as well. +Note that `GlobalAlloc` itself is an `unsafe` trait, with much documentation on +the trait itself about usage and for implementors. Extra care should be taken +when implementing a global allocator as well as the allocator may be called from +many portions of the standard library, such as the panicking routine. As a +result it is highly recommended to not panic during allocation and work in as +many situations with as few dependencies as possible as well. diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 8753c495737c3..f50f7969abe78 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -51,38 +51,46 @@ pub const Heap: Global = Global; unsafe impl GlobalAlloc for Global { #[inline] - unsafe fn alloc(&self, layout: Layout) -> *mut Opaque { - let ptr = __rust_alloc(layout.size(), layout.align()); - ptr as *mut Opaque + unsafe fn alloc(&self, layout: Layout) -> Result, AllocErr> { + NonNull::new(__rust_alloc(layout.size(), layout.align()) as *mut Opaque).ok_or(AllocErr) } #[inline] - unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) { - __rust_dealloc(ptr as *mut u8, layout.size(), layout.align()) + unsafe fn dealloc(&self, ptr: NonNull, layout: Layout) { + __rust_dealloc(ptr.as_ptr() as *mut u8, layout.size(), layout.align()) } #[inline] - unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque { - let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(), new_size); - ptr as *mut Opaque + unsafe fn realloc( + &self, + ptr: NonNull, + layout: Layout, + new_size: usize, + ) -> Result, AllocErr> { + NonNull::new(__rust_realloc( + ptr.as_ptr() as *mut u8, + layout.size(), + layout.align(), + new_size, + ) as *mut Opaque).ok_or(AllocErr) } #[inline] - unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque { - let ptr = 
__rust_alloc_zeroed(layout.size(), layout.align()); - ptr as *mut Opaque + unsafe fn alloc_zeroed(&self, layout: Layout) -> Result, AllocErr> { + NonNull::new(__rust_alloc_zeroed(layout.size(), layout.align()) as *mut Opaque) + .ok_or(AllocErr) } } unsafe impl Alloc for Global { #[inline] unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { - NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr) + GlobalAlloc::alloc(self, layout) } #[inline] unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { - GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) + GlobalAlloc::dealloc(self, ptr, layout) } #[inline] @@ -92,12 +100,12 @@ unsafe impl Alloc for Global { new_size: usize) -> Result, AllocErr> { - NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr) + GlobalAlloc::realloc(self, ptr, layout, new_size) } #[inline] unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { - NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr) + GlobalAlloc::alloc_zeroed(self, layout) } } @@ -111,11 +119,9 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { align as *mut u8 } else { let layout = Layout::from_size_align_unchecked(size, align); - let ptr = Global.alloc(layout); - if !ptr.is_null() { - ptr as *mut u8 - } else { - oom(layout) + match Global.alloc(layout) { + Ok(p) => p.as_ptr() as *mut u8, + Err(_) => oom(layout), } } } @@ -123,13 +129,12 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { #[cfg_attr(not(test), lang = "box_free")] #[inline] pub(crate) unsafe fn box_free(ptr: Unique) { - let ptr = ptr.as_ptr(); - let size = size_of_val(&*ptr); - let align = min_align_of_val(&*ptr); + let size = size_of_val(ptr.as_ref()); + let align = min_align_of_val(ptr.as_ref()); // We do not allocate for Box when T is ZST, so deallocation is also not necessary. 
if size != 0 { let layout = Layout::from_size_align_unchecked(size, align); - Global.dealloc(ptr as *mut Opaque, layout); + Global.dealloc(NonNull::from(ptr).cast(), layout); } } diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index f75132487849f..7c1b269ff8b27 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -518,7 +518,7 @@ impl Arc { if self.inner().weak.fetch_sub(1, Release) == 1 { atomic::fence(Acquire); - Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref())) + Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } } @@ -638,7 +638,7 @@ impl ArcFromSlice for Arc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(self.mem.as_opaque(), self.layout.clone()); + Global.dealloc(self.mem.cast(), self.layout.clone()); } } } @@ -1157,7 +1157,7 @@ impl Drop for Weak { if self.inner().weak.fetch_sub(1, Release) == 1 { atomic::fence(Acquire); unsafe { - Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref())) + Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } } } diff --git a/src/liballoc/btree/node.rs b/src/liballoc/btree/node.rs index 431695c32ab68..19bdcbc6ad63e 100644 --- a/src/liballoc/btree/node.rs +++ b/src/liballoc/btree/node.rs @@ -287,7 +287,7 @@ impl Root { self.as_mut().as_leaf_mut().parent = ptr::null(); unsafe { - Global.dealloc(NonNull::from(top).as_opaque(), Layout::new::>()); + Global.dealloc(NonNull::from(top).cast(), Layout::new::>()); } } } @@ -478,7 +478,7 @@ impl NodeRef { debug_assert!(!self.is_shared_root()); let node = self.node; let ret = self.ascend().ok(); - Global.dealloc(node.as_opaque(), Layout::new::>()); + Global.dealloc(node.cast(), Layout::new::>()); ret } } @@ -499,7 +499,7 @@ impl NodeRef { > { let node = self.node; let ret = self.ascend().ok(); - Global.dealloc(node.as_opaque(), Layout::new::>()); + Global.dealloc(node.cast(), Layout::new::>()); ret } } @@ -1321,12 +1321,12 @@ 
impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: } Global.dealloc( - right_node.node.as_opaque(), + right_node.node.cast(), Layout::new::>(), ); } else { Global.dealloc( - right_node.node.as_opaque(), + right_node.node.cast(), Layout::new::>(), ); } diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 07bb7f1a3ebd8..fce7b7e09319f 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -93,7 +93,7 @@ impl RawVec { // handles ZSTs and `cap = 0` alike let ptr = if alloc_size == 0 { - NonNull::::dangling().as_opaque() + NonNull::::dangling().cast() } else { let align = mem::align_of::(); let layout = Layout::from_size_align(alloc_size, align).unwrap(); @@ -314,7 +314,7 @@ impl RawVec { let new_cap = 2 * self.cap; let new_size = new_cap * elem_size; alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); - let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_opaque(), + let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), cur, new_size); match ptr_res { @@ -373,7 +373,7 @@ impl RawVec { let new_cap = 2 * self.cap; let new_size = new_cap * elem_size; alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); - match self.a.grow_in_place(NonNull::from(self.ptr).as_opaque(), old_layout, new_size) { + match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) { Ok(_) => { // We can't directly divide `size`. 
self.cap = new_cap; @@ -546,7 +546,7 @@ impl RawVec { // FIXME: may crash and burn on over-reserve alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow()); match self.a.grow_in_place( - NonNull::from(self.ptr).as_opaque(), old_layout, new_layout.size(), + NonNull::from(self.ptr).cast(), old_layout, new_layout.size(), ) { Ok(_) => { self.cap = new_cap; @@ -607,7 +607,7 @@ impl RawVec { let new_size = elem_size * amount; let align = mem::align_of::(); let old_layout = Layout::from_size_align_unchecked(old_size, align); - match self.a.realloc(NonNull::from(self.ptr).as_opaque(), + match self.a.realloc(NonNull::from(self.ptr).cast(), old_layout, new_size) { Ok(p) => self.ptr = p.cast().into(), @@ -667,7 +667,7 @@ impl RawVec { let res = match self.current_layout() { Some(layout) => { debug_assert!(new_layout.align() == layout.align()); - self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size()) + self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size()) } None => self.a.alloc(new_layout), }; @@ -710,7 +710,7 @@ impl RawVec { let elem_size = mem::size_of::(); if elem_size != 0 { if let Some(layout) = self.current_layout() { - self.a.dealloc(NonNull::from(self.ptr).as_opaque(), layout); + self.a.dealloc(NonNull::from(self.ptr).cast(), layout); } } } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 1648fc6b7ef4b..dbf7bdabf60de 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -845,7 +845,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { self.dec_weak(); if self.weak() == 0 { - Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref())); + Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); } } } @@ -1269,7 +1269,7 @@ impl Drop for Weak { // the weak count starts at 1, and will only go to zero if all // the strong pointers have disappeared. 
if self.weak() == 0 { - Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref())); + Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); } } } diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs index 9490b54e675df..b89f6bbe927e1 100644 --- a/src/liballoc_system/lib.rs +++ b/src/liballoc_system/lib.rs @@ -51,17 +51,17 @@ pub struct System; unsafe impl Alloc for System { #[inline] unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { - NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr) + GlobalAlloc::alloc(self, layout) } #[inline] unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { - NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr) + GlobalAlloc::alloc_zeroed(self, layout) } #[inline] unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { - GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) + GlobalAlloc::dealloc(self, ptr, layout) } #[inline] @@ -69,29 +69,31 @@ unsafe impl Alloc for System { ptr: NonNull, layout: Layout, new_size: usize) -> Result, AllocErr> { - NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr) + GlobalAlloc::realloc(self, ptr, layout, new_size) } } #[cfg(any(windows, unix, target_os = "cloudabi", target_os = "redox"))] mod realloc_fallback { - use core::alloc::{GlobalAlloc, Opaque, Layout}; + use core::alloc::{GlobalAlloc, Opaque, Layout, AllocErr}; use core::cmp; - use core::ptr; + use core::ptr::{self, NonNull}; impl super::System { - pub(crate) unsafe fn realloc_fallback(&self, ptr: *mut Opaque, old_layout: Layout, - new_size: usize) -> *mut Opaque { + pub(crate) unsafe fn realloc_fallback( + &self, + ptr: NonNull, + old_layout: Layout, + new_size: usize, + ) -> Result, AllocErr> { // Docs for GlobalAlloc::realloc require this to be valid: let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align()); - let new_ptr = GlobalAlloc::alloc(self, new_layout); - if 
!new_ptr.is_null() { - let size = cmp::min(old_layout.size(), new_size); - ptr::copy_nonoverlapping(ptr as *mut u8, new_ptr as *mut u8, size); - GlobalAlloc::dealloc(self, ptr, old_layout); - } - new_ptr + let new_ptr = GlobalAlloc::alloc(self, new_layout)?; + let size = cmp::min(old_layout.size(), new_size); + ptr::copy_nonoverlapping(ptr.as_ptr() as *mut u8, new_ptr.as_ptr() as *mut u8, size); + GlobalAlloc::dealloc(self, ptr, old_layout); + Ok(new_ptr) } } } @@ -104,21 +106,20 @@ mod platform { use MIN_ALIGN; use System; - use core::alloc::{GlobalAlloc, Layout, Opaque}; + use core::alloc::{GlobalAlloc, Layout, Opaque, AllocErr}; + use core::ptr::NonNull; #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl GlobalAlloc for System { #[inline] - unsafe fn alloc(&self, layout: Layout) -> *mut Opaque { + unsafe fn alloc(&self, layout: Layout) -> Result, AllocErr> { if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { - libc::malloc(layout.size()) as *mut Opaque + NonNull::new(libc::malloc(layout.size()) as *mut Opaque).ok_or(AllocErr) } else { #[cfg(target_os = "macos")] { if layout.align() > (1 << 31) { - // FIXME: use Opaque::null_mut - // https://github.com/rust-lang/rust/issues/49659 - return 0 as *mut Opaque + return Err(AllocErr); } } aligned_malloc(&layout) @@ -126,27 +127,32 @@ mod platform { } #[inline] - unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque { + unsafe fn alloc_zeroed(&self, layout: Layout) -> Result, AllocErr> { if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { - libc::calloc(layout.size(), 1) as *mut Opaque + NonNull::new(libc::calloc(layout.size(), 1) as *mut Opaque).ok_or(AllocErr) } else { - let ptr = self.alloc(layout.clone()); - if !ptr.is_null() { - ptr::write_bytes(ptr as *mut u8, 0, layout.size()); - } - ptr + let ptr = self.alloc(layout.clone())?; + ptr::write_bytes(ptr.as_ptr() as *mut u8, 0, layout.size()); + Ok(ptr) } } #[inline] - unsafe fn dealloc(&self, ptr: *mut 
Opaque, _layout: Layout) { - libc::free(ptr as *mut libc::c_void) + unsafe fn dealloc(&self, ptr: NonNull, _layout: Layout) { + libc::free(ptr.as_ptr() as *mut libc::c_void) } #[inline] - unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque { + unsafe fn realloc( + &self, + ptr: NonNull, + layout: Layout, + new_size: usize, + ) -> Result, AllocErr> { if layout.align() <= MIN_ALIGN && layout.align() <= new_size { - libc::realloc(ptr as *mut libc::c_void, new_size) as *mut Opaque + NonNull::new( + libc::realloc(ptr.as_ptr() as *mut libc::c_void, new_size) as *mut Opaque + ).ok_or(AllocErr) } else { self.realloc_fallback(ptr, layout, new_size) } @@ -155,7 +161,7 @@ mod platform { #[cfg(any(target_os = "android", target_os = "redox", target_os = "solaris"))] #[inline] - unsafe fn aligned_malloc(layout: &Layout) -> *mut Opaque { + unsafe fn aligned_malloc(layout: &Layout) -> Result, AllocErr> { // On android we currently target API level 9 which unfortunately // doesn't have the `posix_memalign` API used below. 
Instead we use // `memalign`, but this unfortunately has the property on some systems @@ -173,19 +179,18 @@ mod platform { // [3]: https://bugs.chromium.org/p/chromium/issues/detail?id=138579 // [4]: https://chromium.googlesource.com/chromium/src/base/+/master/ // /memory/aligned_memory.cc - libc::memalign(layout.align(), layout.size()) as *mut Opaque + NonNull::new(libc::memalign(layout.align(), layout.size()) as *mut Opaque).ok_or(AllocErr) } #[cfg(not(any(target_os = "android", target_os = "redox", target_os = "solaris")))] #[inline] - unsafe fn aligned_malloc(layout: &Layout) -> *mut Opaque { + unsafe fn aligned_malloc(layout: &Layout) -> Result, AllocErr> { let mut out = ptr::null_mut(); let ret = libc::posix_memalign(&mut out, layout.align(), layout.size()); if ret != 0 { - // FIXME: use Opaque::null_mut https://github.com/rust-lang/rust/issues/49659 - 0 as *mut Opaque + Err(AllocErr) } else { - out as *mut Opaque + Ok(NonNull::new_unchecked(out as *mut Opaque)) } } } @@ -195,7 +200,8 @@ mod platform { mod platform { use MIN_ALIGN; use System; - use core::alloc::{GlobalAlloc, Opaque, Layout}; + use core::alloc::{GlobalAlloc, Opaque, Layout, AllocErr}; + use core::ptr::NonNull; type LPVOID = *mut u8; type HANDLE = LPVOID; @@ -227,7 +233,10 @@ mod platform { } #[inline] - unsafe fn allocate_with_flags(layout: Layout, flags: DWORD) -> *mut Opaque { + unsafe fn allocate_with_flags( + layout: Layout, + flags: DWORD, + ) -> Result, AllocErr> { let ptr = if layout.align() <= MIN_ALIGN { HeapAlloc(GetProcessHeap(), flags, layout.size()) } else { @@ -239,39 +248,45 @@ mod platform { align_ptr(ptr, layout.align()) } }; - ptr as *mut Opaque + NonNull::new(ptr as *mut Opaque).ok_or(AllocErr) } #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl GlobalAlloc for System { #[inline] - unsafe fn alloc(&self, layout: Layout) -> *mut Opaque { + unsafe fn alloc(&self, layout: Layout) -> Result, AllocErr> { allocate_with_flags(layout, 0) } #[inline] - unsafe 
fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque { + unsafe fn alloc_zeroed(&self, layout: Layout) -> Result, AllocErr> { allocate_with_flags(layout, HEAP_ZERO_MEMORY) } #[inline] - unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) { + unsafe fn dealloc(&self, ptr: NonNull, layout: Layout) { if layout.align() <= MIN_ALIGN { - let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID); - debug_assert!(err != 0, "Failed to free heap memory: {}", - GetLastError()); + let err = HeapFree(GetProcessHeap(), 0, ptr.as_ptr() as LPVOID); + debug_assert!(err != 0, "Failed to free heap memory: {}", GetLastError()); } else { - let header = get_header(ptr as *mut u8); + let header = get_header(ptr.as_ptr() as *mut u8); let err = HeapFree(GetProcessHeap(), 0, header.0 as LPVOID); - debug_assert!(err != 0, "Failed to free heap memory: {}", - GetLastError()); + debug_assert!(err != 0, "Failed to free heap memory: {}", GetLastError()); } } #[inline] - unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque { + unsafe fn realloc( + &self, + ptr: NonNull, + layout: Layout, + new_size: usize, + ) -> Result, AllocErr> { if layout.align() <= MIN_ALIGN { - HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, new_size) as *mut Opaque + NonNull::new( + HeapReAlloc(GetProcessHeap(), 0, ptr.as_ptr() as LPVOID, new_size) + as *mut Opaque + ).ok_or(AllocErr) } else { self.realloc_fallback(ptr, layout, new_size) } @@ -300,7 +315,8 @@ mod platform { mod platform { extern crate dlmalloc; - use core::alloc::{GlobalAlloc, Layout, Opaque}; + use core::alloc::{GlobalAlloc, Layout, Opaque, AllocErr}; + use core::ptr::NonNull; use System; // No need for synchronization here as wasm is currently single-threaded @@ -309,23 +325,33 @@ mod platform { #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl GlobalAlloc for System { #[inline] - unsafe fn alloc(&self, layout: Layout) -> *mut Opaque { - DLMALLOC.malloc(layout.size(), layout.align()) as *mut 
Opaque + unsafe fn alloc(&self, layout: Layout) -> Result, AllocErr> { + NonNull::new(DLMALLOC.malloc(layout.size(), layout.align()) as *mut Opaque) + .ok_or(AllocErr) } #[inline] - unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque { - DLMALLOC.calloc(layout.size(), layout.align()) as *mut Opaque + unsafe fn alloc_zeroed(&self, layout: Layout) -> Result, AllocErr> { + NonNull::new(DLMALLOC.calloc(layout.size(), layout.align()) as *mut Opaque) + .ok_or(AllocErr) } #[inline] - unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) { - DLMALLOC.free(ptr as *mut u8, layout.size(), layout.align()) + unsafe fn dealloc(&self, ptr: NonNull, layout: Layout) { + DLMALLOC.free(ptr.as_ptr() as *mut u8, layout.size(), layout.align()) } #[inline] - unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque { - DLMALLOC.realloc(ptr as *mut u8, layout.size(), layout.align(), new_size) as *mut Opaque + unsafe fn realloc( + &self, + ptr: NonNull, + layout: Layout, + new_size: usize, + ) -> Result, AllocErr> { + NonNull::new( + DLMALLOC.realloc(ptr.as_ptr() as *mut u8, layout.size(), layout.align(), new_size) + as *mut Opaque, + ).ok_or(AllocErr) } } } diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs index 674c4fb57c7f0..ad247b8dada9a 100644 --- a/src/libcore/alloc.rs +++ b/src/libcore/alloc.rs @@ -21,25 +21,15 @@ use mem; use usize; use ptr::{self, NonNull}; -extern { - /// An opaque, unsized type. Used for pointers to allocated memory. - /// - /// This type can only be used behind a pointer like `*mut Opaque` or `ptr::NonNull`. - /// Such pointers are similar to C’s `void*` type. - pub type Opaque; -} - -impl Opaque { - /// Similar to `std::ptr::null`, which requires `T: Sized`. - pub fn null() -> *const Self { - 0 as _ - } - - /// Similar to `std::ptr::null_mut`, which requires `T: Sized`. - pub fn null_mut() -> *mut Self { - 0 as _ - } -} +/// An opaque, byte-sized type. Used for pointers to allocated memory. 
+/// +/// This type can only be used behind a pointer like `*mut Opaque` or `ptr::NonNull<Opaque>`. +/// Such pointers are similar to C’s `void*` type. +/// +/// `Opaque` has a size of 1 byte, which allows you to calculate byte offsets +/// from it using the `offset`, `add` and `sub` methods on raw pointers. +#[allow(missing_debug_implementations)] +pub struct Opaque(u8); /// Represents the combination of a starting address and /// a total capacity of the returned block. @@ -403,7 +393,7 @@ pub unsafe trait GlobalAlloc { /// # Safety /// /// **FIXME:** what are the exact requirements? - unsafe fn alloc(&self, layout: Layout) -> *mut Opaque; + unsafe fn alloc(&self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr>; /// Deallocate the block of memory at the given `ptr` pointer with the given `layout`. /// @@ -411,15 +401,13 @@ pub unsafe trait GlobalAlloc { /// /// **FIXME:** what are the exact requirements? /// In particular around layout *fit*. (See docs for the `Alloc` trait.) - unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout); + unsafe fn dealloc(&self, ptr: NonNull<Opaque>, layout: Layout); - unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque { + unsafe fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> { let size = layout.size(); - let ptr = self.alloc(layout); - if !ptr.is_null() { - ptr::write_bytes(ptr as *mut u8, 0, size); - } - ptr + let ptr = self.alloc(layout)?; + ptr::write_bytes(ptr.as_ptr() as *mut u8, 0, size); + Ok(ptr) } /// Shink or grow a block of memory to the given `new_size`. @@ -438,18 +426,21 @@ /// /// **FIXME:** what are the exact requirements? /// In particular around layout *fit*. (See docs for the `Alloc` trait.) 
- unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque { + unsafe fn realloc( + &self, + ptr: NonNull, + layout: Layout, + new_size: usize, + ) -> Result, AllocErr> { let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - let new_ptr = self.alloc(new_layout); - if !new_ptr.is_null() { - ptr::copy_nonoverlapping( - ptr as *const u8, - new_ptr as *mut u8, - cmp::min(layout.size(), new_size), - ); - self.dealloc(ptr, layout); - } - new_ptr + let new_ptr = self.alloc(new_layout)?; + ptr::copy_nonoverlapping( + ptr.as_ptr() as *const u8, + new_ptr.as_ptr() as *mut u8, + cmp::min(layout.size(), new_size), + ); + self.dealloc(ptr, layout); + Ok(new_ptr) } } @@ -964,7 +955,7 @@ pub unsafe trait Alloc { { let k = Layout::new::(); if k.size() > 0 { - self.dealloc(ptr.as_opaque(), k); + self.dealloc(ptr.cast(), k); } } @@ -1052,7 +1043,7 @@ pub unsafe trait Alloc { match (Layout::array::(n_old), Layout::array::(n_new)) { (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => { debug_assert!(k_old.align() == k_new.align()); - self.realloc(ptr.as_opaque(), k_old.clone(), k_new.size()).map(NonNull::cast) + self.realloc(ptr.cast(), k_old.clone(), k_new.size()).map(NonNull::cast) } _ => { Err(AllocErr) @@ -1085,7 +1076,7 @@ pub unsafe trait Alloc { { match Layout::array::(n) { Ok(ref k) if k.size() > 0 => { - Ok(self.dealloc(ptr.as_opaque(), k.clone())) + Ok(self.dealloc(ptr.cast(), k.clone())) } _ => { Err(AllocErr) diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index 6c0709caa084b..c1b403a5c1461 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -2921,14 +2921,6 @@ impl NonNull { NonNull::new_unchecked(self.as_ptr() as *mut U) } } - - /// Cast to an `Opaque` pointer - #[unstable(feature = "allocator_api", issue = "32838")] - pub fn as_opaque(self) -> NonNull<::alloc::Opaque> { - unsafe { - NonNull::new_unchecked(self.as_ptr() as _) - } - } } #[stable(feature = "nonnull", since = 
"1.25.0")] diff --git a/src/librustc_allocator/expand.rs b/src/librustc_allocator/expand.rs index 497d5fdcac702..30c480af925b8 100644 --- a/src/librustc_allocator/expand.rs +++ b/src/librustc_allocator/expand.rs @@ -14,6 +14,7 @@ use rustc_target::spec::abi::Abi; use syntax::ast::{Attribute, Crate, LitKind, StrStyle}; use syntax::ast::{Arg, Constness, Generics, Mac, Mutability, Ty, Unsafety}; use syntax::ast::{self, Expr, Ident, Item, ItemKind, TyKind, VisibilityKind}; +use syntax::ast::{Path, PathSegment}; use syntax::attr; use syntax::codemap::{dummy_spanned, respan}; use syntax::codemap::{ExpnInfo, MacroAttribute, NameAndSpan}; @@ -149,7 +150,7 @@ impl<'a> AllocFnFactory<'a> { .map(|ty| self.arg_ty(ty, &mut abi_args, mk)) .collect(); let result = self.call_allocator(method.name, args); - let (output_ty, output_expr) = self.ret_ty(&method.output, result); + let (output_ty, output_expr) = self.ret_ty(&method.output, result, mk); let kind = ItemKind::Fn( self.cx.fn_decl(abi_args, ast::FunctionRetTy::Ty(output_ty)), Unsafety::Unsafe, @@ -234,10 +235,40 @@ impl<'a> AllocFnFactory<'a> { } AllocatorTy::Ptr => { + let nonnull_new = Path { + span: self.span, + segments: vec![ + PathSegment { + ident: self.core, + parameters: None, + }, + PathSegment { + ident: Ident::from_str("ptr"), + parameters: None, + }, + PathSegment { + ident: Ident::from_str("NonNull"), + parameters: ast::AngleBracketedParameterData { + span: self.span, + lifetimes: Vec::new(), + types: vec![self.opaque()], + bindings: Vec::new(), + }.into(), + }, + PathSegment { + ident: Ident::from_str("new_unchecked"), + parameters: None, + }, + ], + }; + let nonnull_new = self.cx.expr_path(nonnull_new); + let ptr_opaque = self.cx.ty_ptr(self.span, self.opaque(), Mutability::Mutable); + let ident = ident(); args.push(self.cx.arg(self.span, ident, self.ptr_u8())); let arg = self.cx.expr_ident(self.span, ident); - self.cx.expr_cast(self.span, arg, self.ptr_opaque()) + let arg = self.cx.expr_cast(self.span, arg, 
ptr_opaque); + self.cx.expr_call(self.span, nonnull_new, vec![arg]) } AllocatorTy::Usize => { @@ -252,14 +283,47 @@ impl<'a> AllocFnFactory<'a> { } } - fn ret_ty(&self, ty: &AllocatorTy, expr: P) -> (P, P) { + fn ret_ty( + &self, + ty: &AllocatorTy, + expr: P, + ident: &mut FnMut() -> Ident, + ) -> (P, P) { match *ty { AllocatorTy::ResultPtr => { // We're creating: // - // #expr as *mut u8 + // match #expr { + // Ok(ptr) => ptr.as_ptr() as *mut u8, + // Err(_) => 0 as *mut u8, + // } + + let name = ident(); + let ok_expr = { + let nonnull = self.cx.expr_ident(self.span, name); + let ptr = self.cx.expr_method_call( + self.span, + nonnull, + Ident::from_str("as_ptr"), + Vec::new() + ); + self.cx.expr_cast(self.span, ptr, self.ptr_u8()) + }; + let pat = self.cx.pat_ident(self.span, name); + let ok = self.cx.path_ident(self.span, Ident::from_str("Ok")); + let ok = self.cx.pat_tuple_struct(self.span, ok, vec![pat]); + let ok = self.cx.arm(self.span, vec![ok], ok_expr); + + let err_expr = { + let null = self.cx.expr_usize(self.span, 0); + self.cx.expr_cast(self.span, null, self.ptr_u8()) + }; + let pat = self.cx.pat_wild(self.span); + let err = self.cx.path_ident(self.span, Ident::from_str("Err")); + let err = self.cx.pat_tuple_struct(self.span, err, vec![pat]); + let err = self.cx.arm(self.span, vec![err], err_expr); - let expr = self.cx.expr_cast(self.span, expr, self.ptr_u8()); + let expr = self.cx.expr_match(self.span, expr, vec![ok, err]); (self.ptr_u8(), expr) } @@ -282,7 +346,7 @@ impl<'a> AllocFnFactory<'a> { self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable) } - fn ptr_opaque(&self) -> P { + fn opaque(&self) -> P { let opaque = self.cx.path( self.span, vec![ @@ -291,7 +355,6 @@ impl<'a> AllocFnFactory<'a> { Ident::from_str("Opaque"), ], ); - let ty_opaque = self.cx.ty_path(opaque); - self.cx.ty_ptr(self.span, ty_opaque, Mutability::Mutable) + self.cx.ty_path(opaque) } } diff --git a/src/librustc_asan/lib.rs b/src/librustc_asan/lib.rs index 
3429e3bda0f67..6e496b4a6e6ca 100644 --- a/src/librustc_asan/lib.rs +++ b/src/librustc_asan/lib.rs @@ -21,7 +21,10 @@ extern crate alloc_system; +#[cfg(not(stage0))] use alloc_system::System; +// The GlobalAllocator trait has changed since stage0 +#[cfg(not(stage0))] #[global_allocator] static ALLOC: System = System; diff --git a/src/librustc_lsan/lib.rs b/src/librustc_lsan/lib.rs index 81a09e7e21a64..0154ea6b9f9e2 100644 --- a/src/librustc_lsan/lib.rs +++ b/src/librustc_lsan/lib.rs @@ -21,7 +21,10 @@ extern crate alloc_system; +#[cfg(not(stage0))] use alloc_system::System; +// The GlobalAllocator trait has changed since stage0 +#[cfg(not(stage0))] #[global_allocator] static ALLOC: System = System; diff --git a/src/librustc_msan/lib.rs b/src/librustc_msan/lib.rs index 81a09e7e21a64..0154ea6b9f9e2 100644 --- a/src/librustc_msan/lib.rs +++ b/src/librustc_msan/lib.rs @@ -21,7 +21,10 @@ extern crate alloc_system; +#[cfg(not(stage0))] use alloc_system::System; +// The GlobalAllocator trait has changed since stage0 +#[cfg(not(stage0))] #[global_allocator] static ALLOC: System = System; diff --git a/src/librustc_tsan/lib.rs b/src/librustc_tsan/lib.rs index 3429e3bda0f67..6e496b4a6e6ca 100644 --- a/src/librustc_tsan/lib.rs +++ b/src/librustc_tsan/lib.rs @@ -21,7 +21,10 @@ extern crate alloc_system; +#[cfg(not(stage0))] use alloc_system::System; +// The GlobalAllocator trait has changed since stage0 +#[cfg(not(stage0))] #[global_allocator] static ALLOC: System = System; diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs index 4f9dffc7c9549..1307d536fdcce 100644 --- a/src/libstd/alloc.rs +++ b/src/libstd/alloc.rs @@ -73,6 +73,7 @@ pub extern fn rust_oom(layout: Layout) -> ! 
{ #[allow(unused_attributes)] pub mod __default_lib_allocator { use super::{System, Layout, GlobalAlloc, Opaque}; + use ptr::{self, NonNull}; // for symbol names src/librustc/middle/allocator.rs // for signatures src/librustc_allocator/lib.rs @@ -83,7 +84,10 @@ pub mod __default_lib_allocator { #[rustc_std_internal_symbol] pub unsafe extern fn __rdl_alloc(size: usize, align: usize) -> *mut u8 { let layout = Layout::from_size_align_unchecked(size, align); - System.alloc(layout) as *mut u8 + match System.alloc(layout) { + Ok(p) => p.as_ptr() as *mut u8, + Err(_) => ptr::null_mut(), + } } #[no_mangle] @@ -91,7 +95,8 @@ pub mod __default_lib_allocator { pub unsafe extern fn __rdl_dealloc(ptr: *mut u8, size: usize, align: usize) { - System.dealloc(ptr as *mut Opaque, Layout::from_size_align_unchecked(size, align)) + let layout = Layout::from_size_align_unchecked(size, align); + System.dealloc(NonNull::new_unchecked(ptr as *mut Opaque), layout); } #[no_mangle] @@ -101,13 +106,19 @@ pub mod __default_lib_allocator { align: usize, new_size: usize) -> *mut u8 { let old_layout = Layout::from_size_align_unchecked(old_size, align); - System.realloc(ptr as *mut Opaque, old_layout, new_size) as *mut u8 + match System.realloc(NonNull::new_unchecked(ptr as *mut Opaque), old_layout, new_size) { + Ok(p) => p.as_ptr() as *mut u8, + Err(_) => ptr::null_mut(), + } } #[no_mangle] #[rustc_std_internal_symbol] pub unsafe extern fn __rdl_alloc_zeroed(size: usize, align: usize) -> *mut u8 { let layout = Layout::from_size_align_unchecked(size, align); - System.alloc_zeroed(layout) as *mut u8 + match System.alloc_zeroed(layout) { + Ok(p) => p.as_ptr() as *mut u8, + Err(_) => ptr::null_mut(), + } } } diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs index eed2debcaa282..4787c00ed0b53 100644 --- a/src/libstd/collections/hash/table.rs +++ b/src/libstd/collections/hash/table.rs @@ -1204,7 +1204,7 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for 
RawTable<K, V> { debug_assert!(!oflo, "should be impossible"); unsafe { - Global.dealloc(NonNull::new_unchecked(self.hashes.ptr()).as_opaque(), + Global.dealloc(NonNull::new_unchecked(self.hashes.ptr()).cast(), Layout::from_size_align(size, align).unwrap()); // Remember how everything was allocated out of one buffer // during initialization? We only need one call to free here. diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index f7d06852f2793..bbc29cdc00abf 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -326,14 +326,65 @@ // `force_alloc_system` is *only* intended as a workaround for local rebuilds // with a rustc without jemalloc. // FIXME(#44236) shouldn't need MSVC logic -#![cfg_attr(all(not(target_env = "msvc"), - any(all(stage0, not(test)), feature = "force_alloc_system")), +#![cfg_attr(all(not(target_env = "msvc"), not(stage0), feature = "force_alloc_system"), feature(global_allocator))] -#[cfg(all(not(target_env = "msvc"), - any(all(stage0, not(test)), feature = "force_alloc_system")))] +#[cfg(all(not(target_env = "msvc"), not(stage0), feature = "force_alloc_system"))] #[global_allocator] static ALLOC: alloc_system::System = alloc_system::System; +// Workaround for the GlobalAlloc trait having changed since stage0. 
This code +// is mostly copied from libstd/alloc.rs +#[cfg(all(not(target_env = "msvc"), stage0, not(test)))] +#[unstable(issue = "0", feature = "std_internals")] +#[doc(hidden)] +#[allow(unused_attributes)] +pub mod __global_lib_allocator { + use alloc::{System, Layout, GlobalAlloc, Opaque}; + use ptr::{self, NonNull}; + + #[no_mangle] + #[rustc_std_internal_symbol] + pub unsafe extern fn __rg_alloc(size: usize, align: usize) -> *mut u8 { + let layout = Layout::from_size_align_unchecked(size, align); + match System.alloc(layout) { + Ok(p) => p.as_ptr() as *mut u8, + Err(_) => ptr::null_mut(), + } + } + + #[no_mangle] + #[rustc_std_internal_symbol] + pub unsafe extern fn __rg_dealloc(ptr: *mut u8, + size: usize, + align: usize) { + let layout = Layout::from_size_align_unchecked(size, align); + System.dealloc(NonNull::new_unchecked(ptr as *mut Opaque), layout); + } + + #[no_mangle] + #[rustc_std_internal_symbol] + pub unsafe extern fn __rg_realloc(ptr: *mut u8, + old_size: usize, + align: usize, + new_size: usize) -> *mut u8 { + let old_layout = Layout::from_size_align_unchecked(old_size, align); + match System.realloc(NonNull::new_unchecked(ptr as *mut Opaque), old_layout, new_size) { + Ok(p) => p.as_ptr() as *mut u8, + Err(_) => ptr::null_mut(), + } + } + + #[no_mangle] + #[rustc_std_internal_symbol] + pub unsafe extern fn __rg_alloc_zeroed(size: usize, align: usize) -> *mut u8 { + let layout = Layout::from_size_align_unchecked(size, align); + match System.alloc_zeroed(layout) { + Ok(p) => p.as_ptr() as *mut u8, + Err(_) => ptr::null_mut(), + } + } +} + // Explicitly import the prelude. The compiler uses this same unstable attribute // to import the prelude implicitly when building crates that depend on std. 
#[prelude_import] diff --git a/src/test/run-make-fulldeps/std-core-cycle/bar.rs b/src/test/run-make-fulldeps/std-core-cycle/bar.rs index 62fd2ade1ca5c..5e60a35e971dd 100644 --- a/src/test/run-make-fulldeps/std-core-cycle/bar.rs +++ b/src/test/run-make-fulldeps/std-core-cycle/bar.rs @@ -12,15 +12,16 @@ #![crate_type = "rlib"] use std::alloc::*; +use std::ptr::NonNull; pub struct A; unsafe impl GlobalAlloc for A { - unsafe fn alloc(&self, _: Layout) -> *mut Opaque { + unsafe fn alloc(&self, _: Layout) -> Result<NonNull<Opaque>, AllocErr> { loop {} } - unsafe fn dealloc(&self, _ptr: *mut Opaque, _: Layout) { + unsafe fn dealloc(&self, _ptr: NonNull<Opaque>, _: Layout) { loop {} } } diff --git a/src/test/run-pass/allocator/auxiliary/custom.rs b/src/test/run-pass/allocator/auxiliary/custom.rs index 91f70aa83e854..7803be6453526 100644 --- a/src/test/run-pass/allocator/auxiliary/custom.rs +++ b/src/test/run-pass/allocator/auxiliary/custom.rs @@ -13,18 +13,19 @@ #![feature(heap_api, allocator_api)] #![crate_type = "rlib"] -use std::alloc::{GlobalAlloc, System, Layout, Opaque}; +use std::alloc::{AllocErr, GlobalAlloc, System, Layout, Opaque}; +use std::ptr::NonNull; use std::sync::atomic::{AtomicUsize, Ordering}; pub struct A(pub AtomicUsize); unsafe impl GlobalAlloc for A { - unsafe fn alloc(&self, layout: Layout) -> *mut Opaque { + unsafe fn alloc(&self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> { self.0.fetch_add(1, Ordering::SeqCst); System.alloc(layout) } - unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) { + unsafe fn dealloc(&self, ptr: NonNull<Opaque>, layout: Layout) { self.0.fetch_add(1, Ordering::SeqCst); System.dealloc(ptr, layout) } diff --git a/src/test/run-pass/allocator/custom.rs b/src/test/run-pass/allocator/custom.rs index 415d39a593e16..47f7cfb99635e 100644 --- a/src/test/run-pass/allocator/custom.rs +++ b/src/test/run-pass/allocator/custom.rs @@ -15,20 +15,21 @@ extern crate helper; -use std::alloc::{self, Global, Alloc, System, Layout, Opaque}; +use std::alloc::{self, Global, 
Alloc, AllocErr, System, Layout, Opaque}; use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; use std::ptr::NonNull; static HITS: AtomicUsize = ATOMIC_USIZE_INIT; struct A; unsafe impl alloc::GlobalAlloc for A { - unsafe fn alloc(&self, layout: Layout) -> *mut Opaque { + unsafe fn alloc(&self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> { HITS.fetch_add(1, Ordering::SeqCst); System.alloc(layout) } - unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) { + unsafe fn dealloc(&self, ptr: NonNull<Opaque>, layout: Layout) { HITS.fetch_add(1, Ordering::SeqCst); System.dealloc(ptr, layout) } diff --git a/src/test/run-pass/allocator/xcrate-use2.rs b/src/test/run-pass/allocator/xcrate-use2.rs index b8e844522dc8b..d18d9bec3f2ad 100644 --- a/src/test/run-pass/allocator/xcrate-use2.rs +++ b/src/test/run-pass/allocator/xcrate-use2.rs @@ -30,21 +30,21 @@ fn main() { let layout = Layout::from_size_align(4, 2).unwrap(); // Global allocator routes to the `custom_as_global` global - let ptr = Global.alloc(layout.clone()); + let ptr = Global.alloc(layout.clone()).unwrap(); helper::work_with(&ptr); assert_eq!(custom_as_global::get(), n + 1); Global.dealloc(ptr, layout.clone()); assert_eq!(custom_as_global::get(), n + 2); // Usage of the system allocator avoids all globals - let ptr = System.alloc(layout.clone()); + let ptr = System.alloc(layout.clone()).unwrap(); helper::work_with(&ptr); assert_eq!(custom_as_global::get(), n + 2); System.dealloc(ptr, layout.clone()); assert_eq!(custom_as_global::get(), n + 2); // Usage of our personal allocator doesn't affect other instances - let ptr = GLOBAL.alloc(layout.clone()); + let ptr = GLOBAL.alloc(layout.clone()).unwrap(); helper::work_with(&ptr); assert_eq!(custom_as_global::get(), n + 2); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), 1); diff --git a/src/test/run-pass/realloc-16687.rs b/src/test/run-pass/realloc-16687.rs index febd249d776af..355053858cc14 100644 --- a/src/test/run-pass/realloc-16687.rs +++ 
b/src/test/run-pass/realloc-16687.rs @@ -64,7 +64,7 @@ unsafe fn test_triangle() -> bool { println!("deallocate({:?}, {:?}", ptr, layout); } - Global.dealloc(NonNull::new_unchecked(ptr).as_opaque(), layout); + Global.dealloc(NonNull::new_unchecked(ptr).cast(), layout); } unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 { @@ -72,7 +72,7 @@ unsafe fn test_triangle() -> bool { println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new); } - let ret = Global.realloc(NonNull::new_unchecked(ptr).as_opaque(), old, new.size()) + let ret = Global.realloc(NonNull::new_unchecked(ptr).cast(), old, new.size()) .unwrap_or_else(|_| oom(Layout::from_size_align_unchecked(new.size(), old.align()))); if PRINT {