Skip to content

Changes to GlobalAlloc #51160

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 10 additions & 13 deletions src/doc/unstable-book/src/language-features/global-allocator.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,17 +29,17 @@ looks like:
```rust
#![feature(global_allocator, allocator_api, heap_api)]

use std::alloc::{GlobalAlloc, System, Layout, Opaque};
use std::alloc::{AllocErr, GlobalAlloc, System, Layout, Opaque};
use std::ptr::NonNull;

struct MyAllocator;

unsafe impl GlobalAlloc for MyAllocator {
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
unsafe fn alloc(&self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
System.alloc(layout)
}

unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
unsafe fn dealloc(&self, ptr: NonNull<Opaque>, layout: Layout) {
System.dealloc(ptr, layout)
}
}
Expand All @@ -55,18 +55,15 @@ fn main() {
```

And that's it! The `#[global_allocator]` attribute is applied to a `static`
which implements the `Alloc` trait in the `std::alloc` module. Note, though,
that the implementation is defined for `&MyAllocator`, not just `MyAllocator`.
You may wish, however, to also provide `Alloc for MyAllocator` for other use
cases.
which implements the `GlobalAlloc` trait in the `std::alloc` module.

A crate can only have one instance of `#[global_allocator]` and this instance
may be loaded through a dependency. For example `#[global_allocator]` above
could have been placed in one of the dependencies loaded through `extern crate`.

Note that `Alloc` itself is an `unsafe` trait, with much documentation on the
trait itself about usage and for implementors. Extra care should be taken when
implementing a global allocator as well as the allocator may be called from many
portions of the standard library, such as the panicking routine. As a result it
is highly recommended to not panic during allocation and work in as many
situations with as few dependencies as possible as well.
Note that `GlobalAlloc` itself is an `unsafe` trait, with much documentation on
the trait itself about usage and for implementors. Extra care should be taken
when implementing a global allocator, as the allocator may be called from many
portions of the standard library, such as the panicking routine. As a result,
it is highly recommended not to panic during allocation and to work in as many
situations with as few dependencies as possible.
53 changes: 29 additions & 24 deletions src/liballoc/alloc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,38 +51,46 @@ pub const Heap: Global = Global;

unsafe impl GlobalAlloc for Global {
#[inline]
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
let ptr = __rust_alloc(layout.size(), layout.align());
ptr as *mut Opaque
unsafe fn alloc(&self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(__rust_alloc(layout.size(), layout.align()) as *mut Opaque).ok_or(AllocErr)
}

#[inline]
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
__rust_dealloc(ptr as *mut u8, layout.size(), layout.align())
unsafe fn dealloc(&self, ptr: NonNull<Opaque>, layout: Layout) {
__rust_dealloc(ptr.as_ptr() as *mut u8, layout.size(), layout.align())
}

#[inline]
unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(), new_size);
ptr as *mut Opaque
unsafe fn realloc(
&self,
ptr: NonNull<Opaque>,
layout: Layout,
new_size: usize,
) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(__rust_realloc(
ptr.as_ptr() as *mut u8,
layout.size(),
layout.align(),
new_size,
) as *mut Opaque).ok_or(AllocErr)
}

#[inline]
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
let ptr = __rust_alloc_zeroed(layout.size(), layout.align());
ptr as *mut Opaque
unsafe fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(__rust_alloc_zeroed(layout.size(), layout.align()) as *mut Opaque)
.ok_or(AllocErr)
}
}

unsafe impl Alloc for Global {
#[inline]
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
GlobalAlloc::alloc(self, layout)
}

#[inline]
unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
GlobalAlloc::dealloc(self, ptr, layout)
}

#[inline]
Expand All @@ -92,12 +100,12 @@ unsafe impl Alloc for Global {
new_size: usize)
-> Result<NonNull<Opaque>, AllocErr>
{
NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
GlobalAlloc::realloc(self, ptr, layout, new_size)
}

#[inline]
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
GlobalAlloc::alloc_zeroed(self, layout)
}
}

Expand All @@ -111,25 +119,22 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
align as *mut u8
} else {
let layout = Layout::from_size_align_unchecked(size, align);
let ptr = Global.alloc(layout);
if !ptr.is_null() {
ptr as *mut u8
} else {
oom(layout)
match Global.alloc(layout) {
Ok(p) => p.as_ptr() as *mut u8,
Err(_) => oom(layout),
}
}
}

#[cfg_attr(not(test), lang = "box_free")]
#[inline]
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
let ptr = ptr.as_ptr();
let size = size_of_val(&*ptr);
let align = min_align_of_val(&*ptr);
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
// We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
if size != 0 {
let layout = Layout::from_size_align_unchecked(size, align);
Global.dealloc(ptr as *mut Opaque, layout);
Global.dealloc(NonNull::from(ptr).cast(), layout);
}
}

Expand Down
6 changes: 3 additions & 3 deletions src/liballoc/arc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -518,7 +518,7 @@ impl<T: ?Sized> Arc<T> {

if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
}
}

Expand Down Expand Up @@ -638,7 +638,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);

Global.dealloc(self.mem.as_opaque(), self.layout.clone());
Global.dealloc(self.mem.cast(), self.layout.clone());
}
}
}
Expand Down Expand Up @@ -1157,7 +1157,7 @@ impl<T: ?Sized> Drop for Weak<T> {
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe {
Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
}
}
}
Expand Down
10 changes: 5 additions & 5 deletions src/liballoc/btree/node.rs
Original file line number Diff line number Diff line change
Expand Up @@ -287,7 +287,7 @@ impl<K, V> Root<K, V> {
self.as_mut().as_leaf_mut().parent = ptr::null();

unsafe {
Global.dealloc(NonNull::from(top).as_opaque(), Layout::new::<InternalNode<K, V>>());
Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
}
}
}
Expand Down Expand Up @@ -478,7 +478,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
debug_assert!(!self.is_shared_root());
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(node.as_opaque(), Layout::new::<LeafNode<K, V>>());
Global.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
ret
}
}
Expand All @@ -499,7 +499,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
> {
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(node.as_opaque(), Layout::new::<InternalNode<K, V>>());
Global.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
ret
}
}
Expand Down Expand Up @@ -1321,12 +1321,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
}

Global.dealloc(
right_node.node.as_opaque(),
right_node.node.cast(),
Layout::new::<InternalNode<K, V>>(),
);
} else {
Global.dealloc(
right_node.node.as_opaque(),
right_node.node.cast(),
Layout::new::<LeafNode<K, V>>(),
);
}
Expand Down
14 changes: 7 additions & 7 deletions src/liballoc/raw_vec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ impl<T, A: Alloc> RawVec<T, A> {

// handles ZSTs and `cap = 0` alike
let ptr = if alloc_size == 0 {
NonNull::<T>::dangling().as_opaque()
NonNull::<T>::dangling().cast()
} else {
let align = mem::align_of::<T>();
let layout = Layout::from_size_align(alloc_size, align).unwrap();
Expand Down Expand Up @@ -314,7 +314,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let new_cap = 2 * self.cap;
let new_size = new_cap * elem_size;
alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_opaque(),
let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(),
cur,
new_size);
match ptr_res {
Expand Down Expand Up @@ -373,7 +373,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let new_cap = 2 * self.cap;
let new_size = new_cap * elem_size;
alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
match self.a.grow_in_place(NonNull::from(self.ptr).as_opaque(), old_layout, new_size) {
match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) {
Ok(_) => {
// We can't directly divide `size`.
self.cap = new_cap;
Expand Down Expand Up @@ -546,7 +546,7 @@ impl<T, A: Alloc> RawVec<T, A> {
// FIXME: may crash and burn on over-reserve
alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
match self.a.grow_in_place(
NonNull::from(self.ptr).as_opaque(), old_layout, new_layout.size(),
NonNull::from(self.ptr).cast(), old_layout, new_layout.size(),
) {
Ok(_) => {
self.cap = new_cap;
Expand Down Expand Up @@ -607,7 +607,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let new_size = elem_size * amount;
let align = mem::align_of::<T>();
let old_layout = Layout::from_size_align_unchecked(old_size, align);
match self.a.realloc(NonNull::from(self.ptr).as_opaque(),
match self.a.realloc(NonNull::from(self.ptr).cast(),
old_layout,
new_size) {
Ok(p) => self.ptr = p.cast().into(),
Expand Down Expand Up @@ -667,7 +667,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let res = match self.current_layout() {
Some(layout) => {
debug_assert!(new_layout.align() == layout.align());
self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size())
}
None => self.a.alloc(new_layout),
};
Expand Down Expand Up @@ -710,7 +710,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let elem_size = mem::size_of::<T>();
if elem_size != 0 {
if let Some(layout) = self.current_layout() {
self.a.dealloc(NonNull::from(self.ptr).as_opaque(), layout);
self.a.dealloc(NonNull::from(self.ptr).cast(), layout);
}
}
}
Expand Down
4 changes: 2 additions & 2 deletions src/liballoc/rc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -845,7 +845,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
self.dec_weak();

if self.weak() == 0 {
Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()));
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
}
}
}
Expand Down Expand Up @@ -1269,7 +1269,7 @@ impl<T: ?Sized> Drop for Weak<T> {
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()));
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
}
}
}
Expand Down
Loading