
Commit 8eee1b4

auto merge of #17095 : thestinger/rust/alloc, r=alexcrichton
Previously, some parts of this optimization were impossible because the alignment passed to the free function was not correct. That was fully fixed by #17012. Closes #17092
2 parents: 370f8df + 72a92b2
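In outline, the change gives every allocator backend the same shape: a per-architecture MIN_ALIGN constant plus a branch that routes low-alignment requests through the plain C heap, leaving the aligned entry points for over-aligned requests only. The fragment below is not part of the commit; it is a condensed sketch of that pattern in the same pre-1.0 dialect as the file, with aligned_alloc_path standing in for whichever aligned allocator a backend actually uses (mallocx, posix_memalign or _aligned_malloc).

    // Condensed sketch (not from the commit) of the fast-path dispatch the
    // diff adds to each backend.
    static MIN_ALIGN: uint = 16;   // per-architecture; 8 on arm/mips, 16 on x86/x86_64

    pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
        if align <= MIN_ALIGN {
            // malloc already guarantees MIN_ALIGN on this target, so no
            // aligned allocator is needed and free() can later release it.
            libc::malloc(size as libc::size_t) as *mut u8
        } else {
            // hypothetical stand-in for mallocx / posix_memalign / _aligned_malloc
            aligned_alloc_path(size, align)
        }
    }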

File tree: 2 files changed (+76 -35 lines)
src/liballoc/heap.rs (+75 -34)
@@ -149,12 +149,24 @@ unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint,
     alloc as *mut u8
 }
 
+// The minimum alignment guaranteed by the architecture. This value is used to
+// add fast paths for low alignment values. In practice, the alignment is a
+// constant at the call site and the branch will be optimized out.
+#[cfg(target_arch = "arm")]
+#[cfg(target_arch = "mips")]
+#[cfg(target_arch = "mipsel")]
+static MIN_ALIGN: uint = 8;
+#[cfg(target_arch = "x86")]
+#[cfg(target_arch = "x86_64")]
+static MIN_ALIGN: uint = 16;
+
 #[cfg(jemalloc)]
 mod imp {
     use core::option::{None, Option};
     use core::ptr::{RawPtr, mut_null, null};
     use core::num::Int;
     use libc::{c_char, c_int, c_void, size_t};
+    use super::MIN_ALIGN;
 
     #[link(name = "jemalloc", kind = "static")]
     #[cfg(not(test))]
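For context (not stated in the diff): the two values track what the platform malloc already guarantees, 8 bytes on 32-bit ARM/MIPS and 16 bytes on x86/x86_64, and the "branch will be optimized out" remark holds because the allocator is normally reached with a compile-time-constant alignment. A hypothetical call site in the same dialect:

    // Hypothetical caller (not in the commit): min_align_of::<T>() is a
    // constant, so `align <= MIN_ALIGN` inside allocate() folds at compile
    // time and only one of the two paths survives codegen.
    unsafe fn alloc_one<T>() -> *mut u8 {
        allocate(mem::size_of::<T>(), mem::min_align_of::<T>())
    }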
@@ -166,7 +178,10 @@ mod imp {
                       flags: c_int) -> *mut c_void;
         fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t,
                       flags: c_int) -> size_t;
+        #[cfg(stage0)]
         fn je_dallocx(ptr: *mut c_void, flags: c_int);
+        #[cfg(not(stage0))]
+        fn je_sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);
         fn je_nallocx(size: size_t, flags: c_int) -> size_t;
         fn je_malloc_stats_print(write_cb: Option<extern "C" fn(cbopaque: *mut c_void,
                                                                 *const c_char)>,
@@ -183,9 +198,15 @@ mod imp {
     #[inline(always)]
     fn mallocx_align(a: uint) -> c_int { a.trailing_zeros() as c_int }
 
+    #[inline(always)]
+    fn align_to_flags(align: uint) -> c_int {
+        if align <= MIN_ALIGN { 0 } else { mallocx_align(align) }
+    }
+
     #[inline]
     pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
-        let ptr = je_mallocx(size as size_t, mallocx_align(align)) as *mut u8;
+        let flags = align_to_flags(align);
+        let ptr = je_mallocx(size as size_t, flags) as *mut u8;
         if ptr.is_null() {
             ::oom()
         }
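A note on the encoding (jemalloc convention, not spelled out in the diff): the flags argument of the *allocx functions carries lg(alignment) in its low bits (the MALLOCX_LG_ALIGN / MALLOCX_ALIGN macros), so for a power-of-two alignment trailing_zeros() is exactly the value needed, and a flags value of 0 simply asks for jemalloc's default alignment, which is why `align <= MIN_ALIGN` can drop the flag altogether. A tiny worked example, in modern syntax rather than the file's pre-1.0 dialect:

    // trailing_zeros() is lg2 for a power of two, matching MALLOCX_LG_ALIGN.
    fn main() {
        assert_eq!(32u32.trailing_zeros(), 5);  // align = 32 -> flags = 5
        assert_eq!(16u32.trailing_zeros(), 4);  // align = 16 -> but 16 <= MIN_ALIGN on x86_64, so flags = 0
    }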
@@ -195,8 +216,8 @@ mod imp {
     #[inline]
     pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint,
                              _old_size: uint) -> *mut u8 {
-        let ptr = je_rallocx(ptr as *mut c_void, size as size_t,
-                             mallocx_align(align)) as *mut u8;
+        let flags = align_to_flags(align);
+        let ptr = je_rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8;
         if ptr.is_null() {
             ::oom()
         }
@@ -206,18 +227,28 @@ mod imp {
     #[inline]
     pub unsafe fn reallocate_inplace(ptr: *mut u8, size: uint, align: uint,
                                      _old_size: uint) -> bool {
-        je_xallocx(ptr as *mut c_void, size as size_t, 0,
-                   mallocx_align(align)) == size as size_t
+        let flags = align_to_flags(align);
+        je_xallocx(ptr as *mut c_void, size as size_t, 0, flags) == size as size_t
     }
 
     #[inline]
+    #[cfg(stage0)]
     pub unsafe fn deallocate(ptr: *mut u8, _size: uint, align: uint) {
-        je_dallocx(ptr as *mut c_void, mallocx_align(align))
+        let flags = align_to_flags(align);
+        je_dallocx(ptr as *mut c_void, flags)
+    }
+
+    #[inline]
+    #[cfg(not(stage0))]
+    pub unsafe fn deallocate(ptr: *mut u8, size: uint, align: uint) {
+        let flags = align_to_flags(align);
+        je_sdallocx(ptr as *mut c_void, size as size_t, flags)
     }
 
     #[inline]
     pub fn usable_size(size: uint, align: uint) -> uint {
-        unsafe { je_nallocx(size as size_t, mallocx_align(align)) as uint }
+        let flags = align_to_flags(align);
+        unsafe { je_nallocx(size as size_t, flags) as uint }
     }
 
     pub fn stats_print() {
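The split above is the sized-deallocation half of the change: je_sdallocx is jemalloc's sdallocx entry point, which takes the allocation size so jemalloc can skip the internal size lookup that plain dallocx has to perform, while the #[cfg(stage0)] twin keeps the old unsized call for the stage0 bootstrap build. It is only sound because callers now hand deallocate the same size and alignment they allocated with (the alignment part being the #17012 fix the commit message refers to). An illustrative round trip, not taken from the commit:

    // Illustrative only: the size and alignment passed to deallocate must
    // match the original request, which is what sized deallocation relies on.
    unsafe {
        let p = allocate(64, 8);
        // ... use the 64 bytes at p ...
        deallocate(p, 64, 8);
    }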
@@ -234,6 +265,7 @@ mod imp {
     use core::ptr;
     use libc;
     use libc_heap;
+    use super::MIN_ALIGN;
 
     extern {
         fn posix_memalign(memptr: *mut *mut libc::c_void,
@@ -243,16 +275,7 @@ mod imp {
 
     #[inline]
     pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
-        // The posix_memalign manpage states
-        //
-        //      alignment [...] must be a power of and a multiple of
-        //      sizeof(void *)
-        //
-        // The `align` parameter to this function is the *minimum* alignment for
-        // a block of memory, so we special case everything under `*uint` to
-        // just pass it to malloc, which is guaranteed to align to at least the
-        // size of `*uint`.
-        if align < mem::size_of::<uint>() {
+        if align <= MIN_ALIGN {
             libc_heap::malloc_raw(size)
         } else {
             let mut out = 0 as *mut libc::c_void;
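Dropping the long posix_memalign comment loses no correctness: posix_memalign still requires the alignment to be a power of two and a multiple of sizeof(void *), but requests at or below MIN_ALIGN never reach it any more, and any alignment that does fall through to posix_memalign is a power of two larger than MIN_ALIGN (8 or 16), so it is always at least sizeof(void *) on the architectures listed above.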
@@ -269,10 +292,14 @@ mod imp {
     #[inline]
     pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint,
                              old_size: uint) -> *mut u8 {
-        let new_ptr = allocate(size, align);
-        ptr::copy_memory(new_ptr, ptr as *const u8, cmp::min(size, old_size));
-        deallocate(ptr, old_size, align);
-        return new_ptr;
+        if align <= MIN_ALIGN {
+            libc_heap::realloc_raw(ptr, size)
+        } else {
+            let new_ptr = allocate(size, align);
+            ptr::copy_memory(new_ptr, ptr as *const u8, cmp::min(size, old_size));
+            deallocate(ptr, old_size, align);
+            new_ptr
+        }
     }
 
     #[inline]
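The fallback reallocate gains the same fast path: for `align <= MIN_ALIGN` it can defer to realloc, which preserves the default malloc alignment and can often grow the block in place, while over-aligned blocks keep the allocate/copy/deallocate sequence because realloc makes no promise about alignments beyond what malloc guarantees.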
@@ -291,14 +318,16 @@ mod imp {
         size
     }
 
-    pub fn stats_print() {
-    }
+    pub fn stats_print() {}
 }
 
 #[cfg(not(jemalloc), windows)]
 mod imp {
     use libc::{c_void, size_t};
+    use libc;
+    use libc_heap;
     use core::ptr::RawPtr;
+    use super::MIN_ALIGN;
 
     extern {
         fn _aligned_malloc(size: size_t, align: size_t) -> *mut c_void;
@@ -309,22 +338,30 @@ mod imp {
 
     #[inline]
     pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
-        let ptr = _aligned_malloc(size as size_t, align as size_t);
-        if ptr.is_null() {
-            ::oom();
+        if align <= MIN_ALIGN {
+            libc_heap::malloc_raw(size)
+        } else {
+            let ptr = _aligned_malloc(size as size_t, align as size_t);
+            if ptr.is_null() {
+                ::oom();
+            }
+            ptr as *mut u8
         }
-        ptr as *mut u8
     }
 
     #[inline]
     pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint,
                              _old_size: uint) -> *mut u8 {
-        let ptr = _aligned_realloc(ptr as *mut c_void, size as size_t,
-                                   align as size_t);
-        if ptr.is_null() {
-            ::oom();
+        if align <= MIN_ALIGN {
+            libc_heap::realloc_raw(ptr, size)
+        } else {
+            let ptr = _aligned_realloc(ptr as *mut c_void, size as size_t,
+                                       align as size_t);
+            if ptr.is_null() {
+                ::oom();
+            }
+            ptr as *mut u8
         }
-        ptr as *mut u8
     }
 
     #[inline]
@@ -334,8 +371,12 @@ mod imp {
     }
 
     #[inline]
-    pub unsafe fn deallocate(ptr: *mut u8, _size: uint, _align: uint) {
-        _aligned_free(ptr as *mut c_void)
+    pub unsafe fn deallocate(ptr: *mut u8, _size: uint, align: uint) {
+        if align <= MIN_ALIGN {
+            libc::free(ptr as *mut libc::c_void)
+        } else {
+            _aligned_free(ptr as *mut c_void)
+        }
     }
 
     #[inline]

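On Windows the pairing matters: a block from malloc must go back through free and a block from _aligned_malloc through _aligned_free, so this fast path is only possible now that deallocate receives the real alignment instead of ignoring it, the part of the optimization the commit message says was blocked before #17012. An illustrative pairing, not from the commit:

    // Whichever branch allocate() took, deallocate() takes the same one,
    // because the caller passes the original alignment back in.
    unsafe {
        let small = allocate(256, 8);    //  8 <= MIN_ALIGN -> malloc
        deallocate(small, 256, 8);       //                 -> free
        let big = allocate(256, 64);     // 64 >  MIN_ALIGN -> _aligned_malloc
        deallocate(big, 256, 64);        //                 -> _aligned_free
    }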