
Commit e6a3c6e

libcore: Make it unsafe to create NonZero and impl Deref.
Parent: 61a737a
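In short: the wrapper's tuple field is no longer public, so construction now goes through an `unsafe` constructor and reads go through `Deref`. A typical before/after, lifted from the arc.rs hunks below:

    // before: public field, destructured by pattern
    let NonZero(ptr) = self._ptr;
    unsafe { &*ptr }

    // after: unsafe construction, reads via Deref
    Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
    unsafe { &**self._ptr }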

4 files changed, +51 -54 lines

src/liballoc/arc.rs

Lines changed: 6 additions & 9 deletions
@@ -91,7 +91,7 @@ impl<T: Sync + Send> Arc<T> {
             weak: atomic::AtomicUint::new(1),
             data: data,
         };
-        Arc { _ptr: NonZero(unsafe { mem::transmute(x) }) }
+        Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
     }

     /// Downgrades a strong pointer to a weak pointer.
@@ -115,8 +115,7 @@ impl<T> Arc<T> {
         // `ArcInner` structure itself is `Sync` because the inner data is
         // `Sync` as well, so we're ok loaning out an immutable pointer to
         // these contents.
-        let NonZero(ptr) = self._ptr;
-        unsafe { &*ptr }
+        unsafe { &**self._ptr }
     }
 }

@@ -184,8 +183,7 @@ impl<T: Send + Sync + Clone> Arc<T> {
         // reference count is guaranteed to be 1 at this point, and we required
         // the Arc itself to be `mut`, so we're returning the only possible
         // reference to the inner data.
-        let NonZero(ptr) = self._ptr;
-        let inner = unsafe { &mut *ptr };
+        let inner = unsafe { &mut **self._ptr };
         &mut inner.data
     }
 }
@@ -194,7 +192,7 @@ impl<T: Send + Sync + Clone> Arc<T> {
 #[experimental = "waiting on stability of Drop"]
 impl<T: Sync + Send> Drop for Arc<T> {
     fn drop(&mut self) {
-        let NonZero(ptr) = self._ptr;
+        let ptr = *self._ptr;
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run
         // more than once (but it is guaranteed to be zeroed after the first if
         // it's run more than once)
@@ -258,8 +256,7 @@ impl<T: Sync + Send> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        let NonZero(ptr) = self._ptr;
-        unsafe { &*ptr }
+        unsafe { &**self._ptr }
     }
 }

@@ -277,7 +274,7 @@ impl<T: Sync + Send> Clone for Weak<T> {
 #[experimental = "Weak pointers may not belong in this module."]
 impl<T: Sync + Send> Drop for Weak<T> {
     fn drop(&mut self) {
-        let NonZero(ptr) = self._ptr;
+        let ptr = *self._ptr;

         // see comments above for why this check is here
         if ptr.is_null() { return }

src/liballoc/rc.rs

Lines changed: 8 additions & 21 deletions
@@ -195,7 +195,7 @@ impl<T> Rc<T> {
                 // destructor never frees the allocation while the
                 // strong destructor is running, even if the weak
                 // pointer is stored inside the strong one.
-                _ptr: NonZero(transmute(box RcBox {
+                _ptr: NonZero::new(transmute(box RcBox {
                     value: value,
                     strong: Cell::new(1),
                     weak: Cell::new(1)
@@ -280,8 +280,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
             let val = ptr::read(&*rc); // copy the contained object
             // destruct the box and skip our Drop
             // we can ignore the refcounts because we know we're unique
-            let NonZero(ptr) = rc._ptr;
-            deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
+            deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
                        min_align_of::<RcBox<T>>());
             forget(rc);
             Ok(val)
@@ -311,10 +310,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
 #[experimental]
 pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
     if is_unique(rc) {
-        let inner = unsafe {
-            let NonZero(ptr) = rc._ptr;
-            &mut *ptr
-        };
+        let inner = unsafe { &mut **rc._ptr };
         Some(&mut inner.value)
     } else {
         None
@@ -347,10 +343,7 @@ impl<T: Clone> Rc<T> {
         // reference count is guaranteed to be 1 at this point, and we required
         // the `Rc<T>` itself to be `mut`, so we're returning the only possible
         // reference to the inner value.
-        let inner = unsafe {
-            let NonZero(ptr) = self._ptr;
-            &mut *ptr
-        };
+        let inner = unsafe { &mut **self._ptr };
         &mut inner.value
     }
 }
@@ -392,7 +385,7 @@ impl<T> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let NonZero(ptr) = self._ptr;
+            let ptr = *self._ptr;
             if !ptr.is_null() {
                 self.dec_strong();
                 if self.strong() == 0 {
@@ -675,7 +668,7 @@ impl<T> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let NonZero(ptr) = self._ptr;
+            let ptr = *self._ptr;
             if !ptr.is_null() {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to
@@ -736,18 +729,12 @@ trait RcBoxPtr<T> {

 impl<T> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
-        let NonZero(ptr) = self._ptr;
-        unsafe { &(*ptr) }
-    }
+    fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
 }

 impl<T> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
-        let NonZero(ptr) = self._ptr;
-        unsafe { &(*ptr) }
-    }
+    fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
 }

 #[cfg(test)]

src/libcollections/vec.rs

Lines changed: 17 additions & 23 deletions
@@ -147,7 +147,7 @@ impl<T> Vec<T> {
         // non-null value which is fine since we never call deallocate on the ptr
         // if cap is 0. The reason for this is because the pointer of a slice
         // being NULL would break the null pointer optimization for enums.
-        Vec { ptr: NonZero(EMPTY as *mut T), len: 0, cap: 0 }
+        Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
     }

     /// Constructs a new, empty `Vec` with the specified capacity.
@@ -181,15 +181,15 @@ impl<T> Vec<T> {
     #[stable]
     pub fn with_capacity(capacity: uint) -> Vec<T> {
         if mem::size_of::<T>() == 0 {
-            Vec { ptr: NonZero(EMPTY as *mut T), len: 0, cap: uint::MAX }
+            Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: uint::MAX }
         } else if capacity == 0 {
             Vec::new()
         } else {
             let size = capacity.checked_mul(mem::size_of::<T>())
                                .expect("capacity overflow");
             let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
             if ptr.is_null() { ::alloc::oom() }
-            Vec { ptr: NonZero(ptr as *mut T), len: 0, cap: capacity }
+            Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
         }
     }

@@ -262,7 +262,7 @@ impl<T> Vec<T> {
     #[unstable = "needs finalization"]
     pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
                                  capacity: uint) -> Vec<T> {
-        Vec { ptr: NonZero(ptr), len: length, cap: capacity }
+        Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
     }

     /// Creates a vector by copying the elements from a raw pointer.
@@ -744,24 +744,23 @@ impl<T> Vec<T> {
     pub fn shrink_to_fit(&mut self) {
         if mem::size_of::<T>() == 0 { return }

-        let NonZero(ptr) = self.ptr;
         if self.len == 0 {
             if self.cap != 0 {
                 unsafe {
-                    dealloc(ptr, self.cap)
+                    dealloc(*self.ptr, self.cap)
                 }
                 self.cap = 0;
             }
         } else {
             unsafe {
                 // Overflow check is unnecessary as the vector is already at
                 // least this large.
-                let ptr = reallocate(ptr as *mut u8,
+                let ptr = reallocate(*self.ptr as *mut u8,
                                      self.cap * mem::size_of::<T>(),
                                      self.len * mem::size_of::<T>(),
                                      mem::min_align_of::<T>()) as *mut T;
                 if ptr.is_null() { ::alloc::oom() }
-                self.ptr = NonZero(ptr);
+                self.ptr = NonZero::new(ptr);
             }
             self.cap = self.len;
         }
@@ -819,10 +818,9 @@ impl<T> Vec<T> {
     #[inline]
     #[stable]
     pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
-        let NonZero(ptr) = self.ptr;
         unsafe {
             mem::transmute(RawSlice {
-                data: ptr as *const T,
+                data: *self.ptr as *const T,
                 len: self.len,
             })
         }
@@ -845,7 +843,7 @@ impl<T> Vec<T> {
     #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn into_iter(self) -> MoveItems<T> {
         unsafe {
-            let NonZero(ptr) = self.ptr;
+            let ptr = *self.ptr;
             let cap = self.cap;
             let begin = ptr as *const T;
             let end = if mem::size_of::<T>() == 0 {
@@ -1064,16 +1062,15 @@ impl<T> Vec<T> {
             let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
             if old_size > size { panic!("capacity overflow") }
             unsafe {
-                let NonZero(ptr) = self.ptr;
-                let ptr = alloc_or_realloc(ptr, old_size, size);
+                let ptr = alloc_or_realloc(*self.ptr, old_size, size);
                 if ptr.is_null() { ::alloc::oom() }
-                self.ptr = NonZero(ptr);
+                self.ptr = NonZero::new(ptr);
             }
             self.cap = max(self.cap, 2) * 2;
         }

         unsafe {
-            let NonZero(end) = self.ptr.offset(self.len as int);
+            let end = *self.ptr.offset(self.len as int);
             ptr::write(&mut *end, value);
             self.len += 1;
         }
@@ -1153,10 +1150,9 @@ impl<T> Vec<T> {
         let size = capacity.checked_mul(mem::size_of::<T>())
                            .expect("capacity overflow");
         unsafe {
-            let NonZero(ptr) = self.ptr;
-            let ptr = alloc_or_realloc(ptr, self.cap * mem::size_of::<T>(), size);
+            let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
             if ptr.is_null() { ::alloc::oom() }
-            self.ptr = NonZero(ptr);
+            self.ptr = NonZero::new(ptr);
         }
         self.cap = capacity;
     }
@@ -1275,10 +1271,9 @@ impl<T> AsSlice<T> for Vec<T> {
     #[inline]
     #[stable]
     fn as_slice<'a>(&'a self) -> &'a [T] {
-        let NonZero(ptr) = self.ptr;
         unsafe {
             mem::transmute(RawSlice {
-                data: ptr as *const T,
+                data: *self.ptr as *const T,
                 len: self.len
             })
         }
@@ -1305,8 +1300,7 @@ impl<T> Drop for Vec<T> {
                 for x in self.iter() {
                     ptr::read(x);
                 }
-                let NonZero(ptr) = self.ptr;
-                dealloc(ptr, self.cap)
+                dealloc(*self.ptr, self.cap)
             }
         }
     }
@@ -1342,7 +1336,7 @@ impl<T> MoveItems<T> {
         for _x in self { }
         let MoveItems { allocation, cap, ptr: _ptr, end: _end } = self;
         mem::forget(self);
-        Vec { ptr: NonZero(allocation), cap: cap, len: 0 }
+        Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
     }
 }

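The Vec::new hunk above keeps a non-null sentinel (EMPTY) precisely so the enum layout optimization its comment mentions still applies. A minimal check of that optimization, written against current Rust and using std::ptr::NonNull as the present-day counterpart of NonZero<*mut T>:

    use std::mem::size_of;
    use std::ptr::NonNull;

    fn main() {
        // NonNull<T> rules out the null bit pattern, so Option can reuse it
        // for None and the Option stays one pointer wide.
        assert_eq!(size_of::<Option<NonNull<u8>>>(), size_of::<*mut u8>());
        // A plain raw pointer has no forbidden value, so its Option is larger.
        assert!(size_of::<Option<*mut u8>>() > size_of::<*mut u8>());
    }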

src/libcore/ptr.rs

Lines changed: 20 additions & 1 deletion
@@ -92,6 +92,7 @@ use clone::Clone;
 use intrinsics;
 use option::Option;
 use option::Option::{Some, None};
+use ops::Deref;

 use cmp::{PartialEq, Eq, PartialOrd, Equiv};
 use cmp::Ordering;
@@ -106,7 +107,25 @@ pub use intrinsics::set_memory;
 /// NULL or 0 that might allow certain optimizations.
 #[lang="non_zero"]
 #[deriving(Clone, PartialEq, Eq, PartialOrd)]
-pub struct NonZero<T>(pub T);
+#[experimental]
+pub struct NonZero<T>(T);
+
+impl<T> NonZero<T> {
+    /// Create an instance of NonZero with the provided value.
+    /// You must indeed ensure that the value is actually "non-zero".
+    #[inline(always)]
+    pub unsafe fn new(inner: T) -> NonZero<T> {
+        NonZero(inner)
+    }
+}
+
+impl<T> Deref<T> for NonZero<T> {
+    #[inline]
+    fn deref<'a>(&'a self) -> &'a T {
+        let NonZero(ref inner) = *self;
+        inner
+    }
+}

 /// Create a null pointer.
 ///
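For readers following along outside libcore, here is a minimal stand-in with the same shape as the API added in this hunk, written in current Rust syntax (the commit itself targets the pre-1.0 trait form Deref<T>). The NonZero type below is defined locally for illustration and is not the core::ptr item:

    use std::ops::Deref;

    // Illustrative stand-in: unsafe constructor plus Deref, no public field.
    struct NonZero<T>(T);

    impl<T> NonZero<T> {
        /// Unsafe because the caller must guarantee the value is non-zero.
        unsafe fn new(inner: T) -> NonZero<T> {
            NonZero(inner)
        }
    }

    impl<T> Deref for NonZero<T> {
        type Target = T;

        fn deref(&self) -> &T {
            let NonZero(ref inner) = *self;
            inner
        }
    }

    fn main() {
        let x = 5u32;
        // Construction now requires an unsafe block...
        let nz = unsafe { NonZero::new(&x as *const u32) };
        // ...and reads go through Deref, hence the `*self._ptr` and
        // `**self._ptr` pattern throughout the hunks above.
        let raw: *const u32 = *nz;
        assert_eq!(unsafe { *raw }, 5);
    }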
