@@ -82,8 +82,10 @@ use core::intrinsics::drop_in_place;
 use core::mem;
 use core::nonzero::NonZero;
 use core::ops::{Deref, CoerceUnsized};
+use core::ptr;
 use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
+use core::usize;
 use heap::deallocate;
 
/// An atomically reference counted wrapper for shared state.
@@ -154,7 +156,12 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
 struct ArcInner<T: ?Sized> {
     strong: atomic::AtomicUsize,
+
+    // the value usize::MAX acts as a sentinel for temporarily "locking" the
+    // ability to upgrade weak pointers or downgrade strong ones; this is used
+    // to avoid races in `make_unique` and `get_mut`.
     weak: atomic::AtomicUsize,
+
     data: T,
 }
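The sentinel scheme introduced by this field comment can be modeled in isolation. Below is a minimal, standalone sketch using today's `std` atomics; the `WeakCount` type and its method names are hypothetical illustrations, not part of the patch. It shows how a CAS to `usize::MAX` "locks" the count so no new weak pointer can appear while uniqueness is being checked:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

// Hypothetical standalone model of the weak-count lock; usize::MAX is the
// sentinel meaning "locked".
struct WeakCount(AtomicUsize);

impl WeakCount {
    // Take the lock only if we hold the sole weak reference (count == 1).
    fn try_lock(&self) -> bool {
        self.0
            .compare_exchange(1, usize::MAX, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
    }

    // Release the lock by restoring the count to 1.
    fn unlock(&self) {
        self.0.store(1, Ordering::Release);
    }

    // What `downgrade` does: spin while locked, otherwise CAS-increment.
    fn increment(&self) {
        loop {
            let cur = self.0.load(Ordering::Relaxed);
            if cur == usize::MAX {
                continue; // locked: another thread is checking uniqueness
            }
            if self
                .0
                .compare_exchange(cur, cur + 1, Ordering::Acquire, Ordering::Relaxed)
                .is_ok()
            {
                return;
            }
        }
    }
}

fn main() {
    let wc = WeakCount(AtomicUsize::new(1));
    assert!(wc.try_lock()); // sole holder: lock acquired
    wc.unlock();
    wc.increment(); // count is now 2...
    assert!(!wc.try_lock()); // ...so locking fails
}
```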
@@ -201,9 +208,25 @@ impl<T: ?Sized> Arc<T> {
     #[unstable(feature = "arc_weak",
                reason = "Weak pointers may not belong in this module.")]
     pub fn downgrade(&self) -> Weak<T> {
-        // See the clone() impl for why this is relaxed
-        self.inner().weak.fetch_add(1, Relaxed);
-        Weak { _ptr: self._ptr }
+        loop {
+            // This Relaxed is OK because we're checking the value in the CAS
+            // below.
+            let cur = self.inner().weak.load(Relaxed);
+
+            // check if the weak counter is currently "locked"; if so, spin.
+            if cur == usize::MAX { continue }
+
+            // NOTE: this code currently ignores the possibility of overflow
+            // into usize::MAX; in general both Rc and Arc need to be adjusted
+            // to deal with overflow.
+
+            // Unlike with Clone(), we need this to be an Acquire read to
+            // synchronize with the write coming from `is_unique`, so that the
+            // events prior to that write happen before this read.
+            if self.inner().weak.compare_and_swap(cur, cur + 1, Acquire) == cur {
+                return Weak { _ptr: self._ptr }
+            }
+        }
     }
 
     /// Get the number of weak references to this value.
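For orientation, this is the user-visible behavior the new `downgrade` loop preserves — a sketch against the current stable API (at the time of this patch, `downgrade` was still behind the `arc_weak` feature):

```rust
use std::sync::Arc;

fn main() {
    let strong = Arc::new(5);
    let weak = Arc::downgrade(&strong); // the CAS loop above runs here

    // Upgrading succeeds while a strong reference is alive...
    assert_eq!(weak.upgrade().map(|a| *a), Some(5));

    // ...and fails once the last strong reference is gone.
    drop(strong);
    assert!(weak.upgrade().is_none());
}
```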
@@ -258,51 +281,6 @@ pub fn weak_count<T: ?Sized>(this: &Arc<T>) -> usize { Arc::weak_count(this) }
 #[deprecated(since = "1.2.0", reason = "renamed to Arc::strong_count")]
 pub fn strong_count<T: ?Sized>(this: &Arc<T>) -> usize { Arc::strong_count(this) }
 
-/// Returns a mutable reference to the contained value if the `Arc<T>` is unique.
-///
-/// Returns `None` if the `Arc<T>` is not unique.
-///
-/// This function is marked **unsafe** because it is racy if weak pointers
-/// are active.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(arc_unique, alloc)]
-/// extern crate alloc;
-/// # fn main() {
-/// use alloc::arc::{Arc, get_mut};
-///
-/// # unsafe {
-/// let mut x = Arc::new(3);
-/// *get_mut(&mut x).unwrap() = 4;
-/// assert_eq!(*x, 4);
-///
-/// let _y = x.clone();
-/// assert!(get_mut(&mut x).is_none());
-/// # }
-/// # }
-/// ```
-#[inline]
-#[unstable(feature = "arc_unique")]
-#[deprecated(since = "1.2.0",
-             reason = "this function is unsafe with weak pointers")]
-pub unsafe fn get_mut<T: ?Sized>(this: &mut Arc<T>) -> Option<&mut T> {
-    // FIXME(#24880) potential race with upgraded weak pointers here
-    if Arc::strong_count(this) == 1 && Arc::weak_count(this) == 0 {
-        // This unsafety is ok because we're guaranteed that the pointer
-        // returned is the *only* pointer that will ever be returned to T. Our
-        // reference count is guaranteed to be 1 at this point, and we required
-        // the Arc itself to be `mut`, so we're returning the only possible
-        // reference to the inner data.
-        let inner = &mut **this._ptr;
-        Some(&mut inner.data)
-    } else {
-        None
-    }
-}
-
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> Clone for Arc<T> {
     /// Makes a clone of the `Arc<T>`.
@@ -350,44 +328,150 @@ impl<T: Clone> Arc<T> {
     /// Make a mutable reference from the given `Arc<T>`.
     ///
     /// This is also referred to as a copy-on-write operation because the inner
-    /// data is cloned if the reference count is greater than one.
-    ///
-    /// This method is marked **unsafe** because it is racy if weak pointers
-    /// are active.
+    /// data is cloned if the (strong) reference count is greater than one. If
+    /// we hold the only strong reference, any existing weak references will no
+    /// longer be upgradeable.
     ///
     /// # Examples
     ///
     /// ```
     /// # #![feature(arc_unique)]
     /// use std::sync::Arc;
     ///
-    /// # unsafe {
     /// let mut five = Arc::new(5);
     ///
-    /// let mut_five = five.make_unique();
-    /// # }
+    /// let mut_five = Arc::make_unique(&mut five);
     /// ```
     #[inline]
     #[unstable(feature = "arc_unique")]
-    #[deprecated(since = "1.2.0",
-                 reason = "this function is unsafe with weak pointers")]
-    pub unsafe fn make_unique(&mut self) -> &mut T {
-        // FIXME(#24880) potential race with upgraded weak pointers here
+    pub fn make_unique(this: &mut Arc<T>) -> &mut T {
+        // Note that we hold both a strong reference and a weak reference.
+        // Thus, releasing our strong reference only will not, by itself, cause
+        // the memory to be deallocated.
         //
-        // Note that we hold a strong reference, which also counts as a weak
-        // reference, so we only clone if there is an additional reference of
-        // either kind.
-        if self.inner().strong.load(SeqCst) != 1 ||
-           self.inner().weak.load(SeqCst) != 1 {
-            *self = Arc::new((**self).clone())
+        // Use Acquire to ensure that we see any writes to `weak` that happen
+        // before release writes (i.e., decrements) to `strong`. Since we hold a
+        // weak count, there's no chance the ArcInner itself could be
+        // deallocated.
+        if this.inner().strong.compare_and_swap(1, 0, Acquire) != 1 {
+            // Another strong pointer exists; clone
+            *this = Arc::new((**this).clone());
+        } else if this.inner().weak.load(Relaxed) != 1 {
+            // Relaxed suffices in the above because this is fundamentally an
+            // optimization: we are always racing with weak pointers being
+            // dropped. Worst case, we end up allocating a new Arc unnecessarily.
+
+            // We removed the last strong ref, but there are additional weak
+            // refs remaining. We'll move the contents to a new Arc, and
+            // invalidate the other weak refs.
+
+            // Note that it is not possible for the read of `weak` to yield
+            // usize::MAX (i.e., locked), since the weak count can only be
+            // locked by a thread with a strong reference.
+
+            // Materialize our own implicit weak pointer, so that it can clean
+            // up the ArcInner as needed.
+            let weak = Weak { _ptr: this._ptr };
+
+            // mark the data itself as already deallocated
+            unsafe {
+                // there is no data race in the implicit write caused by `read`
+                // here (due to zeroing) because data is no longer accessed by
+                // other threads (due to there being no more strong refs at this
+                // point).
+                let mut swap = Arc::new(ptr::read(&(**weak._ptr).data));
+                mem::swap(this, &mut swap);
+                mem::forget(swap);
+            }
+        } else {
+            // We were the sole reference of either kind; bump back up the
+            // strong ref count.
+            this.inner().strong.store(1, Release);
         }
+
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
-        let inner = &mut **self._ptr;
-        &mut inner.data
+        unsafe {
+            let inner = &mut **this._ptr;
+            &mut inner.data
+        }
     }
 }
 
+impl<T: ?Sized> Arc<T> {
+    /// Returns a mutable reference to the contained value if the `Arc<T>` is unique.
+    ///
+    /// Returns `None` if the `Arc<T>` is not unique.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![feature(arc_unique, alloc)]
+    /// extern crate alloc;
+    /// # fn main() {
+    /// use alloc::arc::Arc;
+    ///
+    /// let mut x = Arc::new(3);
+    /// *Arc::get_mut(&mut x).unwrap() = 4;
+    /// assert_eq!(*x, 4);
+    ///
+    /// let _y = x.clone();
+    /// assert!(Arc::get_mut(&mut x).is_none());
+    /// # }
+    /// ```
+    #[inline]
+    #[unstable(feature = "arc_unique")]
+    pub fn get_mut(this: &mut Arc<T>) -> Option<&mut T> {
+        if this.is_unique() {
+            // This unsafety is ok because we're guaranteed that the pointer
+            // returned is the *only* pointer that will ever be returned to T. Our
+            // reference count is guaranteed to be 1 at this point, and we required
+            // the Arc itself to be `mut`, so we're returning the only possible
+            // reference to the inner data.
+            unsafe {
+                let inner = &mut **this._ptr;
+                Some(&mut inner.data)
+            }
+        } else {
+            None
+        }
+    }
+
+    /// Determine whether this is the unique reference (including weak refs) to
+    /// the underlying data.
+    ///
+    /// Note that this requires locking the weak ref count.
+    fn is_unique(&mut self) -> bool {
+        // lock the weak pointer count if we appear to be the sole weak pointer
+        // holder.
+        //
+        // The acquire label here ensures a happens-before relationship with any
+        // writes to `strong` prior to decrements of the `weak` count (via drop,
+        // which uses Release).
+        if self.inner().weak.compare_and_swap(1, usize::MAX, Acquire) == 1 {
+            // Due to the previous acquire read, this will observe any writes to
+            // `strong` that were due to upgrading weak pointers; only strong
+            // clones remain, which require that the strong count is > 1 anyway.
+            let unique = self.inner().strong.load(Relaxed) == 1;
+
+            // The release write here synchronizes with a read in `downgrade`,
+            // effectively preventing the above read of `strong` from happening
+            // after the write.
+            self.inner().weak.store(1, Release); // release the lock
+            unique
+        } else {
+            false
+        }
+    }
+}
+
+#[inline]
+#[unstable(feature = "arc_unique")]
+#[deprecated(since = "1.2", reason = "use Arc::get_mut instead")]
+pub fn get_mut<T: ?Sized>(this: &mut Arc<T>) -> Option<&mut T> {
+    Arc::get_mut(this)
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> Drop for Arc<T> {
     /// Drops the `Arc<T>`.
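The observable copy-on-write contract of the new `make_unique`/`get_mut` pair can be checked with a small program. The sketch below uses `Arc::make_mut`, the stable name this API later shipped under (assuming that mapping holds on your toolchain):

```rust
use std::sync::Arc;

fn main() {
    let mut data = Arc::new(5);
    let other = data.clone();

    // Two strong refs: the inner value is cloned so `data` becomes unique.
    *Arc::make_mut(&mut data) += 1;
    assert_eq!(*data, 6);
    assert_eq!(*other, 5); // the other handle is untouched

    // Sole strong ref plus a weak ref: no clone, but the weak ref is
    // disconnected (the ptr::read branch above) and can no longer upgrade.
    drop(other);
    let weak = Arc::downgrade(&data);
    *Arc::make_mut(&mut data) += 1;
    assert_eq!(*data, 7);
    assert!(weak.upgrade().is_none());
}
```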
@@ -483,9 +567,15 @@ impl<T: ?Sized> Weak<T> {
         // fetch_add because once the count hits 0 it must never be above 0.
         let inner = self.inner();
         loop {
-            let n = inner.strong.load(SeqCst);
+            // Relaxed load because any write of 0 that we can observe
+            // leaves the field in a permanently zero state (so a
+            // "stale" read of 0 is fine), and any other value is
+            // confirmed via the CAS below.
+            let n = inner.strong.load(Relaxed);
             if n == 0 { return None }
-            let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
+
+            // Relaxed is valid for the same reason it is on Arc's Clone impl
+            let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
             if old == n { return Some(Arc { _ptr: self._ptr }) }
         }
     }
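The `upgrade` loop above is an instance of the classic increment-if-nonzero idiom. A standalone sketch (hypothetical helper function, using the modern `compare_exchange_weak` in place of the era's `compare_and_swap`):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

/// Increment `count` unless it is zero; returns false if it was zero.
/// (Hypothetical helper mirroring the loop in `Weak::upgrade`.)
fn increment_if_nonzero(count: &AtomicUsize) -> bool {
    loop {
        // A stale read of 0 is fine: the strong count never leaves 0.
        let n = count.load(Ordering::Relaxed);
        if n == 0 {
            return false;
        }
        // Any nonzero value is confirmed (or retried) by the CAS.
        if count
            .compare_exchange_weak(n, n + 1, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            return true;
        }
    }
}

fn main() {
    let live = AtomicUsize::new(2);
    assert!(increment_if_nonzero(&live));
    assert_eq!(live.load(Ordering::Relaxed), 3);

    let dead = AtomicUsize::new(0);
    assert!(!increment_if_nonzero(&dead));
}
```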
@@ -516,9 +606,12 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
-        // See comments in Arc::clone() for why this is relaxed
+        // See comments in Arc::clone() for why this is relaxed. This can use a
+        // fetch_add (ignoring the lock) because the weak count is only locked
+        // when there are *no other* weak pointers in existence. (So we can't be
+        // running this code in that case.)
         self.inner().weak.fetch_add(1, Relaxed);
-        Weak { _ptr: self._ptr }
+        return Weak { _ptr: self._ptr }
     }
 }
@@ -561,6 +654,11 @@ impl<T: ?Sized> Drop for Weak<T> {
         // If we find out that we were the last weak pointer, then it's time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
+        //
+        // It's not necessary to check for the locked state here, because the
+        // weak count can only be locked if there was precisely one weak ref,
+        // meaning that drop could only subsequently run ON that remaining weak
+        // ref, which can only happen after the lock is released.
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe { deallocate(ptr as *mut u8,
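The `fetch_sub(Release)` followed by `fence(Acquire)` on the final decrement is the standard refcount-teardown pattern: the fence ensures every other thread's release-decrement happens-before the deallocation. A minimal sketch (hypothetical `release_ref` helper, not from the patch):

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering};

/// Decrement a shared refcount; returns true exactly once, when it hits zero.
fn release_ref(count: &AtomicUsize) -> bool {
    // Release publishes this thread's prior writes to the shared object...
    if count.fetch_sub(1, Ordering::Release) == 1 {
        // ...and the Acquire fence on the winning thread pulls in every
        // other thread's Release before the caller frees the data.
        fence(Ordering::Acquire);
        true
    } else {
        false
    }
}

fn main() {
    let rc = AtomicUsize::new(2);
    assert!(!release_ref(&rc)); // first drop: count 2 -> 1
    assert!(release_ref(&rc)); // last drop: safe to deallocate
}
```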
@@ -792,13 +890,13 @@ mod tests {
         let mut cow1 = cow0.clone();
         let mut cow2 = cow1.clone();
 
-        assert!(75 == *cow0.make_unique());
-        assert!(75 == *cow1.make_unique());
-        assert!(75 == *cow2.make_unique());
+        assert!(75 == *Arc::make_unique(&mut cow0));
+        assert!(75 == *Arc::make_unique(&mut cow1));
+        assert!(75 == *Arc::make_unique(&mut cow2));
 
-        *cow0.make_unique() += 1;
-        *cow1.make_unique() += 2;
-        *cow2.make_unique() += 3;
+        *Arc::make_unique(&mut cow0) += 1;
+        *Arc::make_unique(&mut cow1) += 2;
+        *Arc::make_unique(&mut cow2) += 3;
 
         assert!(76 == *cow0);
         assert!(77 == *cow1);
@@ -822,7 +920,7 @@ mod tests {
         assert!(75 == *cow2);
 
         unsafe {
-            *cow0.make_unique() += 1;
+            *Arc::make_unique(&mut cow0) += 1;
         }
 
         assert!(76 == *cow0);
@@ -845,7 +943,7 @@ mod tests {
         assert!(75 == *cow1_weak.upgrade().unwrap());
 
         unsafe {
-            *cow0.make_unique() += 1;
+            *Arc::make_unique(&mut cow0) += 1;
         }
 
         assert!(76 == *cow0);