@@ -82,8 +82,10 @@ use core::intrinsics::drop_in_place;
82
82
use core:: mem;
83
83
use core:: nonzero:: NonZero ;
84
84
use core:: ops:: { Deref , CoerceUnsized } ;
85
+ use core:: ptr;
85
86
use core:: marker:: Unsize ;
86
87
use core:: hash:: { Hash , Hasher } ;
88
+ use core:: usize;
87
89
use heap:: deallocate;
88
90
89
91
/// An atomically reference counted wrapper for shared state.
@@ -156,7 +158,12 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
156
158
157
159
struct ArcInner < T : ?Sized > {
158
160
strong : atomic:: AtomicUsize ,
161
+
162
+ // the value usize::MAX acts as a sentinel for temporarily "locking" the
163
+ // ability to upgrade weak pointers or downgrade strong ones; this is used
164
+ // to avoid races in `make_unique` and `get_mut`.
159
165
weak : atomic:: AtomicUsize ,
166
+
160
167
data : T ,
161
168
}
162
169
@@ -203,9 +210,25 @@ impl<T: ?Sized> Arc<T> {
203
210
#[ unstable( feature = "arc_weak" ,
204
211
reason = "Weak pointers may not belong in this module." ) ]
205
212
pub fn downgrade ( & self ) -> Weak < T > {
206
- // See the clone() impl for why this is relaxed
207
- self . inner ( ) . weak . fetch_add ( 1 , Relaxed ) ;
208
- Weak { _ptr : self . _ptr }
213
+ loop {
214
+ // This Relaxed is OK because we're checking the value in the CAS
215
+ // below.
216
+ let cur = self . inner ( ) . weak . load ( Relaxed ) ;
217
+
218
+ // check if the weak counter is currently "locked"; if so, spin.
219
+ if cur == usize:: MAX { continue }
220
+
221
+ // NOTE: this code currently ignores the possibility of overflow
222
+ // into usize::MAX; in general both Rc and Arc need to be adjusted
223
+ // to deal with overflow.
224
+
225
+ // Unlike with Clone(), we need this to be an Acquire read to
226
+ // synchronize with the write coming from `is_unique`, so that the
227
+ // events prior to that write happen before this read.
228
+ if self . inner ( ) . weak . compare_and_swap ( cur, cur + 1 , Acquire ) == cur {
229
+ return Weak { _ptr : self . _ptr }
230
+ }
231
+ }
209
232
}
210
233
211
234
/// Get the number of weak references to this value.
@@ -260,51 +283,6 @@ pub fn weak_count<T: ?Sized>(this: &Arc<T>) -> usize { Arc::weak_count(this) }
260
283
#[ deprecated( since = "1.2.0" , reason = "renamed to Arc::strong_count" ) ]
261
284
pub fn strong_count < T : ?Sized > ( this : & Arc < T > ) -> usize { Arc :: strong_count ( this) }
262
285
263
-
264
- /// Returns a mutable reference to the contained value if the `Arc<T>` is unique.
265
- ///
266
- /// Returns `None` if the `Arc<T>` is not unique.
267
- ///
268
- /// This function is marked **unsafe** because it is racy if weak pointers
269
- /// are active.
270
- ///
271
- /// # Examples
272
- ///
273
- /// ```
274
- /// # #![feature(arc_unique, alloc)]
275
- /// extern crate alloc;
276
- /// # fn main() {
277
- /// use alloc::arc::{Arc, get_mut};
278
- ///
279
- /// # unsafe {
280
- /// let mut x = Arc::new(3);
281
- /// *get_mut(&mut x).unwrap() = 4;
282
- /// assert_eq!(*x, 4);
283
- ///
284
- /// let _y = x.clone();
285
- /// assert!(get_mut(&mut x).is_none());
286
- /// # }
287
- /// # }
288
- /// ```
289
- #[ inline]
290
- #[ unstable( feature = "arc_unique" ) ]
291
- #[ deprecated( since = "1.2.0" ,
292
- reason = "this function is unsafe with weak pointers" ) ]
293
- pub unsafe fn get_mut < T : ?Sized > ( this : & mut Arc < T > ) -> Option < & mut T > {
294
- // FIXME(#24880) potential race with upgraded weak pointers here
295
- if Arc :: strong_count ( this) == 1 && Arc :: weak_count ( this) == 0 {
296
- // This unsafety is ok because we're guaranteed that the pointer
297
- // returned is the *only* pointer that will ever be returned to T. Our
298
- // reference count is guaranteed to be 1 at this point, and we required
299
- // the Arc itself to be `mut`, so we're returning the only possible
300
- // reference to the inner data.
301
- let inner = & mut * * this. _ptr ;
302
- Some ( & mut inner. data )
303
- } else {
304
- None
305
- }
306
- }
307
-
308
286
#[ stable( feature = "rust1" , since = "1.0.0" ) ]
309
287
impl < T : ?Sized > Clone for Arc < T > {
310
288
/// Makes a clone of the `Arc<T>`.
@@ -352,44 +330,150 @@ impl<T: Clone> Arc<T> {
352
330
/// Make a mutable reference from the given `Arc<T>`.
353
331
///
354
332
/// This is also referred to as a copy-on-write operation because the inner
355
- /// data is cloned if the reference count is greater than one.
356
- ///
357
- /// This method is marked **unsafe** because it is racy if weak pointers
358
- /// are active.
333
+ /// data is cloned if the (strong) reference count is greater than one. If
334
+ /// we hold the only strong reference, any existing weak references will no
335
+ /// longer be upgradeable.
359
336
///
360
337
/// # Examples
361
338
///
362
339
/// ```
363
340
/// # #![feature(arc_unique)]
364
341
/// use std::sync::Arc;
365
342
///
366
- /// # unsafe {
367
343
/// let mut five = Arc::new(5);
368
344
///
369
- /// let mut_five = five.make_unique();
370
- /// # }
345
+ /// let mut_five = Arc::make_unique(&mut five);
371
346
/// ```
372
347
#[ inline]
373
348
#[ unstable( feature = "arc_unique" ) ]
374
- # [ deprecated ( since = "1.2.0" ,
375
- reason = "this function is unsafe with weak pointers" ) ]
376
- pub unsafe fn make_unique ( & mut self ) -> & mut T {
377
- // FIXME(#24880) potential race with upgraded weak pointers here
349
+ pub fn make_unique ( this : & mut Arc < T > ) -> & mut T {
350
+ // Note that we hold both a strong reference and a weak reference.
351
+ // Thus, releasing our strong reference only will not, by itself, cause
352
+ // the memory to be deallocated.
378
353
//
379
- // Note that we hold a strong reference, which also counts as a weak
380
- // reference, so we only clone if there is an additional reference of
381
- // either kind.
382
- if self . inner ( ) . strong . load ( SeqCst ) != 1 ||
383
- self . inner ( ) . weak . load ( SeqCst ) != 1 {
384
- * self = Arc :: new ( ( * * self ) . clone ( ) )
354
+ // Use Acquire to ensure that we see any writes to `weak` that happen
355
+ // before release writes (i.e., decrements) to `strong`. Since we hold a
356
+ // weak count, there's no chance the ArcInner itself could be
357
+ // deallocated.
358
+ if this. inner ( ) . strong . compare_and_swap ( 1 , 0 , Acquire ) != 1 {
359
+ // Another strong pointer exists; clone
360
+ * this = Arc :: new ( ( * * this) . clone ( ) ) ;
361
+ } else if this. inner ( ) . weak . load ( Relaxed ) != 1 {
362
+ // Relaxed suffices in the above because this is fundamentally an
363
+ // optimization: we are always racing with weak pointers being
364
+ // dropped. Worst case, we end up allocating a new Arc unnecessarily.
365
+
366
+ // We removed the last strong ref, but there are additional weak
367
+ // refs remaining. We'll move the contents to a new Arc, and
368
+ // invalidate the other weak refs.
369
+
370
+ // Note that it is not possible for the read of `weak` to yield
371
+ // usize::MAX (i.e., locked), since the weak count can only be
372
+ // locked by a thread with a strong reference.
373
+
374
+ // Materialize our own implicit weak pointer, so that it can clean
375
+ // up the ArcInner as needed.
376
+ let weak = Weak { _ptr : this. _ptr } ;
377
+
378
+ // mark the data itself as already deallocated
379
+ unsafe {
380
+ // there is no data race in the implicit write caused by `read`
381
+ // here (due to zeroing) because data is no longer accessed by
382
+ // other threads (due to there being no more strong refs at this
383
+ // point).
384
+ let mut swap = Arc :: new ( ptr:: read ( & ( * * weak. _ptr ) . data ) ) ;
385
+ mem:: swap ( this, & mut swap) ;
386
+ mem:: forget ( swap) ;
387
+ }
388
+ } else {
389
+ // We were the sole reference of either kind; bump back up the
390
+ // strong ref count.
391
+ this. inner ( ) . strong . store ( 1 , Release ) ;
385
392
}
393
+
386
394
// As with `get_mut()`, the unsafety is ok because our reference was
387
395
// either unique to begin with, or became one upon cloning the contents.
388
- let inner = & mut * * self . _ptr ;
389
- & mut inner. data
396
+ unsafe {
397
+ let inner = & mut * * this. _ptr ;
398
+ & mut inner. data
399
+ }
390
400
}
391
401
}
392
402
403
+ impl < T : ?Sized > Arc < T > {
404
+ /// Returns a mutable reference to the contained value if the `Arc<T>` is unique.
405
+ ///
406
+ /// Returns `None` if the `Arc<T>` is not unique.
407
+ ///
408
+ /// # Examples
409
+ ///
410
+ /// ```
411
+ /// # #![feature(arc_unique, alloc)]
412
+ /// extern crate alloc;
413
+ /// # fn main() {
414
+ /// use alloc::arc::Arc;
415
+ ///
416
+ /// let mut x = Arc::new(3);
417
+ /// *Arc::get_mut(&mut x).unwrap() = 4;
418
+ /// assert_eq!(*x, 4);
419
+ ///
420
+ /// let _y = x.clone();
421
+ /// assert!(Arc::get_mut(&mut x).is_none());
422
+ /// # }
423
+ /// ```
424
+ #[ inline]
425
+ #[ unstable( feature = "arc_unique" ) ]
426
+ pub fn get_mut ( this : & mut Arc < T > ) -> Option < & mut T > {
427
+ if this. is_unique ( ) {
428
+ // This unsafety is ok because we're guaranteed that the pointer
429
+ // returned is the *only* pointer that will ever be returned to T. Our
430
+ // reference count is guaranteed to be 1 at this point, and we required
431
+ // the Arc itself to be `mut`, so we're returning the only possible
432
+ // reference to the inner data.
433
+ unsafe {
434
+ let inner = & mut * * this. _ptr ;
435
+ Some ( & mut inner. data )
436
+ }
437
+ } else {
438
+ None
439
+ }
440
+ }
441
+
442
+ /// Determine whether this is the unique reference (including weak refs) to
443
+ /// the underlying data.
444
+ ///
445
+ /// Note that this requires locking the weak ref count.
446
+ fn is_unique ( & mut self ) -> bool {
447
+ // lock the weak pointer count if we appear to be the sole weak pointer
448
+ // holder.
449
+ //
450
+ // The acquire label here ensures a happens-before relationship with any
451
+ // writes to `strong` prior to decrements of the `weak` count (via drop,
452
+ // which uses Release).
453
+ if self . inner ( ) . weak . compare_and_swap ( 1 , usize:: MAX , Acquire ) == 1 {
454
+ // Due to the previous acquire read, this will observe any writes to
455
+ // `strong` that were due to upgrading weak pointers; only strong
456
+ // clones remain, which require that the strong count is > 1 anyway.
457
+ let unique = self . inner ( ) . strong . load ( Relaxed ) == 1 ;
458
+
459
+ // The release write here synchronizes with a read in `downgrade`,
460
+ // effectively preventing the above read of `strong` from happening
461
+ // after the write.
462
+ self . inner ( ) . weak . store ( 1 , Release ) ; // release the lock
463
+ unique
464
+ } else {
465
+ false
466
+ }
467
+ }
468
+ }
469
+
470
+ #[ inline]
471
+ #[ unstable( feature = "arc_unique" ) ]
472
+ #[ deprecated( since = "1.2" , reason = "use Arc::get_mut instead" ) ]
473
+ pub fn get_mut < T : ?Sized > ( this : & mut Arc < T > ) -> Option < & mut T > {
474
+ Arc :: get_mut ( this)
475
+ }
476
+
393
477
#[ stable( feature = "rust1" , since = "1.0.0" ) ]
394
478
impl < T : ?Sized > Drop for Arc < T > {
395
479
/// Drops the `Arc<T>`.
@@ -485,9 +569,15 @@ impl<T: ?Sized> Weak<T> {
485
569
// fetch_add because once the count hits 0 it must never be above 0.
486
570
let inner = self . inner ( ) ;
487
571
loop {
488
- let n = inner. strong . load ( SeqCst ) ;
572
+ // Relaxed load because any write of 0 that we can observe
573
+ // leaves the field in a permanently zero state (so a
574
+ // "stale" read of 0 is fine), and any other value is
575
+ // confirmed via the CAS below.
576
+ let n = inner. strong . load ( Relaxed ) ;
489
577
if n == 0 { return None }
490
- let old = inner. strong . compare_and_swap ( n, n + 1 , SeqCst ) ;
578
+
579
+ // Relaxed is valid for the same reason it is on Arc's Clone impl
580
+ let old = inner. strong . compare_and_swap ( n, n + 1 , Relaxed ) ;
491
581
if old == n { return Some ( Arc { _ptr : self . _ptr } ) }
492
582
}
493
583
}
@@ -518,9 +608,12 @@ impl<T: ?Sized> Clone for Weak<T> {
518
608
/// ```
519
609
#[ inline]
520
610
fn clone ( & self ) -> Weak < T > {
521
- // See comments in Arc::clone() for why this is relaxed
611
+ // See comments in Arc::clone() for why this is relaxed. This can use a
612
+ // fetch_add (ignoring the lock) because the weak count is only locked
613
+ // when there are *no other* weak pointers in existence. (So we can't be
614
+ // running this code in that case).
522
615
self . inner ( ) . weak . fetch_add ( 1 , Relaxed ) ;
523
- Weak { _ptr : self . _ptr }
616
+ return Weak { _ptr : self . _ptr }
524
617
}
525
618
}
526
619
@@ -563,6 +656,11 @@ impl<T: ?Sized> Drop for Weak<T> {
563
656
// If we find out that we were the last weak pointer, then its time to
564
657
// deallocate the data entirely. See the discussion in Arc::drop() about
565
658
// the memory orderings
659
+ //
660
+ // It's not necessary to check for the locked state here, because the
661
+ // weak count can only be locked if there was precisely one weak ref,
662
+ // meaning that drop could only subsequently run ON that remaining weak
663
+ // ref, which can only happen after the lock is released.
566
664
if self . inner ( ) . weak . fetch_sub ( 1 , Release ) == 1 {
567
665
atomic:: fence ( Acquire ) ;
568
666
unsafe { deallocate ( ptr as * mut u8 ,
@@ -794,13 +892,13 @@ mod tests {
794
892
let mut cow1 = cow0. clone ( ) ;
795
893
let mut cow2 = cow1. clone ( ) ;
796
894
797
- assert ! ( 75 == * cow0 . make_unique( ) ) ;
798
- assert ! ( 75 == * cow1 . make_unique( ) ) ;
799
- assert ! ( 75 == * cow2 . make_unique( ) ) ;
895
+ assert ! ( 75 == * Arc :: make_unique( & mut cow0 ) ) ;
896
+ assert ! ( 75 == * Arc :: make_unique( & mut cow1 ) ) ;
897
+ assert ! ( 75 == * Arc :: make_unique( & mut cow2 ) ) ;
800
898
801
- * cow0 . make_unique ( ) += 1 ;
802
- * cow1 . make_unique ( ) += 2 ;
803
- * cow2 . make_unique ( ) += 3 ;
899
+ * Arc :: make_unique ( & mut cow0 ) += 1 ;
900
+ * Arc :: make_unique ( & mut cow1 ) += 2 ;
901
+ * Arc :: make_unique ( & mut cow2 ) += 3 ;
804
902
805
903
assert ! ( 76 == * cow0) ;
806
904
assert ! ( 77 == * cow1) ;
@@ -824,7 +922,7 @@ mod tests {
824
922
assert ! ( 75 == * cow2) ;
825
923
826
924
unsafe {
827
- * cow0 . make_unique ( ) += 1 ;
925
+ * Arc :: make_unique ( & mut cow0 ) += 1 ;
828
926
}
829
927
830
928
assert ! ( 76 == * cow0) ;
@@ -847,7 +945,7 @@ mod tests {
847
945
assert ! ( 75 == * cow1_weak. upgrade( ) . unwrap( ) ) ;
848
946
849
947
unsafe {
850
- * cow0 . make_unique ( ) += 1 ;
948
+ * Arc :: make_unique ( & mut cow0 ) += 1 ;
851
949
}
852
950
853
951
assert ! ( 76 == * cow0) ;
0 commit comments