diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 664bc5ffc3441..719f154141b56 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -460,42 +460,7 @@ impl<T> Rc<T> {
     where
         F: FnOnce(&Weak<T>) -> T,
     {
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new(RcBox {
-            strong: Cell::new(0),
-            weak: Cell::new(1),
-            value: mem::MaybeUninit::<T>::uninit(),
-        }))
-        .into();
-
-        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: Global };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).value), data);
-
-            let prev_value = (*inner).strong.get();
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-            (*inner).strong.set(1);
-
-            Rc::from_inner(init_ptr)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
+        Self::new_cyclic_in(data_fn, Global)
     }
 
     /// Constructs a new `Rc` with uninitialized contents.
@@ -801,8 +766,6 @@ impl<T, A: Allocator> Rc<T, A> {
     where
         F: FnOnce(&Weak<T, A>) -> T,
     {
-        // Note: comments and implementation are copied from Rc::new_cyclic.
-
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
@@ -836,6 +799,8 @@ impl<T, A: Allocator> Rc<T, A> {
 
         // Strong references should collectively own a shared weak reference,
         // so don't run the destructor for our old weak reference.
+        // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
+        // and forgetting the weak reference.
         let alloc = weak.into_raw_with_allocator().1;
 
         Rc::from_inner_in(init_ptr, alloc)
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 4f8039fd1f8b8..496865e303b40 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -450,54 +450,7 @@ impl<T> Arc<T> {
     where
         F: FnOnce(&Weak<T>) -> T,
     {
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new(ArcInner {
-            strong: atomic::AtomicUsize::new(0),
-            weak: atomic::AtomicUsize::new(1),
-            data: mem::MaybeUninit::<T>::uninit(),
-        }))
-        .into();
-        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: Global };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        // Now we can properly initialize the inner value and turn our weak
-        // reference into a strong reference.
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).data), data);
-
-            // The above write to the data field must be visible to any threads which
-            // observe a non-zero strong count. Therefore we need at least "Release" ordering
-            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
-            //
-            // "Acquire" ordering is not required. When considering the possible behaviours
-            // of `data_fn` we only need to look at what it could do with a reference to a
-            // non-upgradeable `Weak`:
-            // - It can *clone* the `Weak`, increasing the weak reference count.
-            // - It can drop those clones, decreasing the weak reference count (but never to zero).
-            //
-            // These side effects do not impact us in any way, and no other side effects are
-            // possible with safe code alone.
-            let prev_value = (*inner).strong.fetch_add(1, Release);
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-
-            Arc::from_inner(init_ptr)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
+        Self::new_cyclic_in(data_fn, Global)
     }
 
     /// Constructs a new `Arc` with uninitialized contents.
@@ -821,8 +774,6 @@ impl<T, A: Allocator> Arc<T, A> {
     where
         F: FnOnce(&Weak<T, A>) -> T,
     {
-        // Note: these comments and much of the implementation is copied from Arc::new_cyclic.
-
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
@@ -869,6 +820,8 @@ impl<T, A: Allocator> Arc<T, A> {
 
         // Strong references should collectively own a shared weak reference,
        // so don't run the destructor for our old weak reference.
+        // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
+        // and forgetting the weak reference.
         let alloc = weak.into_raw_with_allocator().1;
 
         Arc::from_inner_in(init_ptr, alloc)
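
For readers skimming the patch, a minimal usage sketch of the `Rc::new_cyclic` API whose implementation is deduplicated above; the `Gadget` type is a hypothetical example, not part of this patch:

use std::rc::{Rc, Weak};

// Hypothetical self-referential type: each Gadget stores a weak pointer
// back to its own allocation.
struct Gadget {
    me: Weak<Gadget>,
}

fn main() {
    // `data_fn` runs while the strong count is still 0, so the `Weak` it
    // receives cannot be upgraded yet; it can only be cloned or dropped.
    let gadget: Rc<Gadget> = Rc::new_cyclic(|weak| Gadget { me: weak.clone() });

    // Once `new_cyclic` returns, the value is initialized and the strong
    // count is 1, so the stored weak pointer upgrades normally.
    assert!(gadget.me.upgrade().is_some());
    assert_eq!(Rc::strong_count(&gadget), 1);
    assert_eq!(Rc::weak_count(&gadget), 1);
}

The same pattern applies to `Arc::new_cyclic`. The unstable `new_cyclic_in` variants that both stable entry points now delegate to take an additional allocator parameter, which the stable functions fill in with `Global`.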