diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index 2b3a18a439fc9..f9bf8b6682eed 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -1077,6 +1077,36 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
         // additional requirements.
         unsafe { Pin::new_unchecked(boxed) }
     }
+
+    /// Allocates a box with the requested layout, which may be for a possibly-unsized value
+    /// whose layout has been determined from a value.
+    ///
+    /// The function `mem_to_imbued` is then called to turn the raw memory pointer into a
+    /// pointer to the type `T`. It's expected that this will add the fat-pointer metadata.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn allocate_for_layout(
+        allocator: &A,
+        layout: Layout,
+        mem_to_imbued: impl FnOnce(*mut u8) -> *mut T,
+    ) -> *mut T {
+        mem_to_imbued(
+            allocator.allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout)).as_ptr()
+                as *mut u8,
+        )
+    }
+
+    /// Allocates a box with sufficient space for the pointee and copies the metadata.
+    #[unstable(feature = "unwrap_rc_as_box", issue = "none")]
+    #[cfg(not(no_global_oom_handling))]
+    #[doc(hidden)]
+    pub unsafe fn allocate_for_ptr(allocator: &A, ptr: *const T) -> *mut T {
+        // Allocate for the `Box` using the layout of the value behind `ptr`.
+        unsafe {
+            Self::allocate_for_layout(allocator, Layout::for_value(&*ptr), |mem| {
+                ptr.set_ptr_value(mem) as *mut T
+            })
+        }
+    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 2aed9d03bc09d..de4930a2fda63 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -136,6 +136,7 @@
 #![feature(unicode_internals)]
 #![feature(unsize)]
 #![feature(unsized_fn_params)]
+#![cfg_attr(not(no_global_oom_handling), feature(unwrap_rc_as_box))]
 #![feature(allocator_internals)]
 #![feature(slice_partition_dedup)]
 #![feature(maybe_uninit_extra, maybe_uninit_slice, maybe_uninit_uninit_array)]
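The two `Box` helpers above are the allocation half of the change: `allocate_for_layout` obtains raw memory for an arbitrary `Layout`, and `allocate_for_ptr` stamps an existing pointer's metadata (a slice length or a vtable) onto that memory. The sketch below replays the same metadata-preserving allocation as a standalone program; `allocate_like` is a hypothetical free function, not part of this diff, and it leans on the same unstable `set_ptr_value` API that the diff uses internally.

```rust
#![feature(set_ptr_value)]

use std::alloc::{alloc, handle_alloc_error, Layout};
use std::ptr;

/// Hypothetical analogue of `Box::allocate_for_ptr`: allocate uninitialized
/// memory matching the layout of `*ptr` and return a fat pointer to it that
/// reuses `ptr`'s metadata (a slice length, a trait-object vtable, ...).
///
/// Safety: `ptr` must point to a valid value whose layout has non-zero size
/// (the global `alloc` must not be called with a zero-sized layout).
unsafe fn allocate_like<T: ?Sized>(ptr: *const T) -> *mut T {
    let layout = Layout::for_value(&*ptr);
    let mem = alloc(layout);
    if mem.is_null() {
        handle_alloc_error(layout);
    }
    // `set_ptr_value` swaps in the new address but keeps the metadata.
    ptr.set_ptr_value(mem) as *mut T
}

fn main() {
    let src: &str = "Hello, world";
    unsafe {
        let dst = allocate_like(src as *const str);
        // The length metadata came along for free; only the bytes still
        // need to be copied before the allocation is initialized.
        ptr::copy_nonoverlapping(src.as_ptr(), dst as *mut u8, src.len());
        let boxed: Box<str> = Box::from_raw(dst);
        assert_eq!(&*boxed, "Hello, world");
    }
}
```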
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 0814652a5d47d..d8e68350d2535 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -612,21 +612,10 @@ impl<T> Rc<T> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
-        if Rc::strong_count(&this) == 1 {
-            unsafe {
-                let val = ptr::read(&*this); // copy the contained object
-
-                // Indicate to Weaks that they can't be promoted by decrementing
-                // the strong count, and then remove the implicit "strong weak"
-                // pointer while also handling drop logic by just crafting a
-                // fake Weak.
-                this.inner().dec_strong();
-                let _weak = Weak { ptr: this.ptr };
-                forget(this);
-                Ok(val)
-            }
-        } else {
-            Err(this)
-        }
+        let weak = Self::leak_as_owning_weak(this)?;
+        unsafe {
+            let val = ptr::read(weak.as_ptr()); // copy the contained object
+            Ok(val)
+        }
     }
 }
@@ -997,6 +986,72 @@ impl<T: ?Sized> Rc<T> {
         unsafe { mem::drop(Rc::from_raw(ptr)) };
     }
 
+    /// Reduces the strong count, if the `Rc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`] is returned with the same `Rc` that was passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// After this operation succeeds, no more strong references to the allocation can be created,
+    /// making the caller the owner of the contained value. This returns a `Weak` that manages the
+    /// allocation while the caller can (unsafely) take advantage of their ownership. In contrast
+    /// to `try_unwrap`, this also works for unsized pointees.
+    fn leak_as_owning_weak(this: Self) -> Result<Weak<T>, Self> {
+        if Rc::strong_count(&this) == 1 {
+            // Indicate to Weaks that they can't be promoted by decrementing
+            // the strong count, and then produce the implicit "strong weak"
+            // pointer that still handles dropping of the allocation.
+            this.inner().dec_strong();
+            let this = mem::ManuallyDrop::new(this);
+            let weak = Weak { ptr: this.ptr };
+            // Return the 'fake weak'.
+            Ok(weak)
+        } else {
+            Err(this)
+        }
+    }
+
+    /// Returns the boxed inner value, if the `Rc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
+    /// passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(unwrap_rc_as_box)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let x: Rc<str> = Rc::from("Hello, world");
+    /// assert!(matches!(
+    ///     Rc::try_unwrap_as_box(x),
+    ///     Ok(b) if &b[..2] == "He"
+    /// ));
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "unwrap_rc_as_box", reason = "recently added", issue = "none")]
+    pub fn try_unwrap_as_box(this: Self) -> Result<Box<T>, Self> {
+        let owning_weak = Self::leak_as_owning_weak(this)?;
+        let src_ptr = owning_weak.as_ptr();
+
+        unsafe {
+            // We 'own' this value right now, so it is still initialized.
+            let size = mem::size_of_val(&*src_ptr);
+            // The raw allocation for our Box. After this we must not panic, as otherwise we would
+            // leak this memory. We can't use MaybeUninit here as that is only valid for sized types.
+            let raw_box = Box::<T>::allocate_for_ptr(&Global, src_ptr);
+
+            // This is a new allocation, so it cannot overlap with the one that `owning_weak` is
+            // still holding onto.
+            ptr::copy_nonoverlapping(src_ptr as *const u8, raw_box as *mut u8, size);
+
+            Ok(Box::from_raw(raw_box))
+        }
+    }
+
     /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
     /// this allocation.
     #[inline]
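For `Rc`, splitting `try_unwrap`'s logic into `leak_as_owning_weak` plus a byte copy is what makes `try_unwrap_as_box` work for unsized pointees: `try_unwrap` must `ptr::read` a sized `T`, while the new function only needs `size_of_val` and a `memcpy` into a freshly allocated box. A usage sketch, assuming the nightly feature gate added by this PR and the API as written here:

```rust
#![feature(unwrap_rc_as_box)]

use std::rc::Rc;

fn main() {
    // Succeeds: exactly one strong reference, even though a weak
    // reference is still outstanding.
    let x: Rc<[i32]> = Rc::from(vec![1, 2, 3]);
    let _weak = Rc::downgrade(&x);
    let b: Box<[i32]> = Rc::try_unwrap_as_box(x).unwrap();
    assert_eq!(&b[..], &[1, 2, 3]);

    // Fails: a second strong reference exists, so ownership cannot be
    // claimed and the original `Rc` is handed back.
    let y: Rc<str> = Rc::from("shared");
    let y2 = Rc::clone(&y);
    let y = Rc::try_unwrap_as_box(y).unwrap_err();
    assert_eq!(&y[..], &y2[..]);
}
```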
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index a066e0b49e25c..d51d1ec83da86 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -617,19 +617,10 @@ impl<T> Arc<T> {
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
-        if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
-            return Err(this);
-        }
-
-        acquire!(this.inner().strong);
+        let weak = Self::leak_as_owning_weak(this)?;
 
         unsafe {
-            let elem = ptr::read(&this.ptr.as_ref().data);
-
-            // Make a weak pointer to clean up the implicit strong-weak reference
-            let _weak = Weak { ptr: this.ptr };
-            mem::forget(this);
-
+            let elem = ptr::read(&weak.ptr.as_ref().data);
             Ok(elem)
         }
     }
@@ -1047,6 +1038,68 @@ impl<T: ?Sized> Arc<T> {
         unsafe { mem::drop(Arc::from_raw(ptr)) };
     }
 
+    /// Reduces the strong count, if this is the last strong reference.
+    ///
+    /// When this operation succeeds, no more strong references to the allocation can be
+    /// created, making the caller the owner of the contained value. This returns a `Weak` that
+    /// manages the allocation while the caller can (unsafely) take advantage of their ownership.
+    /// In contrast to `try_unwrap`, this also works for unsized pointees.
+    fn leak_as_owning_weak(this: Self) -> Result<Weak<T>, Self> {
+        if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
+            return Err(this);
+        }
+
+        acquire!(this.inner().strong);
+
+        // At this point we own the pointee. We keep it alive via a Weak reference while the
+        // caller assumes ownership. This leaks the value but not the allocation, which is
+        // eventually deallocated via the returned `Weak`.
+        // The weak pointer also cleans up the implicit strong-weak reference.
+        let this = mem::ManuallyDrop::new(this);
+        Ok(Weak { ptr: this.ptr })
+    }
+
+    /// Returns the boxed inner value, if the `Arc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`] is returned with the same `Arc` that was
+    /// passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(unwrap_rc_as_box)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let x: Arc<str> = Arc::from("Hello, world");
+    /// assert!(matches!(
+    ///     Arc::try_unwrap_as_box(x),
+    ///     Ok(b) if &b[..2] == "He"
+    /// ));
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "unwrap_rc_as_box", reason = "recently added", issue = "none")]
+    pub fn try_unwrap_as_box(this: Self) -> Result<Box<T>, Self> {
+        let owning_weak = Self::leak_as_owning_weak(this)?;
+        let src_ptr = owning_weak.as_ptr();
+
+        unsafe {
+            // We 'own' this value right now, so it is still initialized.
+            let size = mem::size_of_val(&*src_ptr);
+            // The raw allocation for our Box. After this we must not panic, as otherwise we would
+            // leak this memory. We can't use MaybeUninit here as that is only valid for sized types.
+            let raw_box = Box::<T>::allocate_for_ptr(&Global, src_ptr);
+
+            // This is a new allocation, so it cannot overlap with the one that `owning_weak` is
+            // still holding onto.
+            ptr::copy_nonoverlapping(src_ptr as *const u8, raw_box as *mut u8, size);
+
+            Ok(Box::from_raw(raw_box))
+        }
+    }
+
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // This unsafety is ok because while this arc is alive we're guaranteed
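The `Arc` version follows the same shape; the only difference is the atomic handshake in `leak_as_owning_weak` (the `compare_exchange` of the strong count from 1 to 0 plus the `acquire!` fence), which is lifted verbatim from the old `try_unwrap`. A cross-thread usage sketch, again assuming the feature gate and API proposed in this diff:

```rust
#![feature(unwrap_rc_as_box)]

use std::sync::Arc;
use std::thread;

fn main() {
    let shared: Arc<str> = Arc::from("Hello, world");

    let clone = Arc::clone(&shared);
    thread::spawn(move || {
        assert_eq!(&clone[..5], "Hello");
        // `clone` drops here, releasing one strong reference.
    })
    .join()
    .unwrap();

    // After the join this thread holds the only strong reference, so the
    // unsized pointee can be moved into a plain `Box<str>`.
    let boxed: Box<str> = Arc::try_unwrap_as_box(shared).expect("last strong reference");
    assert_eq!(&*boxed, "Hello, world");
}
```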