From 97b8832c6cd6a8a8b5601ac7dacd4cea35b47694 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Sun, 10 Nov 2024 23:59:19 -0800 Subject: [PATCH] `Rc`: store the pointer to the value, rather than to the allocation --- library/alloc/src/lib.rs | 2 + library/alloc/src/rc.rs | 1284 ++++++++--------- library/alloc/src/sync.rs | 5 +- tests/codegen/placement-new.rs | 7 +- tests/codegen/rc-dedup.rs | 52 + ...any_types.PreCodegen.after.panic-abort.mir | 162 +++ ...ny_types.PreCodegen.after.panic-unwind.mir | 178 +++ tests/mir-opt/pre-codegen/clone_rcs.rs | 22 + 8 files changed, 1062 insertions(+), 650 deletions(-) create mode 100644 tests/codegen/rc-dedup.rs create mode 100644 tests/mir-opt/pre-codegen/clone_rcs.clone_rcs_of_many_types.PreCodegen.after.panic-abort.mir create mode 100644 tests/mir-opt/pre-codegen/clone_rcs.clone_rcs_of_many_types.PreCodegen.after.panic-unwind.mir create mode 100644 tests/mir-opt/pre-codegen/clone_rcs.rs diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index dd9dfa3f5e26d..4ff6aae66d8cf 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -104,10 +104,12 @@ #![feature(async_closure)] #![feature(async_fn_traits)] #![feature(async_iterator)] +#![feature(box_as_ptr)] #![feature(box_uninit_write)] #![feature(clone_to_uninit)] #![feature(coerce_unsized)] #![feature(const_align_of_val)] +#![feature(const_alloc_layout)] #![feature(const_box)] #![feature(const_eval_select)] #![feature(const_heap)] diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 3a9bd1b5bf119..8c273ce517a70 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -251,13 +251,13 @@ use core::intrinsics::abort; #[cfg(not(no_global_oom_handling))] use core::iter; use core::marker::{PhantomData, Unsize}; -use core::mem::{self, ManuallyDrop, align_of_val_raw}; +use core::mem::{self, ManuallyDrop, SizedTypeProperties}; use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver}; use core::panic::{RefUnwindSafe, UnwindSafe}; #[cfg(not(no_global_oom_handling))] use core::pin::Pin; use core::pin::PinCoerceUnsized; -use core::ptr::{self, NonNull, drop_in_place}; +use core::ptr::{self, NonNull, Pointee}; #[cfg(not(no_global_oom_handling))] use core::slice::from_raw_parts_mut; use core::{borrow, fmt, hint}; @@ -278,23 +278,134 @@ use crate::vec::Vec; #[cfg(test)] mod tests; -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -#[repr(C)] -struct RcInner { - strong: Cell, +struct RcCounts { weak: Cell, - value: T, + strong: Cell, } -/// Calculate layout for `RcInner` using the inner value's layout -fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const RcInner)`, but this created a misaligned - // reference (see #54908). - Layout::new::>().extend(layout).unwrap().0.pad_to_align() +impl RcCounts { + #[inline] + fn init_for_rc(&self) { + // There is an implicit weak pointer owned by all the strong + // pointers, which ensures that the weak destructor never frees + // the allocation while the strong destructor is running, even + // if the weak pointer is stored inside the strong one. 
+ self.weak.set(1); + self.strong.set(1); + } + + #[inline] + fn init_for_cyclic(&self) { + // There is an implicit weak pointer owned by all the strong + // pointers, which ensures that the weak destructor never frees + // the allocation while the strong destructor is running, even + // if the weak pointer is stored inside the strong one. + self.weak.set(1); + // But for the cyclic initialization, we need to start with a strong + // count of zero so it can't be `upgrade`d during initialization. + self.strong.set(0); + } + + /// Given a pointer to the value in an `Rc` or `Weak`, return a pointer to + /// the `RcCounts` associated with that type. + /// + /// Note that to do anything useful with the returned pointer, + /// the input pointer must have provenance including the counts, + /// and not just have provenance for the value. + /// + /// # Safety + /// + /// Must be a value pointer into an actual `Rc`/`Weak` allocation, + /// so that the pointer manipulation this does remains inside that allocation. + unsafe fn from_value_ptr(ptr: NonNull) -> NonNull { + unsafe { ptr.cast::().sub(1) } + } + + // NOTE: We checked_add here to deal with mem::forget safely. In particular + // if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then + // you can free the allocation while outstanding Rcs (or Weaks) exist. + // We abort because this is such a degenerate scenario that we don't care about + // what happens -- no real program should ever experience this. + // + // This should have negligible overhead since you don't actually need to + // clone these much in Rust thanks to ownership and move-semantics. + + #[inline] + fn strong(&self) -> usize { + self.strong.get() + } + + #[inline] + fn inc_strong(&self) { + let strong = self.strong(); + + // We insert an `assume` here to hint LLVM at an otherwise + // missed optimization. + // SAFETY: The reference count will never be zero when this is + // called. + unsafe { + hint::assert_unchecked(strong != 0); + } + + let strong = strong.wrapping_add(1); + self.strong.set(strong); + + // We want to abort on overflow instead of dropping the value. + // Checking for overflow after the store instead of before + // allows for slightly better code generation. + if core::intrinsics::unlikely(strong == 0) { + abort(); + } + } + + #[inline] + fn dec_strong(&self) -> usize { + let new_strong = self.strong() - 1; + self.strong.set(new_strong); + new_strong + } + + #[inline] + fn weak(&self) -> usize { + self.weak.get() + } + + #[inline] + fn inc_weak(&self) { + let weak = self.weak(); + + // We insert an `assume` here to hint LLVM at an otherwise + // missed optimization. + // SAFETY: The reference count will never be zero when this is + // called. + unsafe { + hint::assert_unchecked(weak != 0); + } + + let weak = weak.wrapping_add(1); + self.weak.set(weak); + + // We want to abort on overflow instead of dropping the value. + // Checking for overflow after the store instead of before + // allows for slightly better code generation. + if core::intrinsics::unlikely(weak == 0) { + abort(); + } + } + + #[inline] + fn dec_weak(&self) -> usize { + let new_weak = self.weak() - 1; + self.weak.set(new_weak); + new_weak + } +} + +const SENTINEL: NonNull<()> = unsafe { NonNull::new_unchecked(ptr::without_provenance_mut(1)) }; + +#[inline] +fn is_sentinel(ptr: NonNull) -> bool { + ptr.cast::<()>() == SENTINEL } /// A single-threaded reference-counting pointer. 
'Rc' stands for 'Reference @@ -315,8 +426,24 @@ pub struct Rc< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - ptr: NonNull>, - phantom: PhantomData>, + /// This is pointing to the *value*. + /// + /// ```text + /// ↓ counts ptr + /// +--------------------------+ + /// | padding | counts | value | + /// +--------------------------+ + /// ↑ allocated ptr ↑ value ptr + /// ``` + /// + /// For small value alignment there will be no padding, but if it's high enough + /// then we have no choice but to have padding in order to have both the + /// counts and the value properly aligned. + /// + /// By putting the padding at the front (if needed), then getting to the + /// counts from the value is always the same offset for all value types. + value_ptr: NonNull, + phantom: PhantomData<(RcCounts, T)>, alloc: A, } @@ -342,40 +469,26 @@ impl, U: ?Sized, A: Allocator> CoerceUnsized> for #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Rc {} -impl Rc { - #[inline] - unsafe fn from_inner(ptr: NonNull>) -> Self { - unsafe { Self::from_inner_in(ptr, Global) } - } - - #[inline] - unsafe fn from_ptr(ptr: *mut RcInner) -> Self { - unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } - } -} - impl Rc { - #[inline(always)] - fn inner(&self) -> &RcInner { - // This unsafety is ok because while this Rc is alive we're guaranteed - // that the inner pointer is valid. - unsafe { self.ptr.as_ref() } - } - #[inline] - fn into_inner_with_allocator(this: Self) -> (NonNull>, A) { - let this = mem::ManuallyDrop::new(this); - (this.ptr, unsafe { ptr::read(&this.alloc) }) + fn counts(&self) -> &RcCounts { + // SAFETY: while this Rc is alive we're guaranteed that the inner pointer is valid, + // so the pointer math is inbounds and we can give out a reference. + unsafe { + let counts_ptr = RcCounts::from_value_ptr(self.value_ptr); + counts_ptr.as_ref() + } } #[inline] - unsafe fn from_inner_in(ptr: NonNull>, alloc: A) -> Self { - Self { ptr, phantom: PhantomData, alloc } + fn into_value_ptr_with_allocator(this: Self) -> (NonNull, A) { + let this = mem::ManuallyDrop::new(this); + (this.value_ptr, unsafe { ptr::read(&this.alloc) }) } #[inline] - unsafe fn from_ptr_in(ptr: *mut RcInner, alloc: A) -> Self { - unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) } + unsafe fn from_value_ptr_in(value_ptr: NonNull, alloc: A) -> Self { + Self { value_ptr, phantom: PhantomData, alloc } } // Non-inlined part of `drop`. @@ -384,13 +497,10 @@ impl Rc { // Reconstruct the "strong weak" pointer and drop it when this // variable goes out of scope. This ensures that the memory is // deallocated even if the destructor of `T` panics. - let _weak = Weak { ptr: self.ptr, alloc: &self.alloc }; + let _weak = Weak { value_ptr_or_sentinel: self.value_ptr, alloc: &self.alloc }; // Destroy the contained object. - // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed. - unsafe { - ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value); - } + unsafe { self.value_ptr.drop_in_place() } } } @@ -407,16 +517,7 @@ impl Rc { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. 
- unsafe { - Self::from_inner( - Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value })) - .into(), - ) - } + Self::new_in(value, Global) } /// Constructs a new `Rc` while giving you a `Weak` to the allocation, @@ -501,13 +602,7 @@ impl Rc { #[stable(feature = "new_uninit", since = "1.82.0")] #[must_use] pub fn new_uninit() -> Rc> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - <*mut u8>::cast, - )) - } + Self::new_uninit_in(Global) } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -534,13 +629,7 @@ impl Rc { #[unstable(feature = "new_zeroed_alloc", issue = "129396")] #[must_use] pub fn new_zeroed() -> Rc> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - <*mut u8>::cast, - )) - } + Self::new_zeroed_in(Global) } /// Constructs a new `Rc`, returning an error if the allocation fails @@ -556,20 +645,7 @@ impl Rc { /// ``` #[unstable(feature = "allocator_api", issue = "32838")] pub fn try_new(value: T) -> Result, AllocError> { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - unsafe { - Ok(Self::from_inner( - Box::leak(Box::try_new(RcInner { - strong: Cell::new(1), - weak: Cell::new(1), - value, - })?) - .into(), - )) - } + Self::try_new_in(value, Global) } /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails @@ -595,13 +671,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { - unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - <*mut u8>::cast, - )?)) - } + Self::try_new_uninit_in(Global) } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -628,21 +698,16 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] //#[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { - unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - <*mut u8>::cast, - )?)) - } + Self::try_new_zeroed_in(Global) } + /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `value` will be pinned in memory and unable to be moved. #[cfg(not(no_global_oom_handling))] #[stable(feature = "pin", since = "1.33.0")] #[must_use] pub fn pin(value: T) -> Pin> { - unsafe { Pin::new_unchecked(Rc::new(value)) } + Self::pin_in(value, Global) } } @@ -662,12 +727,8 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_in(value: T, alloc: A) -> Rc { - // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable. - // That would make code size bigger. - match Self::try_new_in(value, alloc) { - Ok(m) => m, - Err(_) => handle_alloc_error(Layout::new::>()), - } + let urc = UniqueRcUninit::new_uninit_in(alloc); + urc.write(value) } /// Constructs a new `Rc` with uninitialized contents in the provided allocator. 
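// Illustrative sketch (not part of this patch): the "padding | counts | value" layout
// that `RcCounts::from_value_ptr` and the allocation helpers in this patch rely on,
// reproduced with plain std types. `Counts` here is a stand-in mirroring the patch's
// `RcCounts` (weak then strong); `layout_and_value_offset` mirrors what
// `rc_layout_and_offset_from_value_layout` computes. The offset returned by `extend`
// is where the value lives, and the counts always sit immediately before the value,
// so any front padding never changes the value-to-counts distance.
use std::alloc::Layout;
use std::cell::Cell;
use std::mem::{align_of, size_of};

#[allow(dead_code)]
struct Counts {
    weak: Cell<usize>,
    strong: Cell<usize>,
}

fn layout_and_value_offset(value_layout: Layout) -> (Layout, usize) {
    Layout::new::<Counts>().extend(value_layout).unwrap()
}

fn main() {
    // A `u8` value: no padding needed, so the value starts right after the counts and
    // the whole allocation is 2 * usize + 1 bytes (not rounded up to the counts' align).
    let (whole, offset) = layout_and_value_offset(Layout::new::<u8>());
    assert_eq!(offset, size_of::<Counts>());
    assert_eq!(whole.size(), size_of::<Counts>() + 1);

    // A 32-byte-aligned value: padding is required, and it all lands in front, so
    // "value offset minus size_of::<Counts>()" still reaches properly aligned counts.
    let (_, offset32) = layout_and_value_offset(Layout::from_size_align(32, 32).unwrap());
    assert_eq!(offset32, 32);
    let counts_offset = offset32 - size_of::<Counts>();
    assert_eq!(counts_offset % align_of::<Counts>(), 0);
}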
@@ -697,16 +758,9 @@ impl Rc { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_uninit_in(alloc: A) -> Rc, A> { - unsafe { - Rc::from_ptr_in( - Rc::allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate(layout), - <*mut u8>::cast, - ), - alloc, - ) - } + let urc = UniqueRcUninit::new_uninit_in(alloc); + // SAFETY: a `MaybeUninit` is always considered initialized + unsafe { urc.into_rc() } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -735,16 +789,9 @@ impl Rc { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_zeroed_in(alloc: A) -> Rc, A> { - unsafe { - Rc::from_ptr_in( - Rc::allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate_zeroed(layout), - <*mut u8>::cast, - ), - alloc, - ) - } + let urc = UniqueRcUninit::new_zeroed_in(alloc); + // SAFETY: a `MaybeUninit` is always considered initialized + unsafe { urc.into_rc() } } /// Constructs a new `Rc` in the given allocator while giving you a `Weak` to the allocation, @@ -782,20 +829,14 @@ impl Rc { where F: FnOnce(&Weak) -> T, { - // Construct the inner in the "uninitialized" state with a single - // weak reference. - let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in( - RcInner { - strong: Cell::new(0), - weak: Cell::new(1), - value: mem::MaybeUninit::::uninit(), - }, - alloc, - )); - let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into(); - let init_ptr: NonNull> = uninit_ptr.cast(); + let value_ptr = unsafe { + let thin_ptr = + polymorphic::allocate_for_layout(T::LAYOUT, |layout| alloc.allocate(layout)); + thin_ptr.cast::() + }; - let weak = Weak { ptr: init_ptr, alloc: alloc }; + let weak = Weak { value_ptr_or_sentinel: value_ptr, alloc: alloc }; + weak.counts().unwrap().init_for_cyclic(); // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. If @@ -806,20 +847,19 @@ impl Rc { let data = data_fn(&weak); let strong = unsafe { - let inner = init_ptr.as_ptr(); - ptr::write(&raw mut (*inner).value, data); + value_ptr.write(data); - let prev_value = (*inner).strong.get(); - debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - (*inner).strong.set(1); + let counts = weak.counts().unwrap(); + debug_assert_eq!(counts.strong(), 0, "No prior strong references should exist"); + // Do *not* use `inc_strong` here, as that assumes the previous count is non-zero + counts.strong.set(1); // Strong references should collectively own a shared weak reference, // so don't run the destructor for our old weak reference. // Calling into_raw_with_allocator has the double effect of giving us back the allocator, // and forgetting the weak reference. let alloc = weak.into_raw_with_allocator().1; - - Rc::from_inner_in(init_ptr, alloc) + Self::from_value_ptr_in(value_ptr, alloc) }; strong @@ -841,15 +881,8 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new_in(value: T, alloc: A) -> Result { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. 
- let (ptr, alloc) = Box::into_unique(Box::try_new_in( - RcInner { strong: Cell::new(1), weak: Cell::new(1), value }, - alloc, - )?); - Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) }) + let urc = UniqueRcUninit::try_new_uninit_in(alloc)?; + Ok(urc.write(value)) } /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an @@ -880,16 +913,9 @@ impl Rc { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn try_new_uninit_in(alloc: A) -> Result, A>, AllocError> { - unsafe { - Ok(Rc::from_ptr_in( - Rc::try_allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate(layout), - <*mut u8>::cast, - )?, - alloc, - )) - } + let urc = UniqueRcUninit::try_new_uninit_in(alloc)?; + // SAFETY: a `MaybeUninit` is always considered initialized + Ok(unsafe { urc.into_rc() }) } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -919,16 +945,9 @@ impl Rc { //#[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn try_new_zeroed_in(alloc: A) -> Result, A>, AllocError> { - unsafe { - Ok(Rc::from_ptr_in( - Rc::try_allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate_zeroed(layout), - <*mut u8>::cast, - )?, - alloc, - )) - } + let urc = UniqueRcUninit::try_new_zeroed_in(alloc)?; + // SAFETY: a `MaybeUninit` is always considered initialized + Ok(unsafe { urc.into_rc() }) } /// Constructs a new `Pin>` in the provided allocator. If `T` does not implement `Unpin`, then @@ -975,8 +994,8 @@ impl Rc { // the strong count, and then remove the implicit "strong weak" // pointer while also handling drop logic by just crafting a // fake Weak. - this.inner().dec_strong(); - let _weak = Weak { ptr: this.ptr, alloc }; + this.counts().dec_strong(); + let _weak = Weak { value_ptr_or_sentinel: this.value_ptr, alloc }; Ok(val) } else { Err(this) @@ -1046,7 +1065,7 @@ impl Rc<[T]> { #[stable(feature = "new_uninit", since = "1.82.0")] #[must_use] pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) } + Self::new_uninit_slice_in(len, Global) } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -1073,16 +1092,7 @@ impl Rc<[T]> { #[unstable(feature = "new_zeroed_alloc", issue = "129396")] #[must_use] pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem.cast::(), len) - as *mut RcInner<[mem::MaybeUninit]> - }, - )) - } + Self::new_zeroed_slice_in(len, Global) } } @@ -1116,7 +1126,9 @@ impl Rc<[T], A> { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit], A> { - unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) } + let urc = UniqueRcUninit::new_uninit_slice_in(len, alloc); + // SAFETY: a `MaybeUninit` is always considered initialized + unsafe { urc.into_rc() } } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -1145,19 +1157,14 @@ impl Rc<[T], A> { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit], A> { - unsafe { - Rc::from_ptr_in( - Rc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| alloc.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem.cast::(), 
len) - as *mut RcInner<[mem::MaybeUninit]> - }, - ), - alloc, - ) - } + let urc = UniqueRcUninit::allocate_unsized_in( + Layout::array::(len).unwrap(), + len, + alloc, + Allocator::allocate_zeroed, + ); + // SAFETY: a `MaybeUninit` is always considered initialized + unsafe { urc.into_rc() } } } @@ -1193,8 +1200,8 @@ impl Rc, A> { #[stable(feature = "new_uninit", since = "1.82.0")] #[inline] pub unsafe fn assume_init(self) -> Rc { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - unsafe { Rc::from_inner_in(ptr.cast(), alloc) } + let (ptr, alloc) = Rc::into_value_ptr_with_allocator(self); + unsafe { Rc::from_value_ptr_in(ptr.cast(), alloc) } } } @@ -1233,8 +1240,12 @@ impl Rc<[mem::MaybeUninit], A> { #[stable(feature = "new_uninit", since = "1.82.0")] #[inline] pub unsafe fn assume_init(self) -> Rc<[T], A> { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) } + let (ptr, alloc) = Rc::into_value_ptr_with_allocator(self); + // Have to manually cast the `NonNull` to preserve metadata + let data: NonNull> = ptr.as_non_null_ptr(); + let len = ptr.len(); + let value_ptr = NonNull::slice_from_raw_parts(data.cast::(), len); + unsafe { Rc::from_value_ptr_in(value_ptr, alloc) } } } @@ -1457,12 +1468,7 @@ impl Rc { #[stable(feature = "weak_into_raw", since = "1.45.0")] #[rustc_never_returns_null_ptr] pub fn as_ptr(this: &Self) -> *const T { - let ptr: *mut RcInner = NonNull::as_ptr(this.ptr); - - // SAFETY: This cannot go through Deref::deref or Rc::inner because - // this is required to retain raw/mut provenance such that e.g. `get_mut` can - // write through the pointer after the Rc is recovered through `from_raw`. - unsafe { &raw mut (*ptr).value } + this.value_ptr.as_ptr() } /// Constructs an `Rc` from a raw pointer in the provided allocator. @@ -1534,12 +1540,11 @@ impl Rc { /// ``` #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - let offset = unsafe { data_offset(ptr) }; - - // Reverse the offset to find the original RcInner. - let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner }; - - unsafe { Self::from_ptr_in(rc_ptr, alloc) } + // SAFETY: the pointer came from `into_raw` originally. + unsafe { + let value_ptr = NonNull::::new_unchecked(ptr.cast_mut()); + Self::from_value_ptr_in(value_ptr, alloc) + } } /// Creates a new [`Weak`] pointer to this allocation. @@ -1560,10 +1565,10 @@ impl Rc { where A: Clone, { - this.inner().inc_weak(); + this.counts().inc_weak(); // Make sure we do not create a dangling Weak - debug_assert!(!is_dangling(this.ptr.as_ptr())); - Weak { ptr: this.ptr, alloc: this.alloc.clone() } + debug_assert!(!is_sentinel(this.value_ptr)); + Weak { value_ptr_or_sentinel: this.value_ptr, alloc: this.alloc.clone() } } /// Gets the number of [`Weak`] pointers to this allocation. @@ -1581,7 +1586,7 @@ impl Rc { #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { - this.inner().weak() - 1 + this.counts().weak() - 1 } /// Gets the number of strong (`Rc`) pointers to this allocation. 
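// Usage sketch (not part of this patch): with the value pointer stored directly in the
// `Rc`, `Rc::as_ptr` and `Rc::into_raw` hand out exactly the address of the value, and
// `Rc::from_raw` rebuilds the `Rc` from that same pointer with no offset arithmetic.
// This observable behaviour holds both before and after the patch; only the internal
// bookkeeping changes.
use std::rc::Rc;

fn main() {
    let rc = Rc::new(String::from("hello"));
    let via_deref: *const String = &*rc;
    assert_eq!(Rc::as_ptr(&rc), via_deref);

    let raw = Rc::into_raw(rc);
    assert_eq!(raw, via_deref);

    // SAFETY: `raw` came from `Rc::into_raw` above and has not been freed or reused.
    let rc = unsafe { Rc::from_raw(raw) };
    assert_eq!(*rc, "hello");
}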
@@ -1599,7 +1604,7 @@ impl Rc { #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { - this.inner().strong() + this.counts().strong() } /// Increments the strong reference count on the `Rc` associated with the @@ -1783,7 +1788,7 @@ impl Rc { pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { // We are careful to *not* create a reference covering the "count" fields, as // this would conflict with accesses to the reference counts (e.g. by `Weak`). - unsafe { &mut (*this.ptr.as_ptr()).value } + unsafe { this.value_ptr.as_mut() } } #[inline] @@ -1804,7 +1809,7 @@ impl Rc { /// assert!(!Rc::ptr_eq(&five, &other_five)); /// ``` pub fn ptr_eq(this: &Self, other: &Self) -> bool { - ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr()) + ptr::addr_eq(this.value_ptr.as_ptr(), other.value_ptr.as_ptr()) } } @@ -1863,20 +1868,24 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn make_mut(this: &mut Self) -> &mut T { - let size_of_val = size_of_val::(&**this); + let this_data_ref: &T = &**this; + let value_layout = Layout::for_value::(this_data_ref); + + // `in_progress` drops the allocation if we panic before finishing initializing it. + let in_progress: UniqueRcUninit = UniqueRcUninit::allocate_unsized_in( + value_layout, + ptr::metadata(this_data_ref), + this.alloc.clone(), + Allocator::allocate, + ); if Rc::strong_count(this) != 1 { // Gotta clone the data, there are other Rcs. - let this_data_ref: &T = &**this; - // `in_progress` drops the allocation if we panic before finishing initializing it. - let mut in_progress: UniqueRcUninit = - UniqueRcUninit::new(this_data_ref, this.alloc.clone()); - // Initialize with clone of this. let initialized_clone = unsafe { // Clone. If the clone panics, `in_progress` will be dropped and clean up. - this_data_ref.clone_to_uninit(in_progress.data_ptr().cast()); + this_data_ref.clone_to_uninit(in_progress.value_ptr.as_ptr().cast()); // Cast type of pointer, now that it is initialized. in_progress.into_rc() }; @@ -1888,22 +1897,20 @@ impl Rc { // We don't need panic-protection like the above branch does, but we might as well // use the same mechanism. - let mut in_progress: UniqueRcUninit = - UniqueRcUninit::new(&**this, this.alloc.clone()); unsafe { // Initialize `in_progress` with move of **this. // We have to express this in terms of bytes because `T: ?Sized`; there is no // operation that just copies a value based on its `size_of_val()`. ptr::copy_nonoverlapping( ptr::from_ref(&**this).cast::(), - in_progress.data_ptr().cast::(), - size_of_val, + in_progress.value_ptr.as_ptr().cast::(), + value_layout.size(), ); - this.inner().dec_strong(); + this.counts().dec_strong(); // Remove implicit strong-weak ref (no need to craft a fake // Weak here -- we know other Weaks can clean up for us) - this.inner().dec_weak(); + this.counts().dec_weak(); // Replace `this` with newly constructed Rc that has the moved data. ptr::write(this, in_progress.into_rc()); } @@ -1913,7 +1920,7 @@ impl Rc { // reference count is guaranteed to be 1 at this point, and we required // the `Rc` itself to be `mut`, so we're returning the only possible // reference to the allocation. 
- unsafe { &mut this.ptr.as_mut().value } + unsafe { this.value_ptr.as_mut() } } } @@ -1977,8 +1984,8 @@ impl Rc { pub fn downcast(self) -> Result, Self> { if (*self).is::() { unsafe { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - Ok(Rc::from_inner_in(ptr.cast(), alloc)) + let (ptr, alloc) = Rc::into_value_ptr_with_allocator(self); + Ok(Rc::from_value_ptr_in(ptr.cast(), alloc)) } } else { Err(self) @@ -2015,123 +2022,100 @@ impl Rc { #[unstable(feature = "downcast_unchecked", issue = "90850")] pub unsafe fn downcast_unchecked(self) -> Rc { unsafe { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - Rc::from_inner_in(ptr.cast(), alloc) + let (ptr, alloc) = Rc::into_value_ptr_with_allocator(self); + Rc::from_value_ptr_in(ptr.cast(), alloc) } } } -impl Rc { - /// Allocates an `RcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided. - /// - /// The function `mem_to_rc_inner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcInner`. +impl Rc { #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner, - ) -> *mut RcInner { - let layout = rc_inner_layout_for_value_layout(value_layout); - unsafe { - Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner) - .unwrap_or_else(|_| handle_alloc_error(layout)) - } - } + fn from_box_in(src: Box) -> Rc { + let value_layout = Layout::for_value::(&src); + let value_metadata = ptr::metadata::(&*src); - /// Allocates an `RcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided, - /// returning an error if allocation fails. - /// - /// The function `mem_to_rc_inner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcInner`. - #[inline] - unsafe fn try_allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner, - ) -> Result<*mut RcInner, AllocError> { - let layout = rc_inner_layout_for_value_layout(value_layout); + // This is not a good way but something was broken... - // Allocate for the layout. - let ptr = allocate(layout)?; + let (raw, alloc) = Box::into_raw_with_allocator(src); + let src = unsafe { Box::from_raw_in(raw, &alloc) }; - // Initialize the RcInner - let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr()); - unsafe { - debug_assert_eq!(Layout::for_value_raw(inner), layout); + let rc = unsafe { + let uru = UniqueRcUninit::::allocate_unsized_in( + value_layout, + value_metadata, + &alloc, + Allocator::allocate, + ); + uru.value_ptr + .as_ptr() + .cast::() + .copy_from_nonoverlapping(Box::as_ptr(&src).cast::(), value_layout.size()); + uru.into_rc() + }; - (&raw mut (*inner).strong).write(Cell::new(1)); - (&raw mut (*inner).weak).write(Cell::new(1)); - } + drop(src); - Ok(inner) - } -} + let (ptr, _) = Rc::into_value_ptr_with_allocator(rc); + unsafe { Rc::from_value_ptr_in(ptr, alloc) } -impl Rc { - /// Allocates an `RcInner` with sufficient space for an unsized inner value - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner { - // Allocate for the `RcInner` using the given value. 
- unsafe { - Rc::::allocate_for_layout( - Layout::for_value_raw(ptr), - |layout| alloc.allocate(layout), - |mem| mem.with_metadata_of(ptr as *const RcInner), - ) - } - } + //asdf + //todo!() - #[cfg(not(no_global_oom_handling))] - fn from_box_in(src: Box) -> Rc { + /* + + // We need to do a few contortions here because we can't clone `A`, and thus + // can't construct an `Rc` (or `UniqueRcUninit`) as early as we'd like. + + let value_ref: &T = &src; + let value_layout = Layout::for_value::(value_ref); + let value_metadata = ptr::metadata(value_ref); + let alloc_ref = Box::allocator(&src); unsafe { - let value_size = size_of_val(&*src); - let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src)); + // First allocate -- the box's destructor will cleanup if this throws. + let thin_ptr = + polymorphic::allocate_for_layout(value_layout, |layout| alloc_ref.allocate(layout)); - // Copy value as bytes + // Forget the box and copy its value as bytes into the new allocation. + // Neither of these run user code, so cannot panic. + let (bptr, alloc) = Box::into_raw_with_allocator(src); ptr::copy_nonoverlapping( - (&raw const *src) as *const u8, - (&raw mut (*ptr).value) as *mut u8, - value_size, + bptr.cast_const().cast::(), + thin_ptr.as_ptr(), + value_layout.size(), ); - // Free the allocation without dropping its contents - let (bptr, alloc) = Box::into_raw_with_allocator(src); - let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop, alloc.by_ref()); - drop(src); + // Build the new objects to properly track resources, which cannot panic. + let value_ptr = NonNull::from_raw_parts(thin_ptr, value_metadata); + let rc = Rc::from_value_ptr_in(value_ptr, alloc); + let mu_box = Box::from_raw_in(bptr as *mut mem::ManuallyDrop, &rc.alloc); - Self::from_ptr_in(ptr, alloc) - } - } -} + // Drop the box, which shouldn't panic but could, now that the Rc + // exists to properly drop its value and allocation + drop(mu_box); -impl Rc<[T]> { - /// Allocates an `RcInner<[T]>` with the given length. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> { - unsafe { - Self::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut RcInner<[T]>, - ) + rc } + + */ } +} +impl Rc<[T], A> { /// Copy elements from slice into newly allocated `Rc<[T]>` /// /// Unsafe because the caller must either take ownership or bind `T: Copy` #[cfg(not(no_global_oom_handling))] - unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { + unsafe fn copy_from_slice_in(v: &[T], alloc: A) -> Self { + let len = v.len(); + let uru = UniqueRcUninit::<[T], A>::new_uninit_slice_in(len, alloc); unsafe { - let ptr = Self::allocate_for_slice(v.len()); - ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len()); - Self::from_ptr(ptr) + uru.value_ptr.as_non_null_ptr().as_ptr().copy_from_nonoverlapping(v.as_ptr(), len); + uru.into_rc() } } +} +impl Rc<[T]> { /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size. /// /// Behavior is undefined should the size be wrong. @@ -2141,9 +2125,7 @@ impl Rc<[T]> { // In the event of a panic, elements that have been written // into the new RcInner will be dropped, then the memory freed. 
struct Guard { - mem: NonNull, elems: *mut T, - layout: Layout, n_elems: usize, } @@ -2152,47 +2134,26 @@ impl Rc<[T]> { unsafe { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - - Global.deallocate(self.mem, self.layout); } } } unsafe { - let ptr = Self::allocate_for_slice(len); - - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value_raw(ptr); - - // Pointer to first element - let elems = (&raw mut (*ptr).value) as *mut T; + let uru = UniqueRcUninit::<[T]>::new_uninit_slice_in(len, Global); - let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; + let elems: *mut T = uru.value_ptr.as_non_null_ptr().as_ptr(); + let mut guard = Guard { elems, n_elems: 0 }; for (i, item) in iter.enumerate() { + debug_assert!(i < uru.value_ptr.len()); ptr::write(elems.add(i), item); guard.n_elems += 1; } - // All clear. Forget the guard so it doesn't free the new RcInner. + // All clear. Forget the guard so it doesn't drop all the elements. mem::forget(guard); - Self::from_ptr(ptr) - } - } -} - -impl Rc<[T], A> { - /// Allocates an `RcInner<[T]>` with the given length. - #[inline] - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> { - unsafe { - Rc::<[T]>::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| alloc.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut RcInner<[T]>, - ) + uru.into_rc() } } } @@ -2215,7 +2176,7 @@ impl RcFromSlice for Rc<[T]> { impl RcFromSlice for Rc<[T]> { #[inline] fn from_slice(v: &[T]) -> Self { - unsafe { Rc::copy_from_slice(v) } + unsafe { Rc::copy_from_slice_in(v, Global) } } } @@ -2225,7 +2186,7 @@ impl Deref for Rc { #[inline(always)] fn deref(&self) -> &T { - &self.inner().value + unsafe { self.value_ptr.as_ref() } } } @@ -2271,8 +2232,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc { #[inline] fn drop(&mut self) { unsafe { - self.inner().dec_strong(); - if self.inner().strong() == 0 { + if self.counts().dec_strong() == 0 { self.drop_slow(); } } @@ -2297,9 +2257,18 @@ impl Clone for Rc { /// ``` #[inline] fn clone(&self) -> Self { + // We want to let LLVM inline this if it wants, but we really don't want + // the MIR inliner to inline it into `clone` and spam it all over the place. + #[inline] + #[rustc_no_mir_inline] + unsafe fn clone_polymorphic(value_ptr: NonNull<()>) { + unsafe { RcCounts::from_value_ptr(value_ptr).as_ref() }.inc_strong(); + } + + let value_ptr = self.value_ptr; unsafe { - self.inner().inc_strong(); - Self::from_inner_in(self.ptr, self.alloc.clone()) + clone_polymorphic(value_ptr.cast()); + Self::from_value_ptr_in(value_ptr, self.alloc.clone()) } } } @@ -2319,16 +2288,8 @@ impl Default for Rc { /// ``` #[inline] fn default() -> Rc { - unsafe { - Self::from_inner( - Box::leak(Box::write(Box::new_uninit(), RcInner { - strong: Cell::new(1), - weak: Cell::new(1), - value: T::default(), - })) - .into(), - ) - } + let uru = UniqueRcUninit::new_uninit_in(Global); + uru.write(T::default()) } } @@ -2770,18 +2731,29 @@ impl From> for Rc<[T], A> { /// ``` #[inline] fn from(v: Vec) -> Rc<[T], A> { - unsafe { - let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); + // We can't clone the allocator, so to get access it, rebuild the `Vec` + // using a reference to the allocator so it's still panic-safe. + let (ptr, len, capacity, alloc) = v.into_raw_parts_with_alloc(); + // SAFETY: Just got these from into_raw_parts above. 
+ let mut v = unsafe { Vec::from_raw_parts_in(ptr, len, capacity, &alloc) }; - let rc_ptr = Self::allocate_for_slice_in(len, &alloc); - ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len); + // SAFETY: This can only panic from allocation failure, at which point it + // didn't even try to copy anything, and leaving them in the vec is fine. + // If it returned successfully, we'll immediately leak them from the `Vec` + // to make sure they're not dropped twice + let rc = unsafe { Rc::copy_from_slice_in(v.as_slice(), &alloc) }; - // Create a `Vec` with length 0, to deallocate the buffer - // without dropping its contents or the allocator - let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc); + // Now that these items are in `rc` instead, remove them from `v` without dropping them. + // SAFETY: `set_len(0)` is always sound. + unsafe { v.set_len(0) }; - Self::from_ptr_in(rc_ptr, alloc) - } + // Release the allocation in the Vec to end the reference + drop(v); + + // Rebuild the Rc with the proper allocator + let raw = Rc::into_raw(rc); + // SAFETY: just got this from into_raw above + unsafe { Rc::from_raw_in(raw, alloc) } } } @@ -2837,8 +2809,8 @@ impl TryFrom> for Rc<[T; N], A> { fn try_from(boxed_slice: Rc<[T], A>) -> Result { if boxed_slice.len() == N { - let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice); - Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) }) + let (value_ptr, alloc) = Rc::into_value_ptr_with_allocator(boxed_slice); + Ok(unsafe { Rc::from_value_ptr_in(value_ptr.cast(), alloc) }) } else { Err(boxed_slice) } @@ -2961,11 +2933,11 @@ pub struct Weak< > { // This is a `NonNull` to allow optimizing the size of this type in enums, // but it is not necessarily a valid pointer. - // `Weak::new` sets this to `usize::MAX` so that it doesn’t need + // `Weak::new` sets this to `1_usize` so that it doesn’t need // to allocate space on the heap. That's not a value a real pointer - // will ever have because RcInner has alignment at least 2. + // will ever have because the counts are always before the value. // This is only possible when `T: Sized`; unsized `T` never dangle. - ptr: NonNull>, + value_ptr_or_sentinel: NonNull, alloc: A, } @@ -2999,12 +2971,7 @@ impl Weak { #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")] #[must_use] pub const fn new() -> Weak { - Weak { - ptr: unsafe { - NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) - }, - alloc: Global, - } + Self::new_in(Global) } } @@ -3025,27 +2992,12 @@ impl Weak { /// ``` #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub fn new_in(alloc: A) -> Weak { - Weak { - ptr: unsafe { - NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) - }, - alloc, - } + #[cfg_attr(bootstrap, rustc_const_stable(feature = "const_weak_new", since = "1.73.0"))] + pub const fn new_in(alloc: A) -> Weak { + Weak { value_ptr_or_sentinel: SENTINEL.cast(), alloc } } } -pub(crate) fn is_dangling(ptr: *const T) -> bool { - (ptr.cast::<()>()).addr() == usize::MAX -} - -/// Helper type to allow accessing the reference counts without -/// making any assertions about the data field. -struct WeakInner<'a> { - weak: &'a Cell, - strong: &'a Cell, -} - impl Weak { /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. 
/// @@ -3132,18 +3084,8 @@ impl Weak { #[must_use] #[stable(feature = "rc_as_ptr", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { - let ptr: *mut RcInner = NonNull::as_ptr(self.ptr); - - if is_dangling(ptr) { - // If the pointer is dangling, we return the sentinel directly. This cannot be - // a valid payload address, as the payload is at least as aligned as RcInner (usize). - ptr as *const T - } else { - // SAFETY: if is_dangling returns false, then the pointer is dereferenceable. - // The payload may be dropped at this point, and we have to maintain provenance, - // so use raw pointer manipulation. - unsafe { &raw mut (*ptr).value } - } + // Don't need to check the sentinel since it's fine if we return it as-is. + self.value_ptr_or_sentinel.as_ptr() } /// Consumes the `Weak` and turns it into a raw pointer. @@ -3264,22 +3206,15 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - // See Weak::as_ptr for context on how the input pointer is derived. - - let ptr = if is_dangling(ptr) { - // This is a dangling Weak. - ptr as *mut RcInner - } else { - // Otherwise, we're guaranteed the pointer came from a nondangling Weak. - // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. - let offset = unsafe { data_offset(ptr) }; - // Thus, we reverse the offset to get the whole RcInner. - // SAFETY: the pointer originated from a Weak, so this offset is safe. - unsafe { ptr.byte_sub(offset) as *mut RcInner } - }; - - // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc } + let ptr = ptr.cast_mut(); + debug_assert!(!ptr.is_null()); + // SAFETY: `Weak::as_ptr` just returns the `value_ptr_or_sentinel` directly, so + // since the input pointer has to come from `into_raw` (which calls `as_ptr`) + // we can just cast back again. + unsafe { + let value_ptr_or_sentinel = NonNull::new_unchecked(ptr); + Weak { value_ptr_or_sentinel, alloc } + } } /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying @@ -3312,15 +3247,15 @@ impl Weak { where A: Clone, { - let inner = self.inner()?; + let counts = self.counts()?; - if inner.strong() == 0 { + if counts.strong() == 0 { None } else { - unsafe { - inner.inc_strong(); - Some(Rc::from_inner_in(self.ptr, self.alloc.clone())) - } + counts.inc_strong(); + let value_ptr = self.value_ptr_or_sentinel; + let alloc = self.alloc.clone(); + Some(unsafe { Rc::from_value_ptr_in(value_ptr, alloc) }) } } @@ -3330,7 +3265,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn strong_count(&self) -> usize { - if let Some(inner) = self.inner() { inner.strong() } else { 0 } + if let Some(counts) = self.counts() { counts.strong() } else { 0 } } /// Gets the number of `Weak` pointers pointing to this allocation. 
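// Usage sketch (not part of this patch): observable behaviour of the sentinel-based
// `Weak`. A `Weak::new` stores the sentinel instead of pointing at an allocation, so
// `upgrade` returns `None` without ever touching any counts; a downgraded `Weak`
// upgrades only while at least one strong `Rc` is still alive.
use std::rc::{Rc, Weak};

fn main() {
    let dangling: Weak<u32> = Weak::new();
    assert!(dangling.upgrade().is_none());
    assert_eq!(dangling.strong_count(), 0);

    let strong = Rc::new(42u32);
    let weak = Rc::downgrade(&strong);
    assert_eq!(weak.upgrade().as_deref(), Some(&42));

    drop(strong);
    // The value is gone, but the counts (and the weak's pointer) stay valid
    // until the last `Weak` is dropped.
    assert!(weak.upgrade().is_none());
    assert_eq!(weak.strong_count(), 0);
}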
@@ -3339,9 +3274,9 @@ impl Weak { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - if let Some(inner) = self.inner() { - if inner.strong() > 0 { - inner.weak() - 1 // subtract the implicit weak ptr + if let Some(counts) = self.counts() { + if counts.strong() > 0 { + counts.weak() - 1 // subtract the implicit weak ptr } else { 0 } @@ -3350,19 +3285,19 @@ impl Weak { } } - /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`, + /// Returns `None` when the pointer is dangling and there is no allocated `RcCounts`, /// (i.e., when this `Weak` was created by `Weak::new`). #[inline] - fn inner(&self) -> Option> { - if is_dangling(self.ptr.as_ptr()) { + fn counts(&self) -> Option<&RcCounts> { + if is_sentinel(self.value_ptr_or_sentinel) { None } else { // We are careful to *not* create a reference covering the "data" field, as // the field may be mutated concurrently (for example, if the last `Rc` // is dropped, the data field will be dropped in-place). Some(unsafe { - let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } + let counts_ptr = RcCounts::from_value_ptr(self.value_ptr_or_sentinel); + counts_ptr.as_ref() }) } } @@ -3410,7 +3345,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) + ptr::addr_eq(self.value_ptr_or_sentinel.as_ptr(), other.value_ptr_or_sentinel.as_ptr()) } } @@ -3441,14 +3376,18 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak { /// assert!(other_weak_foo.upgrade().is_none()); /// ``` fn drop(&mut self) { - let inner = if let Some(inner) = self.inner() { inner } else { return }; + let Some(counts) = self.counts() else { + // The weak pointer is dangling; nothing to deallocate. + return; + }; - inner.dec_weak(); // the weak count starts at 1, and will only go to zero if all // the strong pointers have disappeared. - if inner.weak() == 0 { + if counts.dec_weak() == 0 { unsafe { - self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); + let value_ptr = self.value_ptr_or_sentinel; + let inner = Layout::for_value_raw::(value_ptr.as_ptr()); + polymorphic::deallocate_for_layout(inner, &self.alloc, value_ptr.cast::()); } } } @@ -3469,10 +3408,10 @@ impl Clone for Weak { /// ``` #[inline] fn clone(&self) -> Weak { - if let Some(inner) = self.inner() { - inner.inc_weak() + if let Some(counts) = self.counts() { + counts.inc_weak() } - Weak { ptr: self.ptr, alloc: self.alloc.clone() } + Weak { value_ptr_or_sentinel: self.value_ptr_or_sentinel, alloc: self.alloc.clone() } } } @@ -3503,111 +3442,6 @@ impl Default for Weak { } } -// NOTE: We checked_add here to deal with mem::forget safely. In particular -// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then -// you can free the allocation while outstanding Rcs (or Weaks) exist. -// We abort because this is such a degenerate scenario that we don't care about -// what happens -- no real program should ever experience this. -// -// This should have negligible overhead since you don't actually need to -// clone these much in Rust thanks to ownership and move-semantics. 
- -#[doc(hidden)] -trait RcInnerPtr { - fn weak_ref(&self) -> &Cell; - fn strong_ref(&self) -> &Cell; - - #[inline] - fn strong(&self) -> usize { - self.strong_ref().get() - } - - #[inline] - fn inc_strong(&self) { - let strong = self.strong(); - - // We insert an `assume` here to hint LLVM at an otherwise - // missed optimization. - // SAFETY: The reference count will never be zero when this is - // called. - unsafe { - hint::assert_unchecked(strong != 0); - } - - let strong = strong.wrapping_add(1); - self.strong_ref().set(strong); - - // We want to abort on overflow instead of dropping the value. - // Checking for overflow after the store instead of before - // allows for slightly better code generation. - if core::intrinsics::unlikely(strong == 0) { - abort(); - } - } - - #[inline] - fn dec_strong(&self) { - self.strong_ref().set(self.strong() - 1); - } - - #[inline] - fn weak(&self) -> usize { - self.weak_ref().get() - } - - #[inline] - fn inc_weak(&self) { - let weak = self.weak(); - - // We insert an `assume` here to hint LLVM at an otherwise - // missed optimization. - // SAFETY: The reference count will never be zero when this is - // called. - unsafe { - hint::assert_unchecked(weak != 0); - } - - let weak = weak.wrapping_add(1); - self.weak_ref().set(weak); - - // We want to abort on overflow instead of dropping the value. - // Checking for overflow after the store instead of before - // allows for slightly better code generation. - if core::intrinsics::unlikely(weak == 0) { - abort(); - } - } - - #[inline] - fn dec_weak(&self) { - self.weak_ref().set(self.weak() - 1); - } -} - -impl RcInnerPtr for RcInner { - #[inline(always)] - fn weak_ref(&self) -> &Cell { - &self.weak - } - - #[inline(always)] - fn strong_ref(&self) -> &Cell { - &self.strong - } -} - -impl<'a> RcInnerPtr for WeakInner<'a> { - #[inline(always)] - fn weak_ref(&self) -> &Cell { - self.weak - } - - #[inline(always)] - fn strong_ref(&self) -> &Cell { - self.strong - } -} - #[stable(feature = "rust1", since = "1.0.0")] impl borrow::Borrow for Rc { fn borrow(&self) -> &T { @@ -3625,28 +3459,6 @@ impl AsRef for Rc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Rc {} -/// Gets the offset within an `RcInner` for the payload behind a pointer. -/// -/// # Safety -/// -/// The pointer must point to (and have valid metadata for) a previously -/// valid instance of T, but the T is allowed to be dropped. -unsafe fn data_offset(ptr: *const T) -> usize { - // Align the unsized value to the end of the RcInner. - // Because RcInner is repr(C), it will always be the last field in memory. - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of align_of_val_raw; this is an implementation - // detail of the language that must not be relied upon outside of std. - unsafe { data_offset_align(align_of_val_raw(ptr)) } -} - -#[inline] -fn data_offset_align(align: usize) -> usize { - let layout = Layout::new::>(); - layout.size() + layout.padding_needed_for(align) -} - /// A uniquely owned [`Rc`]. 
/// /// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong @@ -3689,8 +3501,8 @@ pub struct UniqueRc< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - ptr: NonNull>, - phantom: PhantomData>, + value_ptr: NonNull, + phantom: PhantomData, alloc: A, } @@ -3725,21 +3537,29 @@ impl UniqueRc { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn new_in(value: T, alloc: A) -> Self { - let (ptr, alloc) = Box::into_unique(Box::new_in( - RcInner { - strong: Cell::new(0), - // keep one weak reference so if all the weak pointers that are created are dropped - // the UniqueRc still stays valid. - weak: Cell::new(1), - value, - }, - alloc, - )); - Self { ptr: ptr.into(), phantom: PhantomData, alloc } + unsafe { + let thin_ptr = + polymorphic::allocate_for_layout(T::LAYOUT, |layout| alloc.allocate(layout)); + let value_ptr = thin_ptr.cast::(); + value_ptr.write(value); + let urc = UniqueRc { value_ptr, phantom: PhantomData, alloc }; + urc.counts().init_for_cyclic(); + urc + } } } impl UniqueRc { + #[inline] + fn counts(&self) -> &RcCounts { + // SAFETY: while this Rc is alive we're guaranteed that the inner pointer is valid, + // so the pointer math is inbounds and we can give out a reference. + unsafe { + let counts_ptr = RcCounts::from_value_ptr(self.value_ptr); + counts_ptr.as_ref() + } + } + /// Converts the `UniqueRc` into a regular [`Rc`]. /// /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the `value` that @@ -3749,7 +3569,11 @@ impl UniqueRc { /// references. #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn into_rc(this: Self) -> Rc { - let mut this = ManuallyDrop::new(this); + let counts = this.counts(); + debug_assert_eq!(counts.strong(), 0, "No prior strong references should exist"); + counts.strong.set(1); + + let this = ManuallyDrop::new(this); // Move the allocator out. // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in @@ -3759,8 +3583,7 @@ impl UniqueRc { // SAFETY: This pointer was allocated at creation time so we know it is valid. unsafe { // Convert our weak reference into a strong reference - this.ptr.as_mut().strong.set(1); - Rc::from_inner_in(this.ptr, alloc) + Rc::from_value_ptr_in(this.value_ptr, alloc) } } } @@ -3772,12 +3595,8 @@ impl UniqueRc { /// to a [`Rc`] using [`UniqueRc::into_rc`]. #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn downgrade(this: &Self) -> Weak { - // SAFETY: This pointer was allocated at creation time and we guarantee that we only have - // one strong reference before converting to a regular Rc. - unsafe { - this.ptr.as_ref().inc_weak(); - } - Weak { ptr: this.ptr, alloc: this.alloc.clone() } + this.counts().inc_weak(); + Weak { value_ptr_or_sentinel: this.value_ptr, alloc: this.alloc.clone() } } } @@ -3787,7 +3606,7 @@ impl Deref for UniqueRc { fn deref(&self) -> &T { // SAFETY: This pointer was allocated at creation time so we know it is valid. - unsafe { &self.ptr.as_ref().value } + unsafe { self.value_ptr.as_ref() } } } @@ -3800,88 +3619,257 @@ impl DerefMut for UniqueRc { // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we // have unique ownership and therefore it's safe to make a mutable reference because // `UniqueRc` owns the only strong reference to itself. 
- unsafe { &mut (*self.ptr.as_ptr()).value } + unsafe { self.value_ptr.as_mut() } } } #[unstable(feature = "unique_rc_arc", issue = "112566")] unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc { fn drop(&mut self) { + let value_ptr = self.value_ptr; + unsafe { // destroy the contained object - drop_in_place(DerefMut::deref_mut(self)); + value_ptr.drop_in_place(); // remove the implicit "strong weak" pointer now that we've destroyed the contents. - self.ptr.as_ref().dec_weak(); - - if self.ptr.as_ref().weak() == 0 { - self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); + let counts = self.counts(); + debug_assert_eq!(counts.strong(), 0, "No prior strong references should exist"); + if counts.dec_weak() == 0 { + let inner = Layout::for_value_raw::(value_ptr.as_ptr()); + polymorphic::deallocate_for_layout(inner, &self.alloc, value_ptr.cast::()); } } } } -/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,** +/// A unique owning pointer **that does not imply the contents are initialized,** /// but will deallocate it (without dropping the value) when dropped. /// /// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic. /// It is nearly a duplicate of `UniqueRc, A>` except that it allows `T: !Sized`, /// which `MaybeUninit` does not. #[cfg(not(no_global_oom_handling))] -struct UniqueRcUninit { - ptr: NonNull>, - layout_for_value: Layout, - alloc: Option, +struct UniqueRcUninit { + // Must have accurate metadata for `Layout::for_value_raw`, in order to drop + value_ptr: NonNull, + alloc: ManuallyDrop, } #[cfg(not(no_global_oom_handling))] impl UniqueRcUninit { - /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it. - fn new(for_value: &T, alloc: A) -> UniqueRcUninit { - let layout = Layout::for_value(for_value); - let ptr = unsafe { - Rc::allocate_for_layout( - layout, - |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner), - |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner), - ) - }; - Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) } + #[inline] + fn try_allocate_unsized_in( + value_layout: Layout, + meta: ::Metadata, + alloc: A, + allocate_fn: impl FnOnce(&A, Layout) -> Result, AllocError>, + ) -> Result { + unsafe { + let thin_ptr = polymorphic::try_allocate_for_layout(value_layout, |layout| { + allocate_fn(&alloc, layout) + })?; + let value_ptr = NonNull::from_raw_parts(thin_ptr, meta); + let rc = Self::from_value_ptr_in(value_ptr, alloc); + rc.counts().init_for_rc(); + Ok(rc) + } } - /// Returns the pointer to be written into to initialize the [`Rc`]. 
- fn data_ptr(&mut self) -> *mut T { - let offset = data_offset_align(self.layout_for_value.align()); - unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T } + #[cfg(not(no_global_oom_handling))] + #[inline] + fn allocate_unsized_in( + value_layout: Layout, + meta: ::Metadata, + alloc: A, + allocate_fn: impl FnOnce(&A, Layout) -> Result, AllocError>, + ) -> Self { + unsafe { + let thin_ptr = polymorphic::allocate_for_layout(value_layout, |layout| { + allocate_fn(&alloc, layout) + }); + let value_ptr = NonNull::from_raw_parts(thin_ptr, meta); + let uru = Self::from_value_ptr_in(value_ptr, alloc); + uru.counts().init_for_rc(); + uru + } + } + + fn from_value_ptr_in(value_ptr: NonNull, alloc: A) -> Self { + Self { value_ptr, alloc: ManuallyDrop::new(alloc) } + } + + #[inline] + fn counts(&self) -> &RcCounts { + // SAFETY: while this Rc is alive we're guaranteed that the inner pointer is valid, + // so the pointer math is inbounds and we can give out a reference. + unsafe { + let counts_ptr = RcCounts::from_value_ptr(self.value_ptr); + counts_ptr.as_ref() + } } /// Upgrade this into a normal [`Rc`]. /// /// # Safety /// - /// The data must have been initialized (by writing to [`Self::data_ptr()`]). + /// The data must have been initialized (by writing to [`Self::value_ptr`]). unsafe fn into_rc(self) -> Rc { let mut this = ManuallyDrop::new(self); - let ptr = this.ptr; - let alloc = this.alloc.take().unwrap(); // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible // for having initialized the data. - unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) } + unsafe { Rc::from_value_ptr_in(this.value_ptr, ManuallyDrop::take(&mut this.alloc)) } + } +} + +impl UniqueRcUninit<[T], A> { + #[cfg(not(no_global_oom_handling))] + #[inline] + fn new_uninit_slice_in(len: usize, alloc: A) -> Self { + let array_layout = Layout::array::(len).unwrap(); + Self::allocate_unsized_in(array_layout, len, alloc, Allocator::allocate) + } +} + +impl UniqueRcUninit { + #[cfg(not(no_global_oom_handling))] + #[inline] + fn new_uninit_in(alloc: A) -> Self { + Self::allocate_in(alloc, Allocator::allocate) + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn new_zeroed_in(alloc: A) -> Self { + Self::allocate_in(alloc, Allocator::allocate_zeroed) + } + + #[inline] + fn try_new_uninit_in(alloc: A) -> Result { + Self::try_allocate_in(alloc, Allocator::allocate) + } + + #[inline] + fn try_new_zeroed_in(alloc: A) -> Result { + Self::try_allocate_in(alloc, Allocator::allocate_zeroed) + } + + #[inline] + fn try_allocate_in( + alloc: A, + allocate_fn: impl FnOnce(&A, Layout) -> Result, AllocError>, + ) -> Result { + Self::try_allocate_unsized_in(T::LAYOUT, (), alloc, allocate_fn) + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn allocate_in( + alloc: A, + allocate_fn: impl FnOnce(&A, Layout) -> Result, AllocError>, + ) -> Self { + Self::allocate_unsized_in(T::LAYOUT, (), alloc, allocate_fn) + } + + #[inline] + fn write(self, value: T) -> Rc { + // SAFETY: by type invariant, this memory is writeable. + // `write`ing doesn't read the previous value, so it's fine that it is + // (probably) still uninitialized. (If it was initialized, that's still + // sound, just might leak, which is the caller's problem, not ours.) 
+        unsafe { self.value_ptr.write(value) };
+        // SAFETY: Just initialized the value
+        unsafe { self.into_rc() }
     }
 }
 
 #[cfg(not(no_global_oom_handling))]
 impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
     fn drop(&mut self) {
+        let counts = self.counts();
+        debug_assert_eq!(counts.strong(), 1, "This should still be unique");
+        debug_assert_eq!(counts.weak(), 1, "This should still be unique");
+
         // SAFETY:
         // * new() produced a pointer safe to deallocate.
         // * We own the pointer unless into_rc() was called, which forgets us.
         unsafe {
-            self.alloc.take().unwrap().deallocate(
-                self.ptr.cast(),
-                rc_inner_layout_for_value_layout(self.layout_for_value),
-            );
+            let alloc = ManuallyDrop::take(&mut self.alloc);
+            let inner = Layout::for_value_raw::<T>(self.value_ptr.as_ptr());
+            polymorphic::deallocate_for_layout(inner, alloc, self.value_ptr.cast::<u8>());
+        }
+    }
+}
+
+/// These (de)allocation methods are intentionally polymorphized:
+/// they don't take the type being allocated at all, only the `Layout` (and allocator),
+/// so that we don't need to emit them separately for every value type.
+mod polymorphic {
+    use super::*;
+
+    #[inline]
+    pub(super) unsafe fn try_allocate_for_layout(
+        value_layout: Layout,
+        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
+    ) -> Result<NonNull<u8>, AllocError> {
+        unsafe { try_allocate_for_layout_inner(value_layout, allocate, |_, e| e) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(super) unsafe fn allocate_for_layout(
+        value_layout: Layout,
+        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
+    ) -> NonNull<u8> {
+        // FIXME(edition_2024/never_fallback): shouldn't need the never type annotation here
+        let Ok(thin_ptr) = unsafe {
+            try_allocate_for_layout_inner::<!>(value_layout, allocate, |layout, _| {
+                handle_alloc_error(layout)
+            })
+        };
+        thin_ptr
+    }
+
+    pub(super) unsafe fn deallocate_for_layout(
+        value_layout: Layout,
+        alloc: impl Allocator,
+        value_ptr: NonNull<u8>,
+    ) {
+        let (layout, offset) = rc_layout_and_offset_from_value_layout(value_layout);
+        unsafe {
+            let alloc_ptr = value_ptr.sub(offset);
+            alloc.deallocate(alloc_ptr, layout);
+        }
+    }
+
+    /// We want to put both the counts and the value into the same allocation.
+    ///
+    /// This does *not* need to be rounded up to alignment, as we never make a single
+    /// Rust type that covers both the counts and the value. So if you have an `Rc<u8>`,
+    /// the returned size will be 2×usize+1, not a multiple of `align_of::<usize>()`.
+    #[inline]
+    const fn rc_layout_and_offset_from_value_layout(inner: Layout) -> (Layout, usize) {
+        match RcCounts::LAYOUT.extend(inner) {
+            Ok(v) => v,
+            Err(_) => panic!("Inner layout is too big to put in an `Rc`"),
+        }
+    }
+
+    #[inline]
+    unsafe fn try_allocate_for_layout_inner<E>(
+        value_layout: Layout,
+        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
+        error_handler: impl FnOnce(Layout, AllocError) -> E,
+    ) -> Result<NonNull<u8>, E> {
+        let (layout, offset) = rc_layout_and_offset_from_value_layout(value_layout);
+
+        match allocate(layout) {
+            Ok(allocated_ptr) => {
+                let thin_ptr = unsafe { allocated_ptr.as_non_null_ptr().add(offset) };
+                Ok(thin_ptr)
+            }
+            Err(e) => Err(error_handler(layout, e)),
         }
     }
 }
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index da2d6bb3bce24..49e4ea9f1c2a2 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -33,7 +33,6 @@ use crate::alloc::handle_alloc_error;
 use crate::alloc::{AllocError, Allocator, Global, Layout};
 use crate::borrow::{Cow, ToOwned};
 use crate::boxed::Box;
-use crate::rc::is_dangling;
 #[cfg(not(no_global_oom_handling))]
 use crate::string::String;
 #[cfg(not(no_global_oom_handling))]
@@ -42,6 +41,10 @@ use crate::vec::Vec;
 #[cfg(test)]
 mod tests;
 
+fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
+    (ptr.cast::<()>()).addr() == usize::MAX
+}
+
 /// A soft limit on the amount of references that may be made to an `Arc`.
 ///
 /// Going above this limit will abort your program (although not
diff --git a/tests/codegen/placement-new.rs b/tests/codegen/placement-new.rs
index 0ec2b6a6f20e7..77f5c55597809 100644
--- a/tests/codegen/placement-new.rs
+++ b/tests/codegen/placement-new.rs
@@ -21,10 +21,15 @@ pub fn box_default_inplace() -> Box<(String, String)> {
 // CHECK-LABEL: @rc_default_inplace
 #[no_mangle]
 pub fn rc_default_inplace() -> Rc<(String, String)> {
+    // The pointer in the Rc is to the value (after the counts), not the allocation,
+    // so this test needs to check for the offsetting too.
+
     // CHECK-NOT: alloca
     // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc(
     // CHECK-NOT: call void @llvm.memcpy
-    // CHECK: ret ptr [[RC]]
+    // CHECK: [[RC_VAL:%.*]] = getelementptr inbounds i8, ptr [[RC]], {{i64 16|i32 8|i16 4}}
+    // CHECK-NOT: call void @llvm.memcpy
+    // CHECK: ret ptr [[RC_VAL]]
     Rc::default()
 }
diff --git a/tests/codegen/rc-dedup.rs b/tests/codegen/rc-dedup.rs
new file mode 100644
index 0000000000000..cb522ff97085d
--- /dev/null
+++ b/tests/codegen/rc-dedup.rs
@@ -0,0 +1,52 @@
+//@ compile-flags: -O
+//@ compile-flags: -Zmerge-functions=aliases
+#![crate_type = "lib"]
+
+use std::rc::Rc;
+
+// Ensure that different pointee types for `Rc` use *exactly* the same code,
+// so that LLVM is able to merge them.
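A quick standalone illustration of the offset arithmetic that the placement-new checks above and the dedup checks below both rely on. The sketch uses `[usize; 2]` as a stand-in for the counts header (same size and alignment as `RcCounts`); it is not code from this patch, it only reproduces the 16-byte value offset on 64-bit targets and the unrounded 2×usize+1 size described in the layout helper's docs.

use std::alloc::Layout;
use std::mem::size_of;

// Stand-in for the two reference counts that precede the value.
type CountsStandIn = [usize; 2];

fn counts_plus_value(value: Layout) -> (Layout, usize) {
    // The same `Layout::extend` computation as the layout helper above:
    // the counts come first, then the value.
    Layout::new::<CountsStandIn>().extend(value).unwrap()
}

fn main() {
    // `Rc<u8>`: on a 64-bit target the value lands 16 bytes into the allocation
    // (the `i64 16` constant the placement-new test checks for), and the total
    // size is 2*usize + 1, deliberately not rounded up to usize's alignment.
    let (layout, offset) = counts_plus_value(Layout::new::<u8>());
    assert_eq!(offset, 2 * size_of::<usize>());
    assert_eq!(layout.size(), 2 * size_of::<usize>() + 1);

    // An over-aligned value pushes the value offset up to its alignment, but
    // the counts still sit immediately before the value pointer.
    #[repr(align(128))]
    struct Big([u8; 128]);
    let (_, big_offset) = counts_plus_value(Layout::new::<Big>());
    assert_eq!(big_offset, 128);
}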
+ +// CHECK-LABEL: @small_deref ={{.+}}alias ptr (ptr), ptr @big_deref +// CHECK-LABEL: @small_clone ={{.+}}alias ptr (ptr), ptr @big_clone + +#[repr(align(2))] +pub struct SmallLowAlign(u16); + +#[repr(align(128))] +pub struct BigHighAlign([u32; 32]); + +// CHECK-NOT: small_deref +#[no_mangle] +pub fn small_deref(p: &Rc) -> *const SmallLowAlign { + let r: &SmallLowAlign = p; + r +} + +// CHECK-LABEL: @big_deref +#[no_mangle] +pub fn big_deref(p: &Rc) -> *const BigHighAlign { + // CHECK-NOT: alloca + // CHECK: %[[Q:.+]] = load ptr, ptr %p + // CHECK: ret ptr %[[Q]] + let r: &BigHighAlign = p; + r +} + +// CHECK-NOT: small_clone +#[no_mangle] +pub fn small_clone(p: &Rc) -> Rc { + Rc::clone(p) +} + +// CHECK-LABEL: @big_clone +#[no_mangle] +pub fn big_clone(p: &Rc) -> Rc { + // CHECK-NOT: alloca + // CHECK: %[[VAL_P:.+]] = load ptr, ptr %p + // CHECK: %[[STRONG_P:.+]] = getelementptr inbounds i8, ptr %[[VAL_P]], {{i64 -8|i32 -4|i16 -2}} + // CHECK: load {{.+}}, ptr %[[STRONG_P]] + // CHECK: store {{.+}}, ptr %[[STRONG_P]] + // CHECK: ret ptr %[[VAL_P]] + Rc::clone(p) +} diff --git a/tests/mir-opt/pre-codegen/clone_rcs.clone_rcs_of_many_types.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/clone_rcs.clone_rcs_of_many_types.PreCodegen.after.panic-abort.mir new file mode 100644 index 0000000000000..95a23c4426cf1 --- /dev/null +++ b/tests/mir-opt/pre-codegen/clone_rcs.clone_rcs_of_many_types.PreCodegen.after.panic-abort.mir @@ -0,0 +1,162 @@ +// MIR for `clone_rcs_of_many_types` after PreCodegen + +fn clone_rcs_of_many_types(_1: &Rc, _2: &Rc, _3: &Rc, _4: &Rc) -> (Rc, Rc, Rc, Rc) { + debug a => _1; + debug b => _2; + debug c => _3; + debug d => _4; + let mut _0: (std::rc::Rc, std::rc::Rc, std::rc::Rc, std::rc::Rc); + let mut _10: std::rc::Rc; + let mut _16: std::rc::Rc; + let mut _22: std::rc::Rc; + let mut _28: std::rc::Rc; + scope 1 (inlined as Clone>::clone) { + let _5: std::ptr::NonNull; + let mut _8: std::ptr::NonNull<()>; + let _9: (); + scope 2 { + scope 3 (inlined NonNull::::cast::<()>) { + let mut _7: *const (); + scope 4 (inlined NonNull::::as_ptr) { + let mut _6: *const u8; + } + } + scope 5 (inlined Rc::::from_value_ptr_in) { + } + scope 6 (inlined ::clone) { + } + } + } + scope 7 (inlined as Clone>::clone) { + let _11: std::ptr::NonNull; + let mut _14: std::ptr::NonNull<()>; + let _15: (); + scope 8 { + scope 9 (inlined NonNull::::cast::<()>) { + let mut _13: *const (); + scope 10 (inlined NonNull::::as_ptr) { + let mut _12: *const u32; + } + } + scope 11 (inlined Rc::::from_value_ptr_in) { + } + scope 12 (inlined ::clone) { + } + } + } + scope 13 (inlined as Clone>::clone) { + let _17: std::ptr::NonNull; + let mut _20: std::ptr::NonNull<()>; + let _21: (); + scope 14 { + scope 15 (inlined NonNull::::cast::<()>) { + let mut _19: *const (); + scope 16 (inlined NonNull::::as_ptr) { + let mut _18: *const u128; + } + } + scope 17 (inlined Rc::::from_value_ptr_in) { + } + scope 18 (inlined ::clone) { + } + } + } + scope 19 (inlined as Clone>::clone) { + let _23: std::ptr::NonNull; + let mut _26: std::ptr::NonNull<()>; + let _27: (); + scope 20 { + scope 21 (inlined NonNull::::cast::<()>) { + let mut _25: *const (); + scope 22 (inlined NonNull::::as_ptr) { + let mut _24: *const HighAlign; + } + } + scope 23 (inlined Rc::::from_value_ptr_in) { + } + scope 24 (inlined ::clone) { + } + } + } + + bb0: { + StorageLive(_10); + StorageLive(_5); + _5 = copy ((*_1).0: std::ptr::NonNull); + StorageLive(_8); + StorageLive(_6); + StorageLive(_7); + _6 = copy (_5.0: *const u8); + _7 = 
copy _6 as *const () (PtrToPtr); + _8 = NonNull::<()> { pointer: move _7 }; + StorageDead(_7); + StorageDead(_6); + _9 = as Clone>::clone::clone_polymorphic(move _8) -> [return: bb1, unwind unreachable]; + } + + bb1: { + StorageDead(_8); + _10 = Rc:: { value_ptr: copy _5, phantom: const PhantomData::<(rc::RcCounts, u8)>, alloc: const std::alloc::Global }; + StorageDead(_5); + StorageLive(_16); + StorageLive(_11); + _11 = copy ((*_2).0: std::ptr::NonNull); + StorageLive(_14); + StorageLive(_12); + StorageLive(_13); + _12 = copy (_11.0: *const u32); + _13 = copy _12 as *const () (PtrToPtr); + _14 = NonNull::<()> { pointer: move _13 }; + StorageDead(_13); + StorageDead(_12); + _15 = as Clone>::clone::clone_polymorphic(move _14) -> [return: bb2, unwind unreachable]; + } + + bb2: { + StorageDead(_14); + _16 = Rc:: { value_ptr: copy _11, phantom: const PhantomData::<(rc::RcCounts, u32)>, alloc: const std::alloc::Global }; + StorageDead(_11); + StorageLive(_22); + StorageLive(_17); + _17 = copy ((*_3).0: std::ptr::NonNull); + StorageLive(_20); + StorageLive(_18); + StorageLive(_19); + _18 = copy (_17.0: *const u128); + _19 = copy _18 as *const () (PtrToPtr); + _20 = NonNull::<()> { pointer: move _19 }; + StorageDead(_19); + StorageDead(_18); + _21 = as Clone>::clone::clone_polymorphic(move _20) -> [return: bb3, unwind unreachable]; + } + + bb3: { + StorageDead(_20); + _22 = Rc:: { value_ptr: copy _17, phantom: const PhantomData::<(rc::RcCounts, u128)>, alloc: const std::alloc::Global }; + StorageDead(_17); + StorageLive(_28); + StorageLive(_23); + _23 = copy ((*_4).0: std::ptr::NonNull); + StorageLive(_26); + StorageLive(_24); + StorageLive(_25); + _24 = copy (_23.0: *const HighAlign); + _25 = copy _24 as *const () (PtrToPtr); + _26 = NonNull::<()> { pointer: move _25 }; + StorageDead(_25); + StorageDead(_24); + _27 = as Clone>::clone::clone_polymorphic(move _26) -> [return: bb4, unwind unreachable]; + } + + bb4: { + StorageDead(_26); + _28 = Rc:: { value_ptr: copy _23, phantom: const PhantomData::<(rc::RcCounts, HighAlign)>, alloc: const std::alloc::Global }; + StorageDead(_23); + _0 = (move _10, move _16, move _22, move _28); + StorageDead(_28); + StorageDead(_22); + StorageDead(_16); + StorageDead(_10); + return; + } +} diff --git a/tests/mir-opt/pre-codegen/clone_rcs.clone_rcs_of_many_types.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/clone_rcs.clone_rcs_of_many_types.PreCodegen.after.panic-unwind.mir new file mode 100644 index 0000000000000..33dff667fb309 --- /dev/null +++ b/tests/mir-opt/pre-codegen/clone_rcs.clone_rcs_of_many_types.PreCodegen.after.panic-unwind.mir @@ -0,0 +1,178 @@ +// MIR for `clone_rcs_of_many_types` after PreCodegen + +fn clone_rcs_of_many_types(_1: &Rc, _2: &Rc, _3: &Rc, _4: &Rc) -> (Rc, Rc, Rc, Rc) { + debug a => _1; + debug b => _2; + debug c => _3; + debug d => _4; + let mut _0: (std::rc::Rc, std::rc::Rc, std::rc::Rc, std::rc::Rc); + let mut _10: std::rc::Rc; + let mut _16: std::rc::Rc; + let mut _22: std::rc::Rc; + let mut _28: std::rc::Rc; + scope 1 (inlined as Clone>::clone) { + let _5: std::ptr::NonNull; + let mut _8: std::ptr::NonNull<()>; + let _9: (); + scope 2 { + scope 3 (inlined NonNull::::cast::<()>) { + let mut _7: *const (); + scope 4 (inlined NonNull::::as_ptr) { + let mut _6: *const u8; + } + } + scope 5 (inlined Rc::::from_value_ptr_in) { + } + scope 6 (inlined ::clone) { + } + } + } + scope 7 (inlined as Clone>::clone) { + let _11: std::ptr::NonNull; + let mut _14: std::ptr::NonNull<()>; + let _15: (); + scope 8 { + scope 9 
(inlined NonNull::::cast::<()>) { + let mut _13: *const (); + scope 10 (inlined NonNull::::as_ptr) { + let mut _12: *const u32; + } + } + scope 11 (inlined Rc::::from_value_ptr_in) { + } + scope 12 (inlined ::clone) { + } + } + } + scope 13 (inlined as Clone>::clone) { + let _17: std::ptr::NonNull; + let mut _20: std::ptr::NonNull<()>; + let _21: (); + scope 14 { + scope 15 (inlined NonNull::::cast::<()>) { + let mut _19: *const (); + scope 16 (inlined NonNull::::as_ptr) { + let mut _18: *const u128; + } + } + scope 17 (inlined Rc::::from_value_ptr_in) { + } + scope 18 (inlined ::clone) { + } + } + } + scope 19 (inlined as Clone>::clone) { + let _23: std::ptr::NonNull; + let mut _26: std::ptr::NonNull<()>; + let _27: (); + scope 20 { + scope 21 (inlined NonNull::::cast::<()>) { + let mut _25: *const (); + scope 22 (inlined NonNull::::as_ptr) { + let mut _24: *const HighAlign; + } + } + scope 23 (inlined Rc::::from_value_ptr_in) { + } + scope 24 (inlined ::clone) { + } + } + } + + bb0: { + StorageLive(_10); + StorageLive(_5); + _5 = copy ((*_1).0: std::ptr::NonNull); + StorageLive(_8); + StorageLive(_6); + StorageLive(_7); + _6 = copy (_5.0: *const u8); + _7 = copy _6 as *const () (PtrToPtr); + _8 = NonNull::<()> { pointer: move _7 }; + StorageDead(_7); + StorageDead(_6); + _9 = as Clone>::clone::clone_polymorphic(move _8) -> [return: bb1, unwind continue]; + } + + bb1: { + StorageDead(_8); + _10 = Rc:: { value_ptr: copy _5, phantom: const PhantomData::<(rc::RcCounts, u8)>, alloc: const std::alloc::Global }; + StorageDead(_5); + StorageLive(_16); + StorageLive(_11); + _11 = copy ((*_2).0: std::ptr::NonNull); + StorageLive(_14); + StorageLive(_12); + StorageLive(_13); + _12 = copy (_11.0: *const u32); + _13 = copy _12 as *const () (PtrToPtr); + _14 = NonNull::<()> { pointer: move _13 }; + StorageDead(_13); + StorageDead(_12); + _15 = as Clone>::clone::clone_polymorphic(move _14) -> [return: bb2, unwind: bb7]; + } + + bb2: { + StorageDead(_14); + _16 = Rc:: { value_ptr: copy _11, phantom: const PhantomData::<(rc::RcCounts, u32)>, alloc: const std::alloc::Global }; + StorageDead(_11); + StorageLive(_22); + StorageLive(_17); + _17 = copy ((*_3).0: std::ptr::NonNull); + StorageLive(_20); + StorageLive(_18); + StorageLive(_19); + _18 = copy (_17.0: *const u128); + _19 = copy _18 as *const () (PtrToPtr); + _20 = NonNull::<()> { pointer: move _19 }; + StorageDead(_19); + StorageDead(_18); + _21 = as Clone>::clone::clone_polymorphic(move _20) -> [return: bb3, unwind: bb6]; + } + + bb3: { + StorageDead(_20); + _22 = Rc:: { value_ptr: copy _17, phantom: const PhantomData::<(rc::RcCounts, u128)>, alloc: const std::alloc::Global }; + StorageDead(_17); + StorageLive(_28); + StorageLive(_23); + _23 = copy ((*_4).0: std::ptr::NonNull); + StorageLive(_26); + StorageLive(_24); + StorageLive(_25); + _24 = copy (_23.0: *const HighAlign); + _25 = copy _24 as *const () (PtrToPtr); + _26 = NonNull::<()> { pointer: move _25 }; + StorageDead(_25); + StorageDead(_24); + _27 = as Clone>::clone::clone_polymorphic(move _26) -> [return: bb4, unwind: bb5]; + } + + bb4: { + StorageDead(_26); + _28 = Rc:: { value_ptr: copy _23, phantom: const PhantomData::<(rc::RcCounts, HighAlign)>, alloc: const std::alloc::Global }; + StorageDead(_23); + _0 = (move _10, move _16, move _22, move _28); + StorageDead(_28); + StorageDead(_22); + StorageDead(_16); + StorageDead(_10); + return; + } + + bb5 (cleanup): { + drop(_22) -> [return: bb6, unwind terminate(cleanup)]; + } + + bb6 (cleanup): { + drop(_16) -> [return: bb7, unwind 
terminate(cleanup)];
+    }
+
+    bb7 (cleanup): {
+        drop(_10) -> [return: bb8, unwind terminate(cleanup)];
+    }
+
+    bb8 (cleanup): {
+        resume;
+    }
+}
diff --git a/tests/mir-opt/pre-codegen/clone_rcs.rs b/tests/mir-opt/pre-codegen/clone_rcs.rs
new file mode 100644
index 0000000000000..757a01ee46d26
--- /dev/null
+++ b/tests/mir-opt/pre-codegen/clone_rcs.rs
@@ -0,0 +1,22 @@
+//@ compile-flags: -O -C debuginfo=none
+// EMIT_MIR_FOR_EACH_PANIC_STRATEGY
+
+use std::rc::Rc;
+
+#[repr(align(128))]
+pub struct HighAlign([u8; 128]);
+
+// EMIT_MIR clone_rcs.clone_rcs_of_many_types.PreCodegen.after.mir
+fn clone_rcs_of_many_types(
+    a: &Rc<u8>,
+    b: &Rc<u32>,
+    c: &Rc<u128>,
+    d: &Rc<HighAlign>,
+) -> (Rc<u8>, Rc<u32>, Rc<u128>, Rc<HighAlign>) {
+    // CHECK-NOT: inlined{{.+}}clone_polymorphic
+    // CHECK: clone_polymorphic({{.+}}) ->
+    // CHECK: clone_polymorphic({{.+}}) ->
+    // CHECK: clone_polymorphic({{.+}}) ->
+    // CHECK: clone_polymorphic({{.+}}) ->
+    (a.clone(), b.clone(), c.clone(), d.clone())
+}
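To make the MIR dumps above easier to read: a single `clone_polymorphic` helper can serve `Rc<u8>`, `Rc<u32>`, `Rc<u128>`, and `Rc<HighAlign>` alike because incrementing the strong count only touches the fixed-size counts header immediately before the value, so the pointee type never enters the picture. The rough sketch below is illustrative only; `Counts`, `bump_strong`, and the `repr(C)` harness are invented names for this example, not the patch's internals.

use std::cell::Cell;
use std::mem::offset_of;
use std::ptr::NonNull;

// Illustrative stand-in for the counts header stored just before the value.
struct Counts {
    weak: Cell<usize>,
    strong: Cell<usize>,
}

// Type-erased "bump the strong count" in the spirit of the `clone_polymorphic`
// calls in the MIR: it only needs the erased value pointer, so one copy of the
// machine code can serve every pointee type.
//
// SAFETY: `value_ptr` must point at the value of a live allocation laid out as
// `Counts` immediately followed by the value, with provenance over the counts.
unsafe fn bump_strong(value_ptr: NonNull<()>) {
    let counts = unsafe { value_ptr.cast::<Counts>().sub(1).as_ref() };
    counts.strong.set(counts.strong.get() + 1);
}

// A hand-rolled "allocation" with the same shape, just to exercise the helper.
#[repr(C)]
struct HeaderAndValue {
    counts: Counts,
    value: u32,
}

fn main() {
    let backing = HeaderAndValue {
        counts: Counts { weak: Cell::new(1), strong: Cell::new(1) },
        value: 42,
    };
    // Derive the value pointer from a pointer to the whole struct so that it
    // keeps provenance over the counts sitting in front of the value.
    let base = NonNull::from(&backing).cast::<u8>();
    let value_ptr = unsafe { base.add(offset_of!(HeaderAndValue, value)) }.cast::<()>();
    unsafe { bump_strong(value_ptr) };
    assert_eq!(backing.counts.strong.get(), 2);
    assert_eq!(backing.counts.weak.get(), 1);
}

The same type independence is what lets `-Zmerge-functions=aliases` collapse the per-type copies in the rc-dedup codegen test.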