From 1308bf660e3d50fecfa46959af6e625451ceb5b7 Mon Sep 17 00:00:00 2001 From: EFanZh Date: Tue, 12 Nov 2024 02:35:04 +0800 Subject: [PATCH] Store pointer to data directly in `Rc` and `Arc` --- library/alloc/src/rc.rs | 292 ++++++++++++------------ library/alloc/src/sync.rs | 254 ++++++++++----------- src/etc/gdb_providers.py | 12 +- src/etc/lldb_batchmode.py | 4 +- src/etc/lldb_lookup.py | 4 +- src/etc/lldb_providers.py | 30 ++- tests/codegen/placement-new.rs | 12 +- tests/codegen/zero-cost-rc-arc-deref.rs | 82 +++++++ tests/debuginfo/strings-and-strs.rs | 2 +- tests/ui/abi/compatibility.rs | 4 +- 10 files changed, 403 insertions(+), 293 deletions(-) create mode 100644 tests/codegen/zero-cost-rc-arc-deref.rs diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index fc8646e96d948..233a9bf2df522 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -297,6 +297,36 @@ fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout { Layout::new::>().extend(layout).unwrap().0.pad_to_align() } +/// Get a pointer to `RcInner` from a pointer to its containing `T` value. +/// +/// # Safety +/// +/// The pointer must point to (and have valid metadata for) a previously +/// valid instance of `T` contained in a `RcInner`, but the `RcInner` is allowed to be dropped. +#[inline] +unsafe fn inner_ptr_from_value_ptr(ptr: NonNull) -> NonNull> +where + T: ?Sized, +{ + let ptr = ptr.as_ptr(); + + unsafe { NonNull::new_unchecked(ptr.byte_sub(data_offset(ptr)) as _) } +} + +/// Get a pointer to `T` value from a pointer to its containing `RcInner`. +/// +/// # Safety +/// +/// The pointer must point to (and have valid metadata for) a previously +/// valid instance of `RcInner`, but the `RcInner` is allowed to be dropped. +#[inline] +unsafe fn value_ptr_from_inner_ptr(ptr: NonNull>) -> NonNull +where + T: ?Sized, +{ + unsafe { NonNull::new_unchecked(&raw mut (*ptr.as_ptr()).value) } +} + /// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference /// Counted'. /// @@ -314,7 +344,7 @@ pub struct Rc< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - ptr: NonNull>, + ptr: NonNull, phantom: PhantomData>, alloc: A, } @@ -347,34 +377,33 @@ impl Rc { unsafe { Self::from_inner_in(ptr, Global) } } + #[cfg(not(no_global_oom_handling))] #[inline] - unsafe fn from_ptr(ptr: *mut RcInner) -> Self { - unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } + unsafe fn from_value(ptr: NonNull) -> Self { + unsafe { Self::from_value_in(ptr, Global) } } } impl Rc { #[inline(always)] fn inner(&self) -> &RcInner { - // This unsafety is ok because while this Rc is alive we're guaranteed - // that the inner pointer is valid. - unsafe { self.ptr.as_ref() } + unsafe { inner_ptr_from_value_ptr(self.ptr).as_ref() } } #[inline] - fn into_inner_with_allocator(this: Self) -> (NonNull>, A) { + fn into_ptr_with_allocator(this: Self) -> (NonNull, A) { let this = mem::ManuallyDrop::new(this); (this.ptr, unsafe { ptr::read(&this.alloc) }) } #[inline] unsafe fn from_inner_in(ptr: NonNull>, alloc: A) -> Self { - Self { ptr, phantom: PhantomData, alloc } + unsafe { Self::from_value_in(value_ptr_from_inner_ptr(ptr), alloc) } } #[inline] - unsafe fn from_ptr_in(ptr: *mut RcInner, alloc: A) -> Self { - unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) } + unsafe fn from_value_in(ptr: NonNull, alloc: A) -> Self { + Self { ptr, phantom: PhantomData, alloc } } // Non-inlined part of `drop`. @@ -388,7 +417,7 @@ impl Rc { // Destroy the contained object. 
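For illustration, a minimal self-contained sketch of the inversion the two helpers above perform, using a toy `Inner` type and hypothetical names (the real helpers also cover unsized `T` via `data_offset`; this sketch assumes sized payloads):

```rust
use std::ptr::NonNull;

// Toy stand-in for RcInner<T>/ArcInner<T>: two counters, then the payload.
#[repr(C)]
struct Inner<T> {
    strong: usize,
    weak: usize,
    value: T,
}

// Mirror of `inner_ptr_from_value_ptr`, restricted to sized `T`.
unsafe fn inner_from_value<T>(ptr: NonNull<T>) -> NonNull<Inner<T>> {
    let offset = std::mem::offset_of!(Inner<T>, value);
    // Step back over the counters to recover the container.
    unsafe { NonNull::new_unchecked(ptr.as_ptr().byte_sub(offset) as *mut Inner<T>) }
}

// Mirror of `value_ptr_from_inner_ptr`.
unsafe fn value_from_inner<T>(ptr: NonNull<Inner<T>>) -> NonNull<T> {
    unsafe { NonNull::new_unchecked(&raw mut (*ptr.as_ptr()).value) }
}

fn main() {
    let mut inner = Inner { strong: 1, weak: 1, value: 42u32 };
    let inner_ptr = NonNull::from(&mut inner);
    let value_ptr = unsafe { value_from_inner(inner_ptr) };
    // The two conversions invert each other.
    assert_eq!(unsafe { inner_from_value(value_ptr) }, inner_ptr);
}
```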
// We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed. unsafe { - ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value); + ptr::drop_in_place(self.ptr.as_ptr()); } } } @@ -501,7 +530,7 @@ impl Rc { #[must_use] pub fn new_uninit() -> Rc> { unsafe { - Rc::from_ptr(Rc::allocate_for_layout( + Rc::from_inner(Rc::allocate_for_layout( Layout::new::(), |layout| Global.allocate(layout), <*mut u8>::cast, @@ -534,7 +563,7 @@ impl Rc { #[must_use] pub fn new_zeroed() -> Rc> { unsafe { - Rc::from_ptr(Rc::allocate_for_layout( + Rc::from_inner(Rc::allocate_for_layout( Layout::new::(), |layout| Global.allocate_zeroed(layout), <*mut u8>::cast, @@ -595,7 +624,7 @@ impl Rc { // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( + Ok(Rc::from_inner(Rc::try_allocate_for_layout( Layout::new::(), |layout| Global.allocate(layout), <*mut u8>::cast, @@ -628,7 +657,7 @@ impl Rc { //#[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( + Ok(Rc::from_inner(Rc::try_allocate_for_layout( Layout::new::(), |layout| Global.allocate_zeroed(layout), <*mut u8>::cast, @@ -697,7 +726,7 @@ impl Rc { #[inline] pub fn new_uninit_in(alloc: A) -> Rc, A> { unsafe { - Rc::from_ptr_in( + Rc::from_inner_in( Rc::allocate_for_layout( Layout::new::(), |layout| alloc.allocate(layout), @@ -735,7 +764,7 @@ impl Rc { #[inline] pub fn new_zeroed_in(alloc: A) -> Rc, A> { unsafe { - Rc::from_ptr_in( + Rc::from_inner_in( Rc::allocate_for_layout( Layout::new::(), |layout| alloc.allocate_zeroed(layout), @@ -793,8 +822,9 @@ impl Rc { )); let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into(); let init_ptr: NonNull> = uninit_ptr.cast(); + let value_ptr = unsafe { value_ptr_from_inner_ptr(init_ptr) }; - let weak = Weak { ptr: init_ptr, alloc: alloc }; + let weak = Weak { ptr: value_ptr, alloc }; // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. If @@ -806,7 +836,7 @@ impl Rc { let strong = unsafe { let inner = init_ptr.as_ptr(); - ptr::write(&raw mut (*inner).value, data); + ptr::write(value_ptr.as_ptr(), data); let prev_value = (*inner).strong.get(); debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); @@ -818,7 +848,7 @@ impl Rc { // and forgetting the weak reference. 
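The `new_cyclic_in` path above hands the closure a `Weak` that now stores the value pointer directly. A small usage sketch of the stable `Rc::new_cyclic` API this implements:

```rust
use std::rc::{Rc, Weak};

struct Node {
    parent: Weak<Node>,
}

fn main() {
    // The closure runs before any strong reference exists; the Weak it
    // receives already points at the (still uninitialized) value slot.
    let node = Rc::new_cyclic(|weak| Node { parent: weak.clone() });
    assert!(node.parent.upgrade().is_some());
}
```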
let alloc = weak.into_raw_with_allocator().1; - Rc::from_inner_in(init_ptr, alloc) + Rc::from_value_in(value_ptr, alloc) }; strong @@ -880,7 +910,7 @@ impl Rc { #[inline] pub fn try_new_uninit_in(alloc: A) -> Result, A>, AllocError> { unsafe { - Ok(Rc::from_ptr_in( + Ok(Rc::from_inner_in( Rc::try_allocate_for_layout( Layout::new::(), |layout| alloc.allocate(layout), @@ -919,7 +949,7 @@ impl Rc { #[inline] pub fn try_new_zeroed_in(alloc: A) -> Result, A>, AllocError> { unsafe { - Ok(Rc::from_ptr_in( + Ok(Rc::from_inner_in( Rc::try_allocate_for_layout( Layout::new::(), |layout| alloc.allocate_zeroed(layout), @@ -1045,7 +1075,7 @@ impl Rc<[T]> { #[stable(feature = "new_uninit", since = "1.82.0")] #[must_use] pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) } + unsafe { Rc::from_inner(Rc::allocate_for_slice(len)) } } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -1073,7 +1103,7 @@ impl Rc<[T]> { #[must_use] pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit]> { unsafe { - Rc::from_ptr(Rc::allocate_for_layout( + Rc::from_inner(Rc::allocate_for_layout( Layout::array::(len).unwrap(), |layout| Global.allocate_zeroed(layout), |mem| { @@ -1115,7 +1145,7 @@ impl Rc<[T], A> { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit], A> { - unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) } + unsafe { Rc::from_inner_in(Rc::allocate_for_slice_in(len, &alloc), alloc) } } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -1145,7 +1175,7 @@ impl Rc<[T], A> { #[inline] pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit], A> { unsafe { - Rc::from_ptr_in( + Rc::from_inner_in( Rc::allocate_for_layout( Layout::array::(len).unwrap(), |layout| alloc.allocate_zeroed(layout), @@ -1192,8 +1222,8 @@ impl Rc, A> { #[stable(feature = "new_uninit", since = "1.82.0")] #[inline] pub unsafe fn assume_init(self) -> Rc { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - unsafe { Rc::from_inner_in(ptr.cast(), alloc) } + let (ptr, alloc) = Rc::into_ptr_with_allocator(self); + unsafe { Rc::from_value_in(ptr.cast(), alloc) } } } @@ -1232,8 +1262,8 @@ impl Rc<[mem::MaybeUninit], A> { #[stable(feature = "new_uninit", since = "1.82.0")] #[inline] pub unsafe fn assume_init(self) -> Rc<[T], A> { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) } + let (ptr, alloc) = Rc::into_ptr_with_allocator(self); + unsafe { Rc::from_value_in(NonNull::new_unchecked(ptr.as_ptr() as _), alloc) } } } @@ -1430,11 +1460,9 @@ impl Rc { #[must_use = "losing the pointer will leak memory"] #[unstable(feature = "allocator_api", issue = "32838")] pub fn into_raw_with_allocator(this: Self) -> (*const T, A) { - let this = mem::ManuallyDrop::new(this); - let ptr = Self::as_ptr(&this); - // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped - let alloc = unsafe { ptr::read(&this.alloc) }; - (ptr, alloc) + let (ptr, alloc) = Self::into_ptr_with_allocator(this); + + (ptr.as_ptr(), alloc) } /// Provides a raw pointer to the data. 
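The `assume_init` conversions above become plain casts of the stored value pointer. A usage sketch of the stable API they back:

```rust
use std::rc::Rc;

fn main() {
    let mut rc = Rc::<[u32]>::new_uninit_slice(3);
    // Unique at this point, so `get_mut` hands out the uninitialized slots.
    for (i, slot) in Rc::get_mut(&mut rc).unwrap().iter_mut().enumerate() {
        slot.write(i as u32);
    }
    // SAFETY: every element was initialized above.
    let rc: Rc<[u32]> = unsafe { rc.assume_init() };
    assert_eq!(*rc, [0, 1, 2]);
}
```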
@@ -1456,12 +1484,7 @@ impl Rc { #[stable(feature = "weak_into_raw", since = "1.45.0")] #[rustc_never_returns_null_ptr] pub fn as_ptr(this: &Self) -> *const T { - let ptr: *mut RcInner = NonNull::as_ptr(this.ptr); - - // SAFETY: This cannot go through Deref::deref or Rc::inner because - // this is required to retain raw/mut provenance such that e.g. `get_mut` can - // write through the pointer after the Rc is recovered through `from_raw`. - unsafe { &raw mut (*ptr).value } + this.ptr.as_ptr() } /// Constructs an `Rc` from a raw pointer in the provided allocator. @@ -1533,12 +1556,7 @@ impl Rc { /// ``` #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - let offset = unsafe { data_offset(ptr) }; - - // Reverse the offset to find the original RcInner. - let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner }; - - unsafe { Self::from_ptr_in(rc_ptr, alloc) } + unsafe { Self::from_value_in(NonNull::new_unchecked(ptr.cast_mut()), alloc) } } /// Creates a new [`Weak`] pointer to this allocation. @@ -1780,9 +1798,7 @@ impl Rc { #[inline] #[unstable(feature = "get_mut_unchecked", issue = "63292")] pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { - // We are careful to *not* create a reference covering the "count" fields, as - // this would conflict with accesses to the reference counts (e.g. by `Weak`). - unsafe { &mut (*this.ptr.as_ptr()).value } + unsafe { this.ptr.as_mut() } } #[inline] @@ -1869,13 +1885,13 @@ impl Rc { let this_data_ref: &T = &**this; // `in_progress` drops the allocation if we panic before finishing initializing it. - let mut in_progress: UniqueRcUninit = + let in_progress: UniqueRcUninit = UniqueRcUninit::new(this_data_ref, this.alloc.clone()); // Initialize with clone of this. let initialized_clone = unsafe { // Clone. If the clone panics, `in_progress` will be dropped and clean up. - this_data_ref.clone_to_uninit(in_progress.data_ptr()); + this_data_ref.clone_to_uninit(in_progress.ptr.as_ptr()); // Cast type of pointer, now that it is initialized. in_progress.into_rc() }; @@ -1887,7 +1903,7 @@ impl Rc { // We don't need panic-protection like the above branch does, but we might as well // use the same mechanism. - let mut in_progress: UniqueRcUninit = + let in_progress: UniqueRcUninit = UniqueRcUninit::new(&**this, this.alloc.clone()); unsafe { // Initialize `in_progress` with move of **this. @@ -1895,7 +1911,7 @@ impl Rc { // operation that just copies a value based on its `size_of_val()`. ptr::copy_nonoverlapping( ptr::from_ref(&**this).cast::(), - in_progress.data_ptr().cast::(), + in_progress.ptr.cast::().as_ptr(), size_of_val, ); @@ -1912,7 +1928,7 @@ impl Rc { // reference count is guaranteed to be 1 at this point, and we required // the `Rc` itself to be `mut`, so we're returning the only possible // reference to the allocation. 
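With the value pointer stored in the field, `Rc::as_ptr` above reduces to a field read. A quick sketch of the observable behavior, which is unchanged:

```rust
use std::rc::Rc;

fn main() {
    let a = Rc::new(5);
    let b = Rc::clone(&a);
    // Both handles report the same payload address.
    assert!(std::ptr::eq(Rc::as_ptr(&a), Rc::as_ptr(&b)));
    assert!(Rc::ptr_eq(&a, &b));
}
```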
- unsafe { &mut this.ptr.as_mut().value } + unsafe { this.ptr.as_mut() } } } @@ -1976,8 +1992,8 @@ impl Rc { pub fn downcast(self) -> Result, Self> { if (*self).is::() { unsafe { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - Ok(Rc::from_inner_in(ptr.cast(), alloc)) + let (ptr, alloc) = Rc::into_ptr_with_allocator(self); + Ok(Rc::from_value_in(ptr.cast(), alloc)) } } else { Err(self) @@ -2014,8 +2030,8 @@ impl Rc { #[unstable(feature = "downcast_unchecked", issue = "90850")] pub unsafe fn downcast_unchecked(self) -> Rc { unsafe { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - Rc::from_inner_in(ptr.cast(), alloc) + let (ptr, alloc) = Rc::into_ptr_with_allocator(self); + Rc::from_value_in(ptr.cast(), alloc) } } } @@ -2031,7 +2047,7 @@ impl Rc { value_layout: Layout, allocate: impl FnOnce(Layout) -> Result, AllocError>, mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner, - ) -> *mut RcInner { + ) -> NonNull> { let layout = rc_inner_layout_for_value_layout(value_layout); unsafe { Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner) @@ -2050,7 +2066,7 @@ impl Rc { value_layout: Layout, allocate: impl FnOnce(Layout) -> Result, AllocError>, mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner, - ) -> Result<*mut RcInner, AllocError> { + ) -> Result>, AllocError> { let layout = rc_inner_layout_for_value_layout(value_layout); // Allocate for the layout. @@ -2063,16 +2079,16 @@ impl Rc { (&raw mut (*inner).strong).write(Cell::new(1)); (&raw mut (*inner).weak).write(Cell::new(1)); - } - Ok(inner) + Ok(NonNull::new_unchecked(inner)) + } } } impl Rc { /// Allocates an `RcInner` with sufficient space for an unsized inner value #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner { + unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> NonNull> { // Allocate for the `RcInner` using the given value. unsafe { Rc::::allocate_for_layout( @@ -2088,11 +2104,12 @@ impl Rc { unsafe { let value_size = size_of_val(&*src); let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src)); + let value_ptr = value_ptr_from_inner_ptr(ptr); // Copy value as bytes ptr::copy_nonoverlapping( (&raw const *src) as *const u8, - (&raw mut (*ptr).value) as *mut u8, + value_ptr.cast().as_ptr(), value_size, ); @@ -2101,7 +2118,7 @@ impl Rc { let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop, alloc.by_ref()); drop(src); - Self::from_ptr_in(ptr, alloc) + Self::from_value_in(value_ptr, alloc) } } } @@ -2109,7 +2126,7 @@ impl Rc { impl Rc<[T]> { /// Allocates an `RcInner<[T]>` with the given length. 
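The `downcast` paths above now only cast the stored value pointer; behavior at the API level stays the same. A sketch:

```rust
use std::any::Any;
use std::rc::Rc;

fn main() {
    let erased: Rc<dyn Any> = Rc::new("payload".to_string());
    match erased.downcast::<String>() {
        Ok(string) => assert_eq!(&*string, "payload"),
        Err(_) => unreachable!("the erased value is a String"),
    }
}
```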
#[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> { + unsafe fn allocate_for_slice(len: usize) -> NonNull> { unsafe { Self::allocate_for_layout( Layout::array::(len).unwrap(), @@ -2126,8 +2143,10 @@ impl Rc<[T]> { unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { unsafe { let ptr = Self::allocate_for_slice(v.len()); - ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len()); - Self::from_ptr(ptr) + let value_ptr = value_ptr_from_inner_ptr(ptr); + ptr::copy_nonoverlapping(v.as_ptr(), value_ptr.as_non_null_ptr().as_ptr(), v.len()); + + Self::from_value(value_ptr) } } @@ -2160,11 +2179,13 @@ impl Rc<[T]> { unsafe { let ptr = Self::allocate_for_slice(len); - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value_raw(ptr); + let mem = ptr.as_ptr() as *mut _ as *mut u8; + let layout = Layout::for_value_raw(ptr.as_ptr()); + + let value_ptr = value_ptr_from_inner_ptr(ptr); // Pointer to first element - let elems = (&raw mut (*ptr).value) as *mut T; + let elems = value_ptr.as_non_null_ptr().as_ptr(); let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; @@ -2176,7 +2197,7 @@ impl Rc<[T]> { // All clear. Forget the guard so it doesn't free the new RcInner. mem::forget(guard); - Self::from_ptr(ptr) + Self::from_value(value_ptr) } } } @@ -2185,7 +2206,7 @@ impl Rc<[T], A> { /// Allocates an `RcInner<[T]>` with the given length. #[inline] #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> { + unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> NonNull> { unsafe { Rc::<[T]>::allocate_for_layout( Layout::array::(len).unwrap(), @@ -2224,7 +2245,7 @@ impl Deref for Rc { #[inline(always)] fn deref(&self) -> &T { - &self.inner().value + unsafe { self.ptr.as_ref() } } } @@ -2298,7 +2319,7 @@ impl Clone for Rc { fn clone(&self) -> Self { unsafe { self.inner().inc_strong(); - Self::from_inner_in(self.ptr, self.alloc.clone()) + Self::from_value_in(self.ptr, self.alloc.clone()) } } } @@ -2733,13 +2754,14 @@ impl From> for Rc<[T], A> { let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); let rc_ptr = Self::allocate_for_slice_in(len, &alloc); - ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len); + let value_ptr = value_ptr_from_inner_ptr(rc_ptr); + ptr::copy_nonoverlapping(vec_ptr, value_ptr.as_non_null_ptr().as_ptr(), len); // Create a `Vec` with length 0, to deallocate the buffer // without dropping its contents or the allocator let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc); - Self::from_ptr_in(rc_ptr, alloc) + Self::from_value_in(value_ptr, alloc) } } } @@ -2796,8 +2818,8 @@ impl TryFrom> for Rc<[T; N], A> { fn try_from(boxed_slice: Rc<[T], A>) -> Result { if boxed_slice.len() == N { - let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice); - Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) }) + let (ptr, alloc) = Rc::into_ptr_with_allocator(boxed_slice); + Ok(unsafe { Rc::from_value_in(ptr.cast(), alloc) }) } else { Err(boxed_slice) } @@ -2922,7 +2944,7 @@ pub struct Weak< // to allocate space on the heap. That's not a value a real pointer // will ever have because RcInner has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. 
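The `TryFrom<Rc<[T]>>` conversion above likewise re-tags the stored pointer without any offset math. A usage sketch:

```rust
use std::rc::Rc;

fn main() {
    let slice: Rc<[u8]> = Rc::from(vec![1, 2, 3, 4]);
    // Succeeds exactly when the runtime length matches `N`.
    let array: Rc<[u8; 4]> = slice.try_into().expect("length matches");
    assert_eq!(*array, [1, 2, 3, 4]);
}
```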
- ptr: NonNull>, + ptr: NonNull, alloc: A, } @@ -2957,9 +2979,7 @@ impl Weak { #[must_use] pub const fn new() -> Weak { Weak { - ptr: unsafe { - NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) - }, + ptr: unsafe { NonNull::new_unchecked(ptr::without_provenance_mut::(usize::MAX)) }, alloc: Global, } } @@ -2984,9 +3004,7 @@ impl Weak { #[unstable(feature = "allocator_api", issue = "32838")] pub fn new_in(alloc: A) -> Weak { Weak { - ptr: unsafe { - NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) - }, + ptr: unsafe { NonNull::new_unchecked(ptr::without_provenance_mut::(usize::MAX)) }, alloc, } } @@ -3089,18 +3107,7 @@ impl Weak { #[must_use] #[stable(feature = "rc_as_ptr", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { - let ptr: *mut RcInner = NonNull::as_ptr(self.ptr); - - if is_dangling(ptr) { - // If the pointer is dangling, we return the sentinel directly. This cannot be - // a valid payload address, as the payload is at least as aligned as RcInner (usize). - ptr as *const T - } else { - // SAFETY: if is_dangling returns false, then the pointer is dereferenceable. - // The payload may be dropped at this point, and we have to maintain provenance, - // so use raw pointer manipulation. - unsafe { &raw mut (*ptr).value } - } + self.ptr.as_ptr() } /// Consumes the `Weak` and turns it into a raw pointer. @@ -3221,22 +3228,7 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - // See Weak::as_ptr for context on how the input pointer is derived. - - let ptr = if is_dangling(ptr) { - // This is a dangling Weak. - ptr as *mut RcInner - } else { - // Otherwise, we're guaranteed the pointer came from a nondangling Weak. - // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. - let offset = unsafe { data_offset(ptr) }; - // Thus, we reverse the offset to get the whole RcInner. - // SAFETY: the pointer originated from a Weak, so this offset is safe. - unsafe { ptr.byte_sub(offset) as *mut RcInner } - }; - - // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc } + Weak { ptr: unsafe { NonNull::new_unchecked(ptr.cast_mut()) }, alloc } } /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying @@ -3276,7 +3268,7 @@ impl Weak { } else { unsafe { inner.inc_strong(); - Some(Rc::from_inner_in(self.ptr, self.alloc.clone())) + Some(Rc::from_value_in(self.ptr, self.alloc.clone())) } } } @@ -3318,7 +3310,8 @@ impl Weak { // the field may be mutated concurrently (for example, if the last `Rc` // is dropped, the data field will be dropped in-place). Some(unsafe { - let ptr = self.ptr.as_ptr(); + let ptr = inner_ptr_from_value_ptr(self.ptr).as_ptr(); + WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } }) } @@ -3405,7 +3398,9 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak { // the strong pointers have disappeared. 
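The dangling-`Weak` sentinel is now a `usize::MAX` value pointer rather than an inner pointer; observable behavior is unchanged. A sketch:

```rust
use std::rc::{Rc, Weak};

fn main() {
    // A Weak that never had an allocation: upgrades always fail.
    let empty: Weak<u32> = Weak::new();
    assert!(empty.upgrade().is_none());

    let rc = Rc::new(7);
    let weak = Rc::downgrade(&rc);
    assert_eq!(weak.upgrade().as_deref(), Some(&7));
    drop(rc);
    // The strong count reached zero, so the payload is gone.
    assert!(weak.upgrade().is_none());
}
```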
if inner.weak() == 0 { unsafe { - self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); + let inner_ptr = inner_ptr_from_value_ptr(self.ptr); + + self.alloc.deallocate(inner_ptr.cast(), Layout::for_value_raw(inner_ptr.as_ptr())); } } } @@ -3646,7 +3641,7 @@ pub struct UniqueRc< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - ptr: NonNull>, + ptr: NonNull, phantom: PhantomData>, alloc: A, } @@ -3692,11 +3687,20 @@ impl UniqueRc { }, alloc, )); - Self { ptr: ptr.into(), phantom: PhantomData, alloc } + Self { + ptr: unsafe { value_ptr_from_inner_ptr(ptr.as_non_null_ptr()) }, + phantom: PhantomData, + alloc, + } } } impl UniqueRc { + #[inline(always)] + fn inner(&self) -> &RcInner { + unsafe { inner_ptr_from_value_ptr(self.ptr).as_ref() } + } + /// Converts the `UniqueRc` into a regular [`Rc`]. /// /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the `value` that @@ -3706,7 +3710,7 @@ impl UniqueRc { /// references. #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn into_rc(this: Self) -> Rc { - let mut this = ManuallyDrop::new(this); + let this = ManuallyDrop::new(this); // Move the allocator out. // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in @@ -3716,8 +3720,9 @@ impl UniqueRc { // SAFETY: This pointer was allocated at creation time so we know it is valid. unsafe { // Convert our weak reference into a strong reference - this.ptr.as_mut().strong.set(1); - Rc::from_inner_in(this.ptr, alloc) + + this.inner().strong.set(1); + Rc::from_value_in(this.ptr, alloc) } } } @@ -3729,11 +3734,8 @@ impl UniqueRc { /// to a [`Rc`] using [`UniqueRc::into_rc`]. #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn downgrade(this: &Self) -> Weak { - // SAFETY: This pointer was allocated at creation time and we guarantee that we only have - // one strong reference before converting to a regular Rc. - unsafe { - this.ptr.as_ref().inc_weak(); - } + this.inner().inc_weak(); + Weak { ptr: this.ptr, alloc: this.alloc.clone() } } } @@ -3744,7 +3746,7 @@ impl Deref for UniqueRc { fn deref(&self) -> &T { // SAFETY: This pointer was allocated at creation time so we know it is valid. - unsafe { &self.ptr.as_ref().value } + unsafe { self.ptr.as_ref() } } } @@ -3757,7 +3759,7 @@ impl DerefMut for UniqueRc { // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we // have unique ownership and therefore it's safe to make a mutable reference because // `UniqueRc` owns the only strong reference to itself. - unsafe { &mut (*self.ptr.as_ptr()).value } + unsafe { self.ptr.as_mut() } } } @@ -3768,11 +3770,15 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc { // destroy the contained object drop_in_place(DerefMut::deref_mut(self)); + let inner = self.inner(); + // remove the implicit "strong weak" pointer now that we've destroyed the contents. - self.ptr.as_ref().dec_weak(); + inner.dec_weak(); - if self.ptr.as_ref().weak() == 0 { - self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); + if inner.weak() == 0 { + let inner_ptr = inner_ptr_from_value_ptr(self.ptr); + + self.alloc.deallocate(inner_ptr.cast(), Layout::for_value_raw(inner_ptr.as_ptr())); } } } @@ -3786,7 +3792,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc { /// which `MaybeUninit` does not. 
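The `UniqueRc` changes above route its field through the same value pointer. A usage sketch of the API as it appears in this patch; note this is nightly-only behind `unique_rc_arc` (tracking issue #112566):

```rust
#![feature(unique_rc_arc)]

use std::rc::{Rc, UniqueRc, Weak};

struct Node {
    edge: Weak<Node>,
}

fn main() {
    // No strong reference exists yet, so the value can still be mutated...
    let mut unique = UniqueRc::new(Node { edge: Weak::new() });
    // ...including storing a weak self-reference.
    unique.edge = UniqueRc::downgrade(&unique);
    // Converting to Rc flips the strong count to 1, making upgrades work.
    let rc: Rc<Node> = UniqueRc::into_rc(unique);
    assert!(rc.edge.upgrade().is_some());
}
```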
#[cfg(not(no_global_oom_handling))] struct UniqueRcUninit { - ptr: NonNull>, + ptr: NonNull, layout_for_value: Layout, alloc: Option, } @@ -3803,20 +3809,18 @@ impl UniqueRcUninit { |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner), ) }; - Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) } - } - - /// Returns the pointer to be written into to initialize the [`Rc`]. - fn data_ptr(&mut self) -> *mut T { - let offset = data_offset_align(self.layout_for_value.align()); - unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T } + Self { + ptr: unsafe { value_ptr_from_inner_ptr(ptr) }, + layout_for_value: layout, + alloc: Some(alloc), + } } /// Upgrade this into a normal [`Rc`]. /// /// # Safety /// - /// The data must have been initialized (by writing to [`Self::data_ptr()`]). + /// The data must have been initialized (by writing to [`Self::ptr`]). unsafe fn into_rc(self) -> Rc { let mut this = ManuallyDrop::new(self); let ptr = this.ptr; @@ -3824,7 +3828,7 @@ impl UniqueRcUninit { // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible // for having initialized the data. - unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) } + unsafe { Rc::from_value_in(ptr, alloc) } } } @@ -3836,7 +3840,7 @@ impl Drop for UniqueRcUninit { // * We own the pointer unless into_rc() was called, which forgets us. unsafe { self.alloc.take().unwrap().deallocate( - self.ptr.cast(), + inner_ptr_from_value_ptr(self.ptr).cast(), rc_inner_layout_for_value_layout(self.layout_for_value), ); } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 98a2fe242570f..d2b7334d01a97 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -242,7 +242,7 @@ pub struct Arc< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - ptr: NonNull>, + ptr: NonNull, phantom: PhantomData>, alloc: A, } @@ -266,26 +266,28 @@ impl Arc { unsafe { Self::from_inner_in(ptr, Global) } } - unsafe fn from_ptr(ptr: *mut ArcInner) -> Self { - unsafe { Self::from_ptr_in(ptr, Global) } + #[cfg(not(no_global_oom_handling))] + #[inline] + unsafe fn from_data(ptr: NonNull) -> Self { + unsafe { Self::from_data_in(ptr, Global) } } } impl Arc { #[inline] - fn into_inner_with_allocator(this: Self) -> (NonNull>, A) { + fn into_ptr_with_allocator(this: Self) -> (NonNull, A) { let this = mem::ManuallyDrop::new(this); (this.ptr, unsafe { ptr::read(&this.alloc) }) } #[inline] unsafe fn from_inner_in(ptr: NonNull>, alloc: A) -> Self { - Self { ptr, phantom: PhantomData, alloc } + unsafe { Self::from_data_in(data_ptr_from_inner_ptr(ptr), alloc) } } #[inline] - unsafe fn from_ptr_in(ptr: *mut ArcInner, alloc: A) -> Self { - unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) } + unsafe fn from_data_in(ptr: NonNull, alloc: A) -> Self { + Self { ptr, phantom: PhantomData, alloc } } } @@ -321,7 +323,7 @@ pub struct Weak< // to allocate space on the heap. That's not a value a real pointer // will ever have because RcInner has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. - ptr: NonNull>, + ptr: NonNull, alloc: A, } @@ -369,6 +371,36 @@ fn arcinner_layout_for_value_layout(layout: Layout) -> Layout { unsafe impl Send for ArcInner {} unsafe impl Sync for ArcInner {} +/// Get a pointer to `ArcInner` from a pointer to its containing `T` value. 
+/// +/// # Safety +/// +/// The pointer must point to (and have valid metadata for) a previously +/// valid instance of `T` contained in a `ArcInner`, but the `ArcInner` is allowed to be dropped. +#[inline(always)] +unsafe fn inner_ptr_from_data_ptr(ptr: NonNull) -> NonNull> +where + T: ?Sized, +{ + let ptr = ptr.as_ptr(); + + unsafe { NonNull::new_unchecked(ptr.byte_sub(data_offset(ptr)) as _) } +} + +/// Get a pointer to `T` value from a pointer to its containing `ArcInner`. +/// +/// # Safety +/// +/// The pointer must point to (and have valid metadata for) a previously +/// valid instance of `ArcInner`, but the `ArcInner` is allowed to be dropped. +#[inline(always)] +unsafe fn data_ptr_from_inner_ptr(ptr: NonNull>) -> NonNull +where + T: ?Sized, +{ + unsafe { NonNull::new_unchecked(&raw mut (*ptr.as_ptr()).data) } +} + impl Arc { /// Constructs a new `Arc`. /// @@ -478,7 +510,7 @@ impl Arc { #[must_use] pub fn new_uninit() -> Arc> { unsafe { - Arc::from_ptr(Arc::allocate_for_layout( + Arc::from_inner(Arc::allocate_for_layout( Layout::new::(), |layout| Global.allocate(layout), <*mut u8>::cast, @@ -512,7 +544,7 @@ impl Arc { #[must_use] pub fn new_zeroed() -> Arc> { unsafe { - Arc::from_ptr(Arc::allocate_for_layout( + Arc::from_inner(Arc::allocate_for_layout( Layout::new::(), |layout| Global.allocate_zeroed(layout), <*mut u8>::cast, @@ -585,7 +617,7 @@ impl Arc { // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { unsafe { - Ok(Arc::from_ptr(Arc::try_allocate_for_layout( + Ok(Arc::from_inner(Arc::try_allocate_for_layout( Layout::new::(), |layout| Global.allocate(layout), <*mut u8>::cast, @@ -618,7 +650,7 @@ impl Arc { // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { unsafe { - Ok(Arc::from_ptr(Arc::try_allocate_for_layout( + Ok(Arc::from_inner(Arc::try_allocate_for_layout( Layout::new::(), |layout| Global.allocate_zeroed(layout), <*mut u8>::cast, @@ -686,7 +718,7 @@ impl Arc { #[inline] pub fn new_uninit_in(alloc: A) -> Arc, A> { unsafe { - Arc::from_ptr_in( + Arc::from_inner_in( Arc::allocate_for_layout( Layout::new::(), |layout| alloc.allocate(layout), @@ -724,7 +756,7 @@ impl Arc { #[inline] pub fn new_zeroed_in(alloc: A) -> Arc, A> { unsafe { - Arc::from_ptr_in( + Arc::from_inner_in( Arc::allocate_for_layout( Layout::new::(), |layout| alloc.allocate_zeroed(layout), @@ -783,8 +815,9 @@ impl Arc { )); let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into(); let init_ptr: NonNull> = uninit_ptr.cast(); + let data_ptr = unsafe { data_ptr_from_inner_ptr(init_ptr) }; - let weak = Weak { ptr: init_ptr, alloc: alloc }; + let weak = Weak { ptr: data_ptr, alloc: alloc }; // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. If @@ -798,7 +831,7 @@ impl Arc { // reference into a strong reference. let strong = unsafe { let inner = init_ptr.as_ptr(); - ptr::write(&raw mut (*inner).data, data); + ptr::write(data_ptr.as_ptr(), data); // The above write to the data field must be visible to any threads which // observe a non-zero strong count. Therefore we need at least "Release" ordering @@ -821,7 +854,7 @@ impl Arc { // and forgetting the weak reference. 
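`Arc` mirrors every `Rc` change, including `new_cyclic_in` above. A sketch showing that sharing and cloning across threads behave as before:

```rust
use std::sync::Arc;
use std::thread;

fn main() {
    let shared = Arc::new(vec![1, 2, 3]);
    let handles: Vec<_> = (0..4)
        .map(|_| {
            // Each clone bumps the strong count; the data pointer is copied as-is.
            let shared = Arc::clone(&shared);
            thread::spawn(move || shared.iter().sum::<i32>())
        })
        .collect();
    for handle in handles {
        assert_eq!(handle.join().unwrap(), 6);
    }
}
```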
let alloc = weak.into_raw_with_allocator().1; - Arc::from_inner_in(init_ptr, alloc) + Arc::from_data_in(data_ptr, alloc) }; strong @@ -910,7 +943,7 @@ impl Arc { #[inline] pub fn try_new_uninit_in(alloc: A) -> Result, A>, AllocError> { unsafe { - Ok(Arc::from_ptr_in( + Ok(Arc::from_inner_in( Arc::try_allocate_for_layout( Layout::new::(), |layout| alloc.allocate(layout), @@ -949,7 +982,7 @@ impl Arc { #[inline] pub fn try_new_zeroed_in(alloc: A) -> Result, A>, AllocError> { unsafe { - Ok(Arc::from_ptr_in( + Ok(Arc::from_inner_in( Arc::try_allocate_for_layout( Layout::new::(), |layout| alloc.allocate_zeroed(layout), @@ -1002,7 +1035,7 @@ impl Arc { acquire!(this.inner().strong); let this = ManuallyDrop::new(this); - let elem: T = unsafe { ptr::read(&this.ptr.as_ref().data) }; + let elem: T = unsafe { ptr::read(this.ptr.as_ptr()) }; let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator // Make a weak pointer to clean up the implicit strong-weak reference @@ -1164,7 +1197,7 @@ impl Arc<[T]> { #[stable(feature = "new_uninit", since = "1.82.0")] #[must_use] pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit]> { - unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) } + unsafe { Arc::from_inner(Arc::allocate_for_slice(len)) } } /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being @@ -1193,7 +1226,7 @@ impl Arc<[T]> { #[must_use] pub fn new_zeroed_slice(len: usize) -> Arc<[mem::MaybeUninit]> { unsafe { - Arc::from_ptr(Arc::allocate_for_layout( + Arc::from_inner(Arc::allocate_for_layout( Layout::array::(len).unwrap(), |layout| Global.allocate_zeroed(layout), |mem| { @@ -1235,7 +1268,7 @@ impl Arc<[T], A> { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_uninit_slice_in(len: usize, alloc: A) -> Arc<[mem::MaybeUninit], A> { - unsafe { Arc::from_ptr_in(Arc::allocate_for_slice_in(len, &alloc), alloc) } + unsafe { Arc::from_inner_in(Arc::allocate_for_slice_in(len, &alloc), alloc) } } /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being @@ -1264,7 +1297,7 @@ impl Arc<[T], A> { #[inline] pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Arc<[mem::MaybeUninit], A> { unsafe { - Arc::from_ptr_in( + Arc::from_inner_in( Arc::allocate_for_layout( Layout::array::(len).unwrap(), |layout| alloc.allocate_zeroed(layout), @@ -1312,8 +1345,8 @@ impl Arc, A> { #[must_use = "`self` will be dropped if the result is not used"] #[inline] pub unsafe fn assume_init(self) -> Arc { - let (ptr, alloc) = Arc::into_inner_with_allocator(self); - unsafe { Arc::from_inner_in(ptr.cast(), alloc) } + let (ptr, alloc) = Arc::into_ptr_with_allocator(self); + unsafe { Arc::from_data_in(ptr.cast(), alloc) } } } @@ -1353,8 +1386,8 @@ impl Arc<[mem::MaybeUninit], A> { #[must_use = "`self` will be dropped if the result is not used"] #[inline] pub unsafe fn assume_init(self) -> Arc<[T], A> { - let (ptr, alloc) = Arc::into_inner_with_allocator(self); - unsafe { Arc::from_ptr_in(ptr.as_ptr() as _, alloc) } + let (ptr, alloc) = Arc::into_ptr_with_allocator(self); + unsafe { Arc::from_data_in(NonNull::new_unchecked(ptr.as_ptr() as _), alloc) } } } @@ -1579,12 +1612,7 @@ impl Arc { #[stable(feature = "rc_as_ptr", since = "1.45.0")] #[rustc_never_returns_null_ptr] pub fn as_ptr(this: &Self) -> *const T { - let ptr: *mut ArcInner = NonNull::as_ptr(this.ptr); - - // SAFETY: This cannot go through Deref::deref or RcInnerPtr::inner because - // this is required to retain raw/mut provenance 
such that e.g. `get_mut` can - // write through the pointer after the Rc is recovered through `from_raw`. - unsafe { &raw mut (*ptr).data } + this.ptr.as_ptr() } /// Constructs an `Arc` from a raw pointer. @@ -1657,14 +1685,7 @@ impl Arc { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - unsafe { - let offset = data_offset(ptr); - - // Reverse the offset to find the original ArcInner. - let arc_ptr = ptr.byte_sub(offset) as *mut ArcInner; - - Self::from_ptr_in(arc_ptr, alloc) - } + unsafe { Self::from_data_in(NonNull::new_unchecked(ptr.cast_mut()), alloc) } } /// Creates a new [`Weak`] pointer to this allocation. @@ -1866,7 +1887,7 @@ impl Arc { // `ArcInner` structure itself is `Sync` because the inner data is // `Sync` as well, so we're ok loaning out an immutable pointer to these // contents. - unsafe { self.ptr.as_ref() } + unsafe { inner_ptr_from_data_ptr(self.ptr).as_ref() } } // Non-inlined part of `drop`. @@ -1882,7 +1903,7 @@ impl Arc { // Destroy the data at this time, even though we must not free the box // allocation itself (there might still be weak pointers lying around). // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed. - unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) }; + unsafe { ptr::drop_in_place(self.ptr.as_ptr()) }; } /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to @@ -1921,7 +1942,7 @@ impl Arc { value_layout: Layout, allocate: impl FnOnce(Layout) -> Result, AllocError>, mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> *mut ArcInner { + ) -> NonNull> { let layout = arcinner_layout_for_value_layout(value_layout); let ptr = allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout)); @@ -1939,7 +1960,7 @@ impl Arc { value_layout: Layout, allocate: impl FnOnce(Layout) -> Result, AllocError>, mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> Result<*mut ArcInner, AllocError> { + ) -> Result>, AllocError> { let layout = arcinner_layout_for_value_layout(value_layout); let ptr = allocate(layout)?; @@ -1953,16 +1974,16 @@ impl Arc { ptr: NonNull<[u8]>, layout: Layout, mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> *mut ArcInner { + ) -> NonNull> { let inner = mem_to_arcinner(ptr.as_non_null_ptr().as_ptr()); debug_assert_eq!(unsafe { Layout::for_value_raw(inner) }, layout); unsafe { (&raw mut (*inner).strong).write(atomic::AtomicUsize::new(1)); (&raw mut (*inner).weak).write(atomic::AtomicUsize::new(1)); - } - inner + NonNull::new_unchecked(inner) + } } } @@ -1970,7 +1991,7 @@ impl Arc { /// Allocates an `ArcInner` with sufficient space for an unsized inner value. #[inline] #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut ArcInner { + unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> NonNull> { // Allocate for the `ArcInner` using the given value. 
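As with `Rc`, `Arc::from_raw_in` above no longer rewinds an offset eagerly; the raw round trip is unchanged at the API level, including for unsized payloads. A sketch:

```rust
use std::sync::Arc;

fn main() {
    let arc: Arc<[i32]> = Arc::from(vec![1, 2, 3]);
    // The raw pointer (with its slice metadata) is the stored field itself.
    let raw = Arc::into_raw(arc);
    // SAFETY: `raw` came from `into_raw` and is consumed exactly once.
    let arc = unsafe { Arc::from_raw(raw) };
    assert_eq!(*arc, [1, 2, 3]);
}
```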
unsafe { Arc::allocate_for_layout( @@ -1986,11 +2007,12 @@ impl Arc { unsafe { let value_size = size_of_val(&*src); let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src)); + let data_ptr = data_ptr_from_inner_ptr(ptr); // Copy value as bytes ptr::copy_nonoverlapping( (&raw const *src) as *const u8, - (&raw mut (*ptr).data) as *mut u8, + data_ptr.cast().as_ptr(), value_size, ); @@ -1999,7 +2021,7 @@ impl Arc { let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop, alloc.by_ref()); drop(src); - Self::from_ptr_in(ptr, alloc) + Self::from_data_in(data_ptr, alloc) } } } @@ -2007,7 +2029,7 @@ impl Arc { impl Arc<[T]> { /// Allocates an `ArcInner<[T]>` with the given length. #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> { + unsafe fn allocate_for_slice(len: usize) -> NonNull> { unsafe { Self::allocate_for_layout( Layout::array::(len).unwrap(), @@ -2024,10 +2046,11 @@ impl Arc<[T]> { unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { unsafe { let ptr = Self::allocate_for_slice(v.len()); + let data_ptr = data_ptr_from_inner_ptr(ptr); - ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).data) as *mut T, v.len()); + ptr::copy_nonoverlapping(v.as_ptr(), data_ptr.as_non_null_ptr().as_ptr(), v.len()); - Self::from_ptr(ptr) + Self::from_data(data_ptr) } } @@ -2060,11 +2083,13 @@ impl Arc<[T]> { unsafe { let ptr = Self::allocate_for_slice(len); - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value_raw(ptr); + let mem = ptr.as_ptr() as *mut _ as *mut u8; + let layout = Layout::for_value_raw(ptr.as_ptr()); + + let data_ptr = data_ptr_from_inner_ptr(ptr); // Pointer to first element - let elems = (&raw mut (*ptr).data) as *mut T; + let elems = data_ptr.as_non_null_ptr().as_ptr(); let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; @@ -2076,7 +2101,7 @@ impl Arc<[T]> { // All clear. Forget the guard so it doesn't free the new ArcInner. mem::forget(guard); - Self::from_ptr(ptr) + Self::from_data(data_ptr) } } } @@ -2085,7 +2110,7 @@ impl Arc<[T], A> { /// Allocates an `ArcInner<[T]>` with the given length. #[inline] #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut ArcInner<[T]> { + unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> NonNull> { unsafe { Arc::allocate_for_layout( Layout::array::(len).unwrap(), @@ -2168,7 +2193,7 @@ impl Clone for Arc { abort(); } - unsafe { Self::from_inner_in(self.ptr, self.alloc.clone()) } + unsafe { Self::from_data_in(self.ptr, self.alloc.clone()) } } } @@ -2178,7 +2203,7 @@ impl Deref for Arc { #[inline] fn deref(&self) -> &T { - &self.inner().data + unsafe { self.ptr.as_ref() } } } @@ -2264,12 +2289,12 @@ impl Arc { let this_data_ref: &T = &**this; // `in_progress` drops the allocation if we panic before finishing initializing it. - let mut in_progress: UniqueArcUninit = + let in_progress: UniqueArcUninit = UniqueArcUninit::new(this_data_ref, this.alloc.clone()); let initialized_clone = unsafe { // Clone. If the clone panics, `in_progress` will be dropped and clean up. - this_data_ref.clone_to_uninit(in_progress.data_ptr()); + this_data_ref.clone_to_uninit(in_progress.ptr.as_ptr()); // Cast type of pointer, now that it is initialized. in_progress.into_arc() }; @@ -2295,7 +2320,7 @@ impl Arc { // // We don't need panic-protection like the above branch does, but we might as well // use the same mechanism. 
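`Arc::make_mut` below writes the clone through `UniqueArcUninit`'s stored value pointer; its copy-on-write contract is unchanged. A sketch:

```rust
use std::sync::Arc;

fn main() {
    let mut a = Arc::new(String::from("hi"));
    let b = Arc::clone(&a);
    Arc::make_mut(&mut a).push('!'); // shared: clones the allocation
    assert_eq!(*a, "hi!");
    assert_eq!(*b, "hi"); // `b` keeps the original
    Arc::make_mut(&mut a).push('?'); // unique now: mutated in place
    assert_eq!(*a, "hi!?");
}
```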
- let mut in_progress: UniqueArcUninit = + let in_progress: UniqueArcUninit = UniqueArcUninit::new(&**this, this.alloc.clone()); unsafe { // Initialize `in_progress` with move of **this. @@ -2303,7 +2328,7 @@ impl Arc { // operation that just copies a value based on its `size_of_val()`. ptr::copy_nonoverlapping( ptr::from_ref(&**this).cast::(), - in_progress.data_ptr().cast::(), + in_progress.ptr.cast::().as_ptr(), size_of_val, ); @@ -2460,9 +2485,7 @@ impl Arc { #[inline] #[unstable(feature = "get_mut_unchecked", issue = "63292")] pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { - // We are careful to *not* create a reference covering the "count" fields, as - // this would alias with concurrent access to the reference counts (e.g. by `Weak`). - unsafe { &mut (*this.ptr.as_ptr()).data } + unsafe { this.ptr.as_mut() } } /// Determine whether this is the unique reference (including weak refs) to @@ -2601,8 +2624,8 @@ impl Arc { { if (*self).is::() { unsafe { - let (ptr, alloc) = Arc::into_inner_with_allocator(self); - Ok(Arc::from_inner_in(ptr.cast(), alloc)) + let (ptr, alloc) = Arc::into_ptr_with_allocator(self); + Ok(Arc::from_data_in(ptr.cast(), alloc)) } } else { Err(self) @@ -2642,8 +2665,8 @@ impl Arc { T: Any + Send + Sync, { unsafe { - let (ptr, alloc) = Arc::into_inner_with_allocator(self); - Arc::from_inner_in(ptr.cast(), alloc) + let (ptr, alloc) = Arc::into_ptr_with_allocator(self); + Arc::from_data_in(ptr.cast(), alloc) } } } @@ -2668,9 +2691,7 @@ impl Weak { #[must_use] pub const fn new() -> Weak { Weak { - ptr: unsafe { - NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) - }, + ptr: unsafe { NonNull::new_unchecked(ptr::without_provenance_mut::(usize::MAX)) }, alloc: Global, } } @@ -2698,9 +2719,7 @@ impl Weak { #[unstable(feature = "allocator_api", issue = "32838")] pub fn new_in(alloc: A) -> Weak { Weak { - ptr: unsafe { - NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) - }, + ptr: unsafe { NonNull::new_unchecked(ptr::without_provenance_mut::(usize::MAX)) }, alloc, } } @@ -2798,18 +2817,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { - let ptr: *mut ArcInner = NonNull::as_ptr(self.ptr); - - if is_dangling(ptr) { - // If the pointer is dangling, we return the sentinel directly. This cannot be - // a valid payload address, as the payload is at least as aligned as ArcInner (usize). - ptr as *const T - } else { - // SAFETY: if is_dangling returns false, then the pointer is dereferenceable. - // The payload may be dropped at this point, and we have to maintain provenance, - // so use raw pointer manipulation. - unsafe { &raw mut (*ptr).data } - } + self.ptr.as_ptr() } /// Consumes the `Weak` and turns it into a raw pointer. @@ -2929,22 +2937,7 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - // See Weak::as_ptr for context on how the input pointer is derived. - - let ptr = if is_dangling(ptr) { - // This is a dangling Weak. - ptr as *mut ArcInner - } else { - // Otherwise, we're guaranteed the pointer came from a nondangling Weak. - // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. - let offset = unsafe { data_offset(ptr) }; - // Thus, we reverse the offset to get the whole RcInner. - // SAFETY: the pointer originated from a Weak, so this offset is safe. 
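`Weak::from_raw_in` above now stores the raw pointer verbatim instead of reconstructing the `ArcInner` pointer up front. The raw round trip it supports, as a sketch:

```rust
use std::sync::{Arc, Weak};

fn main() {
    let arc = Arc::new(42u32);
    let weak = Arc::downgrade(&arc);
    let raw = weak.into_raw();
    // SAFETY: `raw` came from `Weak::into_raw` and is consumed exactly once.
    let weak = unsafe { Weak::from_raw(raw) };
    assert_eq!(weak.upgrade().as_deref(), Some(&42));
}
```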
- unsafe { ptr.byte_sub(offset) as *mut ArcInner } - }; - - // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc } + Weak { ptr: unsafe { NonNull::new_unchecked(ptr.cast_mut()) }, alloc } } } @@ -3000,7 +2993,7 @@ impl Weak { // expect to observe the fully initialized value. if self.inner()?.strong.fetch_update(Acquire, Relaxed, checked_increment).is_ok() { // SAFETY: pointer is not null, verified in checked_increment - unsafe { Some(Arc::from_inner_in(self.ptr, self.alloc.clone())) } + unsafe { Some(Arc::from_data_in(self.ptr, self.alloc.clone())) } } else { None } @@ -3058,7 +3051,11 @@ impl Weak { // We are careful to *not* create a reference covering the "data" field, as // the field may be mutated concurrently (for example, if the last `Arc` // is dropped, the data field will be dropped in-place). - Some(unsafe { WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } }) + Some(unsafe { + let ptr = inner_ptr_from_data_ptr(self.ptr).as_ptr(); + + WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } + }) } } @@ -3213,7 +3210,9 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak { ); unsafe { - self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) + let inner_ptr = inner_ptr_from_data_ptr(self.ptr); + + self.alloc.deallocate(inner_ptr.cast(), Layout::for_value_raw(inner_ptr.as_ptr())) } } } @@ -3494,8 +3493,8 @@ impl Default for Arc { fn default() -> Self { let arc: Arc<[u8]> = Default::default(); debug_assert!(core::str::from_utf8(&*arc).is_ok()); - let (ptr, alloc) = Arc::into_inner_with_allocator(arc); - unsafe { Arc::from_ptr_in(ptr.as_ptr() as *mut ArcInner, alloc) } + let (ptr, alloc) = Arc::into_ptr_with_allocator(arc); + unsafe { Arc::from_data_in(NonNull::new_unchecked(ptr.as_ptr() as _), alloc) } } } @@ -3690,13 +3689,14 @@ impl From> for Arc<[T], A> { let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); let rc_ptr = Self::allocate_for_slice_in(len, &alloc); - ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).data) as *mut T, len); + let data_ptr = data_ptr_from_inner_ptr(rc_ptr); + ptr::copy_nonoverlapping(vec_ptr, data_ptr.as_non_null_ptr().as_ptr(), len); // Create a `Vec` with length 0, to deallocate the buffer // without dropping its contents or the allocator let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc); - Self::from_ptr_in(rc_ptr, alloc) + Self::from_data_in(data_ptr, alloc) } } } @@ -3753,8 +3753,8 @@ impl TryFrom> for Arc<[T; N], A> { fn try_from(boxed_slice: Arc<[T], A>) -> Result { if boxed_slice.len() == N { - let (ptr, alloc) = Arc::into_inner_with_allocator(boxed_slice); - Ok(unsafe { Arc::from_inner_in(ptr.cast(), alloc) }) + let (ptr, alloc) = Arc::into_ptr_with_allocator(boxed_slice); + Ok(unsafe { Arc::from_data_in(ptr.cast(), alloc) }) } else { Err(boxed_slice) } @@ -3892,7 +3892,7 @@ fn data_offset_align(align: usize) -> usize { /// This is a helper for [`Arc::make_mut()`] to ensure correct cleanup on panic. #[cfg(not(no_global_oom_handling))] struct UniqueArcUninit { - ptr: NonNull>, + ptr: NonNull, layout_for_value: Layout, alloc: Option, } @@ -3909,28 +3909,26 @@ impl UniqueArcUninit { |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const ArcInner), ) }; - Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) } - } - - /// Returns the pointer to be written into to initialize the [`Arc`]. 
- fn data_ptr(&mut self) -> *mut T { - let offset = data_offset_align(self.layout_for_value.align()); - unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T } + Self { + ptr: unsafe { data_ptr_from_inner_ptr(ptr) }, + layout_for_value: layout, + alloc: Some(alloc), + } } /// Upgrade this into a normal [`Arc`]. /// /// # Safety /// - /// The data must have been initialized (by writing to [`Self::data_ptr()`]). + /// The data must have been initialized (by writing to [`Self::ptr`]). unsafe fn into_arc(self) -> Arc { let mut this = ManuallyDrop::new(self); - let ptr = this.ptr.as_ptr(); + let ptr = this.ptr; let alloc = this.alloc.take().unwrap(); // SAFETY: The pointer is valid as per `UniqueArcUninit::new`, and the caller is responsible // for having initialized the data. - unsafe { Arc::from_ptr_in(ptr, alloc) } + unsafe { Arc::from_data_in(ptr, alloc) } } } @@ -3942,7 +3940,7 @@ impl Drop for UniqueArcUninit { // * We own the pointer unless into_arc() was called, which forgets us. unsafe { self.alloc.take().unwrap().deallocate( - self.ptr.cast(), + inner_ptr_from_data_ptr(self.ptr).cast(), arcinner_layout_for_value_layout(self.layout_for_value), ); } diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index e8f9dee07d3e9..d9e8729a446b6 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -180,12 +180,22 @@ def children(self): def display_hint(): return "array" +BYTE_PTR_TYPE = gdb.lookup_type("u8").pointer() class StdRcProvider(printer_base): def __init__(self, valobj, is_atomic=False): + def inner_ptr(): + data_ptr = unwrap_unique_or_non_null(valobj["ptr"]) + formatter = "alloc::sync::ArcInner<{}>" if is_atomic else "alloc::rc::RcInner<{}>" + inner_type = gdb.lookup_type(formatter.format(data_ptr.type.target().name)) + data_offset = inner_type.fields()[-1].bitpos // 8 + inner_ptr = data_ptr.reinterpret_cast(BYTE_PTR_TYPE) - data_offset + + return inner_ptr.reinterpret_cast(inner_type.pointer()) + self._valobj = valobj self._is_atomic = is_atomic - self._ptr = unwrap_unique_or_non_null(valobj["ptr"]) + self._ptr = inner_ptr() self._value = self._ptr["data" if is_atomic else "value"] self._strong = self._ptr["strong"]["v" if is_atomic else "value"]["value"] self._weak = self._ptr["weak"]["v" if is_atomic else "value"]["value"] - 1 diff --git a/src/etc/lldb_batchmode.py b/src/etc/lldb_batchmode.py index db1e0035ea063..99dd48e498963 100644 --- a/src/etc/lldb_batchmode.py +++ b/src/etc/lldb_batchmode.py @@ -40,7 +40,7 @@ def print_debug(s): def normalize_whitespace(s): """Replace newlines, tabs, multiple spaces, etc with exactly one space""" - return re.sub("\s+", " ", s) + return re.sub("\\s+", " ", s) def breakpoint_callback(frame, bp_loc, dict): @@ -208,7 +208,7 @@ def watchdog(): for line in script_file: command = line.strip() - if command == "run" or command == "r" or re.match("^process\s+launch.*", command): + if command == "run" or command == "r" or re.match("^process\\s+launch.*", command): # Before starting to run the program, let the thread sleep a bit, so all # breakpoint added events can be processed time.sleep(0.5) diff --git a/src/etc/lldb_lookup.py b/src/etc/lldb_lookup.py index abbd802dcdd12..e616478451816 100644 --- a/src/etc/lldb_lookup.py +++ b/src/etc/lldb_lookup.py @@ -44,9 +44,9 @@ def summary_lookup(valobj, dict): return SizeSummaryProvider(valobj, dict) if rust_type == RustType.STD_RC: - return StdRcSummaryProvider(valobj, dict) + return StdRcSummaryProvider(valobj, dict, False) if rust_type == RustType.STD_ARC: - return 
StdRcSummaryProvider(valobj, dict) + return StdRcSummaryProvider(valobj, dict, True) if rust_type == RustType.STD_REF: return StdRefSummaryProvider(valobj, dict) diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index bace228454ebb..75e4fd696ed4f 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -660,7 +660,16 @@ def has_children(self): return True -def StdRcSummaryProvider(valobj, dict): +def get_rc_inner(valobj, is_atomic): + data_ptr = unwrap_unique_or_non_null(valobj.GetChildMemberWithName("ptr")) + inner_type = valobj.type.fields[1].type.template_args[0] + data_offset = inner_type.fields[-1].byte_offset + inner_address = data_ptr.GetValueAsAddress() - data_offset + + return data_ptr.CreateValueFromAddress("inner", inner_address, inner_type) + + +def StdRcSummaryProvider(valobj, dict, is_atomic): # type: (SBValue, dict) -> str strong = valobj.GetChildMemberWithName("strong").GetValueAsUnsigned() weak = valobj.GetChildMemberWithName("weak").GetValueAsUnsigned() @@ -670,7 +679,7 @@ def StdRcSummaryProvider(valobj, dict): class StdRcSyntheticProvider: """Pretty-printer for alloc::rc::Rc and alloc::sync::Arc - struct Rc { ptr: NonNull>, ... } + struct Rc { ptr: NonNull, ... } rust 1.31.1: struct NonNull { pointer: NonZero<*const T> } rust 1.33.0: struct NonNull { pointer: *const T } struct NonZero(T) @@ -678,7 +687,7 @@ class StdRcSyntheticProvider: struct Cell { value: UnsafeCell } struct UnsafeCell { value: T } - struct Arc { ptr: NonNull>, ... } + struct Arc { ptr: NonNull, ... } struct ArcInner { strong: atomic::AtomicUsize, weak: atomic::AtomicUsize, data: T } struct AtomicUsize { v: UnsafeCell } """ @@ -687,14 +696,17 @@ def __init__(self, valobj, dict, is_atomic=False): # type: (SBValue, dict, bool) -> StdRcSyntheticProvider self.valobj = valobj - self.ptr = unwrap_unique_or_non_null(self.valobj.GetChildMemberWithName("ptr")) + self.inner = get_rc_inner(valobj, is_atomic) - self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value") + self.value = self.inner.GetChildMemberWithName("data" if is_atomic else "value") + + if is_atomic: + read_cell = lambda x: x.GetChildAtIndex(0).GetChildAtIndex(0) + else: + read_cell = lambda x: x.GetChildAtIndex(0) - self.strong = self.ptr.GetChildMemberWithName("strong").GetChildAtIndex( - 0).GetChildMemberWithName("value") - self.weak = self.ptr.GetChildMemberWithName("weak").GetChildAtIndex( - 0).GetChildMemberWithName("value") + self.strong = read_cell(self.inner.GetChildMemberWithName("strong")) + self.weak = read_cell(self.inner.GetChildMemberWithName("weak")) self.value_builder = ValueBuilder(valobj) diff --git a/tests/codegen/placement-new.rs b/tests/codegen/placement-new.rs index 0ec2b6a6f20e7..a437164a926f0 100644 --- a/tests/codegen/placement-new.rs +++ b/tests/codegen/placement-new.rs @@ -22,9 +22,11 @@ pub fn box_default_inplace() -> Box<(String, String)> { #[no_mangle] pub fn rc_default_inplace() -> Rc<(String, String)> { // CHECK-NOT: alloca - // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc( + // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc(i[[#BITS:]] // CHECK-NOT: call void @llvm.memcpy - // CHECK: ret ptr [[RC]] + // CHECK: [[DATA:%.*]] = getelementptr inbounds i8, ptr [[RC]], i[[#BITS]] [[#div(BITS,4)]] + // CHECK-NOT: call void @llvm.memcpy + // CHECK: ret ptr [[DATA]] Rc::default() } @@ -32,8 +34,10 @@ pub fn rc_default_inplace() -> Rc<(String, String)> { #[no_mangle] pub fn arc_default_inplace() -> Arc<(String, String)> { // CHECK-NOT: alloca - // CHECK: 
[[ARC:%.*]] = {{.*}}call {{.*}}__rust_alloc( + // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc(i[[#BITS:]] + // CHECK-NOT: call void @llvm.memcpy + // CHECK: [[DATA:%.*]] = getelementptr inbounds i8, ptr [[RC]], i[[#BITS]] [[#div(BITS,4)]] // CHECK-NOT: call void @llvm.memcpy - // CHECK: ret ptr [[ARC]] + // CHECK: ret ptr [[DATA]] Arc::default() } diff --git a/tests/codegen/zero-cost-rc-arc-deref.rs b/tests/codegen/zero-cost-rc-arc-deref.rs new file mode 100644 index 0000000000000..ea8605510c063 --- /dev/null +++ b/tests/codegen/zero-cost-rc-arc-deref.rs @@ -0,0 +1,82 @@ +//@ compile-flags: -O -Z merge-functions=disabled + +#![crate_type = "lib"] + +use std::rc::Rc; +use std::sync::Arc; + +// CHECK-LABEL: @deref_rc_sized( +// CHECK-NOT: getelementptr +// CHECK: ret +#[no_mangle] +pub fn deref_rc_sized(rc: &Rc) -> &u32 { + &rc +} + +// CHECK-LABEL: @deref_rc_unsized( +// CHECK-COUNT-1: getelementptr +// CHECK: ret +#[no_mangle] +pub fn deref_rc_unsized(rc: &Rc) -> &str { + &rc +} + +// CHECK-LABEL: @deref_arc_sized( +// CHECK-NOT: getelementptr +// CHECK: ret +#[no_mangle] +pub fn deref_arc_sized(arc: &Arc) -> &u32 { + &arc +} + +// CHECK-LABEL: @deref_arc_unsized( +// CHECK-COUNT-1: getelementptr +// CHECK: ret +#[no_mangle] +pub fn deref_arc_unsized(arc: &Arc) -> &str { + &arc +} + +// CHECK-LABEL: @rc_slice_to_ref_slice_sized( +// CHECK-NOT: getelementptr +// CHECK: tail call void @llvm.memcpy +// CHECK-COUNT-1: getelementptr +// CHECK: ret +#[no_mangle] +pub fn rc_slice_to_ref_slice_sized(s: &[Rc]) -> Box<[&u32]> { + s.iter().map(|x| &**x).collect() +} + +// This test doesn’t work yet. +// +// COM: CHECK-LABEL: @rc_slice_to_ref_slice_unsized( +// COM: CHECK-NOT: getelementptr +// COM: CHECK: tail call void @llvm.memcpy +// COM: CHECK-NOT: getelementptr +// COM: CHECK: ret +// #[no_mangle] +// pub fn rc_slice_to_ref_slice_unsized(s: &[Rc]) -> Box<[&str]> { +// s.iter().map(|x| &**x).collect() +// } + +// CHECK-LABEL: @arc_slice_to_ref_slice_sized( +// CHECK-NOT: getelementptr +// CHECK: tail call void @llvm.memcpy +// CHECK-COUNT-1: getelementptr +// CHECK: ret +#[no_mangle] +pub fn arc_slice_to_ref_slice_sized(s: &[Arc]) -> Box<[&u32]> { + s.iter().map(|x| &**x).collect() +} + +// This test doesn’t work yet. 
+// +// COM: CHECK-LABEL: @arc_slice_to_ref_slice_unsized( +// COM: CHECK-NOT: getelementptr +// COM: CHECK: tail call void @llvm.memcpy +// COM: CHECK-NOT: getelementptr +// COM: CHECK: ret +// #[no_mangle] +// pub fn arc_slice_to_ref_slice_unsized(s: &[Arc]) -> Box<[&str]> { +// s.iter().map(|x| &**x).collect() +// } diff --git a/tests/debuginfo/strings-and-strs.rs b/tests/debuginfo/strings-and-strs.rs index 3d6589db34b88..02e39a0631937 100644 --- a/tests/debuginfo/strings-and-strs.rs +++ b/tests/debuginfo/strings-and-strs.rs @@ -19,7 +19,7 @@ // gdb-check:$4 = ("Hello", "World") // gdb-command:print str_in_rc -// gdb-check:$5 = alloc::rc::Rc<&str, alloc::alloc::Global> {ptr: core::ptr::non_null::NonNull> {pointer: 0x[...]}, phantom: core::marker::PhantomData>, alloc: alloc::alloc::Global} +// gdb-check:$5 = alloc::rc::Rc<&str, alloc::alloc::Global> {ptr: core::ptr::non_null::NonNull<&str> {pointer: 0x[...]}, phantom: core::marker::PhantomData>, alloc: alloc::alloc::Global} // === LLDB TESTS ================================================================================== // lldb-command:run diff --git a/tests/ui/abi/compatibility.rs b/tests/ui/abi/compatibility.rs index 01d90717107bb..e3a0b1059d0a6 100644 --- a/tests/ui/abi/compatibility.rs +++ b/tests/ui/abi/compatibility.rs @@ -126,7 +126,7 @@ mod prelude { value: T, } pub struct Rc { - ptr: NonNull>, + ptr: NonNull, phantom: PhantomData>, alloc: A, } @@ -140,7 +140,7 @@ mod prelude { data: T, } pub struct Arc { - ptr: NonNull>, + ptr: NonNull, phantom: PhantomData>, alloc: A, }
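For reference, the offset the updated gdb/lldb providers compute (`fields()[-1].bitpos // 8`, `fields[-1].byte_offset`) matches what `Layout::extend` produces for a two-counter header followed by the payload. A hedged check with a toy `Header` type, assuming a typical 64-bit target:

```rust
use std::alloc::Layout;

// Toy stand-in for the counter header of RcInner/ArcInner.
#[repr(C)]
struct Header {
    strong: usize,
    weak: usize,
}

// Offset of a payload of type `T` placed after the header, as computed by
// `Layout::extend` (the same quantity the debugger providers read back).
fn payload_offset<T>() -> usize {
    Layout::new::<Header>().extend(Layout::new::<T>()).unwrap().1
}

fn main() {
    // On a typical 64-bit target the header is 16 bytes.
    assert_eq!(payload_offset::<u8>(), 16);
    assert_eq!(payload_offset::<u64>(), 16);
}
```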