From 650fa57ef97e7dc9eba0a9eb81c571bae464372e Mon Sep 17 00:00:00 2001 From: EFanZh Date: Mon, 6 Jan 2025 22:45:12 +0800 Subject: [PATCH 1/2] Make `Rc::deref` and `Arc::deref` zero-cost --- library/alloc/src/lib.rs | 2 + library/alloc/src/raw_rc.rs | 1793 +++++++++++++++++++ library/alloc/src/rc.rs | 1351 +++----------- library/alloc/src/rc/tests.rs | 19 +- library/alloc/src/sync.rs | 1599 +++++------------ src/etc/gdb_providers.py | 23 +- src/etc/lldb_providers.py | 47 +- src/etc/natvis/liballoc.natvis | 97 +- src/tools/miri/tests/fail/memleak_rc.stderr | 10 +- tests/codegen/issues/issue-111603.rs | 7 +- tests/codegen/placement-new.rs | 12 +- tests/debuginfo/rc_arc.rs | 50 +- tests/debuginfo/strings-and-strs.rs | 3 +- 13 files changed, 2611 insertions(+), 2402 deletions(-) create mode 100644 library/alloc/src/raw_rc.rs diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index aff90f5abb3a0..fd3533c819d4c 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -209,6 +209,8 @@ mod testing; #[macro_use] mod macros; +#[cfg(not(no_rc))] +mod raw_rc; mod raw_vec; // Heaps provided for low-level allocation strategies diff --git a/library/alloc/src/raw_rc.rs b/library/alloc/src/raw_rc.rs new file mode 100644 index 0000000000000..3d3cdce64a79b --- /dev/null +++ b/library/alloc/src/raw_rc.rs @@ -0,0 +1,1793 @@ +use core::alloc::{AllocError, Allocator, Layout, LayoutError}; +use core::any::Any; +use core::cell::UnsafeCell; +#[cfg(not(no_global_oom_handling))] +use core::clone::CloneToUninit; +use core::fmt::{self, Debug, Display, Formatter, Pointer}; +use core::hash::{Hash, Hasher}; +use core::hint; +#[cfg(not(no_global_oom_handling))] +use core::iter::TrustedLen; +use core::marker::{PhantomData, Unsize}; +#[cfg(not(no_global_oom_handling))] +use core::mem::ManuallyDrop; +use core::mem::{self, MaybeUninit, SizedTypeProperties}; +use core::num::NonZeroUsize; +use core::ops::{CoerceUnsized, DispatchFromDyn}; +use core::pin::PinCoerceUnsized; +use core::ptr::{self, NonNull}; + +#[cfg(not(no_global_oom_handling))] +use crate::alloc; +use crate::alloc::Global; +#[cfg(not(no_global_oom_handling))] +use crate::boxed::Box; +#[cfg(not(no_global_oom_handling))] +use crate::string::String; +#[cfg(not(no_global_oom_handling))] +use crate::vec::Vec; + +pub trait RcOps { + unsafe fn increment_ref_count(count: &UnsafeCell); + unsafe fn decrement_ref_count(count: &UnsafeCell) -> bool; + + unsafe fn upgrade(strong_count: &UnsafeCell) -> bool; + unsafe fn downgrade(weak_count: &UnsafeCell); + + unsafe fn lock_strong_count(strong_count: &UnsafeCell) -> bool; + unsafe fn unlock_strong_count(strong_count: &UnsafeCell); + + unsafe fn is_unique(strong_count: &UnsafeCell, weak_count: &UnsafeCell) -> bool; + + #[cfg(not(no_global_oom_handling))] + unsafe fn make_unique(rc: &mut RawRc, by_clone: F, by_move: G) + where + T: ?Sized, + F: FnOnce(&mut RawRc), + G: FnOnce(&mut RawRc); +} + +pub struct RefCounts { + pub weak: UnsafeCell, + pub strong: UnsafeCell, +} + +impl RefCounts { + pub const fn new(strong_cont: usize) -> Self { + Self { weak: UnsafeCell::new(1), strong: UnsafeCell::new(strong_cont) } + } +} + +const _: () = assert!(RefCounts::LAYOUT.size().is_power_of_two()); + +struct RcLayout { + allocation_layout: Layout, + allocation_offset_bytes: usize, +} + +impl RcLayout { + const fn from_value_layout(value_layout: Layout) -> Result { + match RefCounts::LAYOUT.extend(value_layout) { + Ok((unaligned_allocation_layout, allocation_offset_bytes)) => Ok(Self { + allocation_layout: 
unaligned_allocation_layout.pad_to_align(), + allocation_offset_bytes, + }), + Err(error) => Err(error), + } + } + + const unsafe fn from_value_layout_unchecked(value_layout: Layout) -> Self { + match Self::from_value_layout(value_layout) { + Ok(rc_layout) => rc_layout, + Err(_) => unsafe { hint::unreachable_unchecked() }, + } + } + + #[cfg(not(no_global_oom_handling))] + const fn from_value_ref(value_ref: &T) -> Result + where + T: ?Sized, + { + Self::from_value_layout(Layout::for_value(value_ref)) + } + + const unsafe fn from_value_ptr_unchecked(value_ptr: NonNull) -> Self + where + T: ?Sized, + { + unsafe { Self::from_value_layout_unchecked(Layout::for_value_raw(value_ptr.as_ptr())) } + } + + const fn of() -> Result { + Self::from_value_layout(T::LAYOUT) + } + + #[cfg(not(no_global_oom_handling))] + const fn of_slice(length: usize) -> Result { + match Layout::array::(length) { + Ok(layout) => Self::from_value_layout(layout), + Err(error) => Err(error), + } + } +} + +trait RcLayoutExt { + const RC_LAYOUT: RcLayout; +} + +impl RcLayoutExt for T { + const RC_LAYOUT: RcLayout = match RcLayout::of::() { + Ok(rc_layout) => rc_layout, + Err(_) => panic!("layout size is too large"), + }; +} + +unsafe fn ref_counts_ptr_from_value_ptr(value_ptr: NonNull<()>) -> NonNull { + const REF_COUNTS_OFFSET_BYTES: usize = RefCounts::LAYOUT.size(); + + unsafe { value_ptr.byte_sub(REF_COUNTS_OFFSET_BYTES).cast() } +} + +unsafe fn strong_count_ptr_from_value_ptr(value_ptr: NonNull<()>) -> NonNull> { + const STRONG_COUNT_OFFSET_BYTES: usize = + RefCounts::LAYOUT.size() - mem::offset_of!(RefCounts, strong); + + unsafe { value_ptr.byte_sub(STRONG_COUNT_OFFSET_BYTES).cast() } +} + +unsafe fn weak_count_ptr_from_value_ptr(value_ptr: NonNull<()>) -> NonNull> { + const WEAK_COUNT_OFFSET_BYTES: usize = + RefCounts::LAYOUT.size() - mem::offset_of!(RefCounts, weak); + + unsafe { value_ptr.byte_sub(WEAK_COUNT_OFFSET_BYTES).cast() } +} + +unsafe fn init_rc_allocation( + allocation_ptr: NonNull<[u8]>, + rc_layout: &RcLayout, +) -> NonNull<()> { + let allocation_ptr = allocation_ptr.cast::<()>(); + let value_ptr = unsafe { allocation_ptr.byte_add(rc_layout.allocation_offset_bytes) }; + let ref_counts = const { RefCounts::new(STRONG_COUNT) }; + + unsafe { ref_counts_ptr_from_value_ptr(value_ptr).write(ref_counts) }; + + value_ptr +} + +unsafe fn try_handle_rc_allocation_result( + allocation_result: Result, AllocError>, + rc_layout: &RcLayout, +) -> Result, AllocError> { + allocation_result.map(|allocation_ptr| unsafe { + init_rc_allocation::(allocation_ptr, rc_layout) + }) +} + +fn try_allocate_uninit_for_rc( + alloc: &A, + rc_layout: &RcLayout, +) -> Result, AllocError> +where + A: Allocator, +{ + unsafe { + try_handle_rc_allocation_result::( + alloc.allocate(rc_layout.allocation_layout), + rc_layout, + ) + } +} + +fn try_allocate_zeroed_for_rc( + alloc: &A, + rc_layout: &RcLayout, +) -> Result, AllocError> +where + A: Allocator, +{ + unsafe { + try_handle_rc_allocation_result::( + alloc.allocate_zeroed(rc_layout.allocation_layout), + rc_layout, + ) + } +} + +#[cfg(not(no_global_oom_handling))] +unsafe fn handle_rc_allocation_result( + allocation_result: Result, AllocError>, + rc_layout: &RcLayout, +) -> NonNull<()> { + match allocation_result { + Ok(allocation_ptr) => unsafe { + init_rc_allocation::(allocation_ptr, rc_layout) + }, + Err(AllocError) => alloc::handle_alloc_error(rc_layout.allocation_layout), + } +} + +#[cfg(not(no_global_oom_handling))] +fn allocate_uninit_for_rc( + alloc: &A, + rc_layout: &RcLayout, +) -> 
NonNull<()> +where + A: Allocator, +{ + unsafe { + handle_rc_allocation_result::( + alloc.allocate(rc_layout.allocation_layout), + rc_layout, + ) + } +} + +#[cfg(not(no_global_oom_handling))] +fn allocate_zeroed_for_rc( + alloc: &A, + rc_layout: &RcLayout, +) -> NonNull<()> +where + A: Allocator, +{ + unsafe { + handle_rc_allocation_result::( + alloc.allocate_zeroed(rc_layout.allocation_layout), + rc_layout, + ) + } +} + +#[cfg(not(no_global_oom_handling))] +fn allocate_for_rc_with( + alloc: &A, + rc_layout: &RcLayout, + f: F, +) -> NonNull<()> +where + A: Allocator, + F: FnOnce(NonNull<()>), +{ + struct Guard<'a, A> + where + A: Allocator, + { + alloc: &'a A, + ptr: NonNull<()>, + rc_layout: &'a RcLayout, + } + + impl<'a, A> Drop for Guard<'a, A> + where + A: Allocator, + { + fn drop(&mut self) { + unsafe { deallocate_rc_ptr::(self.alloc, self.ptr, self.rc_layout) }; + } + } + + let ptr = allocate_uninit_for_rc::(alloc, &rc_layout); + let guard = Guard { alloc, ptr, rc_layout }; + + f(ptr); + + mem::forget(guard); + + ptr +} + +#[cfg(not(no_global_oom_handling))] +unsafe fn allocate_for_rc_with_bytes( + alloc: &A, + rc_layout: &RcLayout, + ptr: NonNull<()>, + size: usize, +) -> NonNull<()> +where + A: Allocator, +{ + allocate_for_rc_with::(alloc, rc_layout, |dst_ptr| unsafe { + ptr::copy_nonoverlapping::(ptr.as_ptr().cast(), dst_ptr.as_ptr().cast(), size); + }) +} + +#[cfg(not(no_global_oom_handling))] +fn allocate_for_rc_with_value(alloc: &A, value: &T) -> NonNull +where + A: Allocator, + T: ?Sized, +{ + let rc_layout = RcLayout::from_value_ref(value).unwrap(); + + unsafe { + let ptr = allocate_for_rc_with_bytes::( + alloc, + &rc_layout, + NonNull::from(value).cast(), + size_of_val(value), + ); + + NonNull::new_unchecked(ptr.as_ptr().with_metadata_of(value)) + } +} + +#[cfg(not(no_global_oom_handling))] +unsafe fn allocate_for_rc_with_value_unchecked( + alloc: &A, + value: &T, +) -> NonNull +where + A: Allocator, + T: ?Sized, +{ + unsafe { + let rc_layout = RcLayout::from_value_ptr_unchecked(NonNull::from(value)); + + let ptr = allocate_for_rc_with_bytes::( + alloc, + &rc_layout, + NonNull::from(value).cast(), + size_of_val(value), + ); + + NonNull::new_unchecked(ptr.as_ptr().with_metadata_of(value)) + } +} + +unsafe fn deallocate_rc_ptr(alloc: &A, ptr: NonNull<()>, rc_layout: &RcLayout) +where + A: Allocator, +{ + unsafe { + alloc.deallocate( + ptr.cast().byte_sub(rc_layout.allocation_offset_bytes), + rc_layout.allocation_layout, + ); + } +} + +struct GuardedWeak<'a, T, A, R> +where + T: ?Sized, + A: Allocator, + R: RcOps, +{ + weak: &'a mut RawWeak, + _phantom_data: PhantomData, +} + +impl<'a, T, A, R> GuardedWeak<'a, T, A, R> +where + T: ?Sized, + A: Allocator, + R: RcOps, +{ + unsafe fn new(weak: &'a mut RawWeak) -> Self { + Self { weak, _phantom_data: PhantomData } + } +} + +impl Drop for GuardedWeak<'_, T, A, R> +where + T: ?Sized, + A: Allocator, + R: RcOps, +{ + fn drop(&mut self) { + unsafe { self.weak.drop_unchecked::() }; + } +} + +struct GuardedRc<'a, T, A, R> +where + T: ?Sized, + A: Allocator, + R: RcOps, +{ + rc: &'a mut RawRc, + _phantom_data: PhantomData, +} + +impl<'a, T, A, R> GuardedRc<'a, T, A, R> +where + T: ?Sized, + A: Allocator, + R: RcOps, +{ + unsafe fn new(rc: &'a mut RawRc) -> Self { + Self { rc, _phantom_data: PhantomData } + } +} + +impl Drop for GuardedRc<'_, T, A, R> +where + T: ?Sized, + A: Allocator, + R: RcOps, +{ + fn drop(&mut self) { + unsafe { self.rc.drop::() }; + } +} + +pub struct RawWeak +where + T: ?Sized, +{ + ptr: NonNull, + alloc: A, +} + 
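The helpers above encode the central idea of this patch: a `RawWeak`/`RawRc` stores a pointer to the value itself, while the two reference counts live in a `RefCounts` header placed immediately before the value, so `Deref` is a plain pointer read and the counts are reached through a constant negative offset. A minimal, self-contained sketch of that layout follows; it is an illustration only, not code from this patch, and the names (`ToyRc`) are invented here.

// Illustration only: a toy refcounted box that stores a pointer to the value
// and keeps the counts at a fixed negative offset, mirroring the layout used
// by `raw_rc` above.
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::cell::Cell;
use std::mem::size_of;
use std::ptr::NonNull;

#[repr(C)]
struct RefCounts {
    weak: Cell<usize>,
    strong: Cell<usize>,
}

struct ToyRc<T> {
    // Points directly at the value, so access needs no offset arithmetic.
    value_ptr: NonNull<T>,
}

impl<T> ToyRc<T> {
    fn new(value: T) -> Self {
        // Counts header followed by the value, padded so the value is aligned.
        let (layout, value_offset) =
            Layout::new::<RefCounts>().extend(Layout::new::<T>()).unwrap();
        let layout = layout.pad_to_align();
        unsafe {
            let base = alloc(layout);
            if base.is_null() {
                handle_alloc_error(layout);
            }
            let value_ptr = base.add(value_offset).cast::<T>();
            // The counts sit immediately before the value.
            value_ptr
                .cast::<u8>()
                .sub(size_of::<RefCounts>())
                .cast::<RefCounts>()
                .write(RefCounts { weak: Cell::new(1), strong: Cell::new(1) });
            value_ptr.write(value);
            ToyRc { value_ptr: NonNull::new_unchecked(value_ptr) }
        }
    }

    // Zero-cost access: just dereference the stored pointer.
    fn get(&self) -> &T {
        unsafe { self.value_ptr.as_ref() }
    }

    // The counts are recovered by subtracting a compile-time constant.
    fn strong_count(&self) -> usize {
        unsafe {
            (*self
                .value_ptr
                .as_ptr()
                .cast::<u8>()
                .sub(size_of::<RefCounts>())
                .cast::<RefCounts>())
            .strong
            .get()
        }
    }
}

impl<T> Drop for ToyRc<T> {
    fn drop(&mut self) {
        // The sketch has a single owner, so always drop the value and free the
        // allocation; the real code consults the counts first.
        let (layout, value_offset) =
            Layout::new::<RefCounts>().extend(Layout::new::<T>()).unwrap();
        let layout = layout.pad_to_align();
        unsafe {
            self.value_ptr.as_ptr().drop_in_place();
            dealloc(self.value_ptr.as_ptr().cast::<u8>().sub(value_offset), layout);
        }
    }
}

fn main() {
    let r = ToyRc::new(5_i32);
    assert_eq!(*r.get(), 5);
    assert_eq!(r.strong_count(), 1);
}

The real `RcLayout` additionally records the offset from the allocation start to the value so deallocation can recover the original pointer, which is what `deallocate_rc_ptr` above does.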
+impl RawWeak +where + T: ?Sized, +{ + pub const unsafe fn from_raw_parts(ptr: NonNull, alloc: A) -> Self { + Self { ptr, alloc } + } + + pub unsafe fn from_raw(ptr: NonNull) -> Self + where + A: Default, + { + unsafe { Self::from_raw_parts(ptr, A::default()) } + } + + pub fn allocator(&self) -> &A { + &self.alloc + } + + pub fn as_ptr(&self) -> NonNull { + self.ptr + } + + unsafe fn as_ref_unchecked(&self) -> &T { + unsafe { self.ptr.as_ref() } + } + + unsafe fn assume_init_drop(&mut self) + where + A: Allocator, + R: RcOps, + { + unsafe { + let guard = GuardedWeak::::new(self); + + guard.weak.as_ptr().drop_in_place(); + }; + } + + #[cfg(not(no_global_oom_handling))] + fn borrow_raw_parts(&mut self) -> (&mut NonNull, &mut A) { + (&mut self.ptr, &mut self.alloc) + } + + pub unsafe fn cast(self) -> RawWeak { + unsafe { self.cast_with(NonNull::cast) } + } + + pub unsafe fn cast_with(self, f: F) -> RawWeak + where + U: ?Sized, + F: FnOnce(NonNull) -> NonNull, + { + unsafe { RawWeak::from_raw_parts(f(self.ptr), self.alloc) } + } + + pub unsafe fn clone(&self) -> Self + where + A: Clone, + R: RcOps, + { + unsafe { + if !self.is_dangling() { + R::increment_ref_count(self.weak_count_unchecked()); + } + + self.clone_without_increment_weak_count() + } + } + + unsafe fn clone_without_increment_weak_count(&self) -> Self + where + A: Clone, + { + unsafe { Self::from_raw_parts(self.ptr, self.alloc.clone()) } + } + + pub unsafe fn drop(&mut self) + where + A: Allocator, + R: RcOps, + { + if !self.is_dangling() { + unsafe { self.drop_unchecked::() }; + } + } + + unsafe fn drop_unchecked(&mut self) + where + A: Allocator, + R: RcOps, + { + unsafe { + if R::decrement_ref_count(self.weak_count_unchecked()) { + let rc_layout = RcLayout::from_value_ptr_unchecked(self.ptr); + + deallocate_rc_ptr::(&self.alloc, self.ptr.cast(), &rc_layout); + } + }; + } + + unsafe fn get_mut_unchecked(&mut self) -> &mut T { + unsafe { self.ptr.as_mut() } + } + + pub fn into_raw(self) -> NonNull { + self.ptr + } + + pub fn into_raw_parts(self) -> (NonNull, A) { + (self.ptr, self.alloc) + } + + pub fn is_dangling(&self) -> bool { + self.ptr.addr() == NonZeroUsize::MAX + } + + pub fn ptr_eq(&self, other: &Self) -> bool { + ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) + } + + pub fn ptr_ne(&self, other: &Self) -> bool { + !ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) + } + + #[cfg(not(no_sync))] + pub fn ref_counts(&self) -> Option<&RefCounts> { + (!self.is_dangling()).then(|| unsafe { self.ref_counts_unchecked() }) + } + + #[cfg(not(no_sync))] + unsafe fn ref_counts_unchecked(&self) -> &RefCounts { + unsafe { ref_counts_ptr_from_value_ptr(self.ptr.cast()).as_ref() } + } + + pub fn strong_count(&self) -> Option<&UnsafeCell> { + (!self.is_dangling()).then(|| unsafe { self.strong_count_unchecked() }) + } + + unsafe fn strong_count_unchecked(&self) -> &UnsafeCell { + unsafe { strong_count_ptr_from_value_ptr(self.ptr.cast()).as_ref() } + } + + pub fn weak_count(&self) -> Option<&UnsafeCell> { + (!self.is_dangling()).then(|| unsafe { self.weak_count_unchecked() }) + } + + unsafe fn weak_count_unchecked(&self) -> &UnsafeCell { + unsafe { weak_count_ptr_from_value_ptr(self.ptr.cast()).as_ref() } + } + + pub fn upgrade(&self) -> Option> + where + A: Clone, + R: RcOps, + { + if self.is_dangling() { None } else { unsafe { self.upgrade_unchecked::() } } + } + + unsafe fn upgrade_unchecked(&self) -> Option> + where + A: Clone, + R: RcOps, + { + unsafe { + R::upgrade(self.strong_count_unchecked()) + .then(|| 
RawRc::from_raw_parts(self.ptr, self.alloc.clone())) + } + } +} + +impl RawWeak { + pub fn new_dangling() -> Self + where + A: Default, + { + Self::new_dangling_in(A::default()) + } + + pub const fn new_dangling_in(alloc: A) -> Self { + unsafe { + Self::from_raw_parts( + NonNull::new_unchecked(ptr::without_provenance_mut::(usize::MAX)), + alloc, + ) + } + } + + pub fn try_new_uninit() -> Result + where + A: Allocator + Default, + { + Self::try_new_uninit_in::(A::default()) + } + + pub fn try_new_uninit_in(alloc: A) -> Result + where + A: Allocator, + { + try_allocate_uninit_for_rc::(&alloc, &T::RC_LAYOUT) + .map(|ptr| unsafe { Self::from_raw_parts(ptr.cast(), alloc) }) + } + + pub fn try_new_zeroed() -> Result + where + A: Allocator + Default, + { + Self::try_new_zeroed_in::(A::default()) + } + + pub fn try_new_zeroed_in(alloc: A) -> Result + where + A: Allocator, + { + try_allocate_zeroed_for_rc::(&alloc, &T::RC_LAYOUT) + .map(|ptr| unsafe { Self::from_raw_parts(ptr.cast(), alloc) }) + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_uninit() -> Self + where + A: Allocator + Default, + { + Self::new_uninit_in::(A::default()) + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_uninit_in(alloc: A) -> Self + where + A: Allocator, + { + unsafe { + Self::from_raw_parts( + allocate_uninit_for_rc::(&alloc, &T::RC_LAYOUT).cast(), + alloc, + ) + } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_zeroed() -> Self + where + A: Allocator + Default, + { + Self::new_zeroed_in::(A::default()) + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_zeroed_in(alloc: A) -> Self + where + A: Allocator, + { + unsafe { + Self::from_raw_parts( + allocate_zeroed_for_rc::(&alloc, &T::RC_LAYOUT).cast(), + alloc, + ) + } + } + + unsafe fn assume_init_into_inner(mut self) -> T + where + A: Allocator, + R: RcOps, + { + unsafe { + let result = self.ptr.read(); + + self.drop_unchecked::(); + + result + } + } +} + +impl RawWeak<[T], A> { + #[cfg(not(no_global_oom_handling))] + fn allocate_in(length: usize, alloc: A, allocate_fn: F) -> Self + where + A: Allocator, + F: FnOnce(&A, &RcLayout) -> NonNull<()>, + { + let rc_layout = RcLayout::of_slice::(length).unwrap(); + let ptr = allocate_fn(&alloc, &rc_layout); + + unsafe { Self::from_raw_parts(NonNull::slice_from_raw_parts(ptr.cast(), length), alloc) } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_uninit_slice(length: usize) -> Self + where + A: Allocator + Default, + { + Self::new_uninit_slice_in::(length, A::default()) + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_uninit_slice_in(length: usize, alloc: A) -> Self + where + A: Allocator, + { + Self::allocate_in(length, alloc, allocate_uninit_for_rc::) + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_zeroed_slice(length: usize) -> Self + where + A: Allocator + Default, + { + Self::new_zeroed_slice_in::(length, A::default()) + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_zeroed_slice_in(length: usize, alloc: A) -> Self + where + A: Allocator, + { + Self::allocate_in(length, alloc, allocate_zeroed_for_rc::) + } +} + +impl CoerceUnsized> for RawWeak +where + T: ?Sized + Unsize, + U: ?Sized, +{ +} + +impl Debug for RawWeak +where + T: ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("(Weak)") + } +} + +impl Default for RawWeak +where + A: Default, +{ + fn default() -> Self { + Self::new_dangling() + } +} + +impl DispatchFromDyn> for RawWeak +where + T: ?Sized + Unsize, + U: ?Sized, +{ +} + +#[repr(transparent)] +pub struct RawRc 
+where + T: ?Sized, +{ + weak: RawWeak, + _phantom_data: PhantomData, +} + +impl RawRc +where + T: ?Sized, +{ + pub unsafe fn from_raw(ptr: NonNull) -> Self + where + A: Default, + { + unsafe { Self::from_raw_parts(ptr, A::default()) } + } + + pub unsafe fn from_raw_parts(ptr: NonNull, alloc: A) -> Self { + unsafe { Self::from_weak(RawWeak::from_raw_parts(ptr, alloc)) } + } + + unsafe fn from_weak(weak: RawWeak) -> Self { + Self { weak, _phantom_data: PhantomData } + } + + pub fn allocator(&self) -> &A { + self.weak.allocator() + } + + pub fn as_ptr(&self) -> NonNull { + self.weak.as_ptr() + } + + #[cfg(not(no_global_oom_handling))] + fn borrow_raw_parts(&mut self) -> (&mut NonNull, &mut A) { + self.weak.borrow_raw_parts() + } + + pub unsafe fn cast(self) -> RawRc { + unsafe { RawRc::from_weak(self.weak.cast()) } + } + + pub unsafe fn cast_with(self, f: F) -> RawRc + where + U: ?Sized, + F: FnOnce(NonNull) -> NonNull, + { + unsafe { RawRc::from_weak(self.weak.cast_with(f)) } + } + + pub unsafe fn clone(&self) -> Self + where + A: Clone, + R: RcOps, + { + unsafe { + R::increment_ref_count(self.strong_count()); + + Self::from_weak(self.weak.clone_without_increment_weak_count()) + } + } + + pub unsafe fn decrement_strong_count(ptr: NonNull) + where + A: Allocator + Default, + { + unsafe { Self::decrement_strong_count_in::(ptr, A::default()) }; + } + + pub unsafe fn decrement_strong_count_in(ptr: NonNull, alloc: A) + where + A: Allocator, + { + unsafe { RawRc::from_raw_parts(ptr, alloc).drop::() }; + } + + pub unsafe fn increment_strong_count(ptr: NonNull) { + unsafe { R::increment_ref_count(strong_count_ptr_from_value_ptr(ptr.cast()).as_ref()) }; + } + + pub unsafe fn downgrade(&self) -> RawWeak + where + A: Clone, + R: RcOps, + { + unsafe { + R::downgrade(self.weak_count()); + + self.weak.clone_without_increment_weak_count() + } + } + + pub unsafe fn drop(&mut self) + where + A: Allocator, + R: RcOps, + { + unsafe { + if R::decrement_ref_count(self.strong_count()) { + self.drop_slow::(); + } + }; + } + + #[inline(never)] + unsafe fn drop_slow(&mut self) + where + A: Allocator, + R: RcOps, + { + unsafe { self.weak.assume_init_drop::() } + } + + pub unsafe fn get_mut(&mut self) -> Option<&mut T> + where + R: RcOps, + { + unsafe { + R::is_unique(self.strong_count(), self.weak_count()).then(|| self.get_mut_unchecked()) + } + } + + pub unsafe fn get_mut_unchecked(&mut self) -> &mut T { + unsafe { self.weak.get_mut_unchecked() } + } + + pub fn into_raw(self) -> NonNull { + self.weak.into_raw() + } + + pub fn into_raw_parts(self) -> (NonNull, A) { + self.weak.into_raw_parts() + } + + #[cfg(not(no_global_oom_handling))] + unsafe fn make_unique_by_clone(&mut self) + where + T: CloneToUninit, + A: Allocator, + R: RcOps, + { + let (ptr_ref, alloc) = self.borrow_raw_parts(); + let old_ptr = *ptr_ref; + + unsafe { + let rc_layout = RcLayout::from_value_ptr_unchecked(old_ptr); + + let new_ptr = allocate_for_rc_with::(alloc, &rc_layout, |new_ptr| { + T::clone_to_uninit(old_ptr.as_ref(), new_ptr.as_ptr().cast()); + }); + + *ptr_ref = NonNull::new_unchecked(new_ptr.as_ptr().with_metadata_of(old_ptr.as_ptr())); + + RawRc::from_raw_parts(old_ptr, &*alloc).drop::(); + } + } + + #[cfg(not(no_global_oom_handling))] + unsafe fn make_unique_by_move(&mut self) + where + A: Allocator, + R: RcOps, + { + let (ptr_ref, alloc) = self.borrow_raw_parts(); + let old_ptr = *ptr_ref; + + unsafe { + let new_ptr = allocate_for_rc_with_value_unchecked::(alloc, old_ptr.as_ref()); + + *ptr_ref = new_ptr; + + 
RawWeak::from_raw_parts(old_ptr, &*alloc).drop_unchecked::(); + } + } + + #[cfg(not(no_global_oom_handling))] + pub unsafe fn make_mut(&mut self) -> &mut T + where + T: CloneToUninit, + A: Allocator + Clone, + R: RcOps, + { + unsafe { + R::make_unique( + self, + |this| this.make_unique_by_clone::(), + |this| this.make_unique_by_move::(), + ); + + self.get_mut_unchecked() + } + } + + pub fn ptr_eq(&self, other: &Self) -> bool { + RawWeak::ptr_eq(&self.weak, &other.weak) + } + + pub fn ptr_ne(&self, other: &Self) -> bool { + RawWeak::ptr_ne(&self.weak, &other.weak) + } + + #[cfg(all(not(no_global_oom_handling), not(no_sync)))] + pub fn ref_counts(&self) -> &RefCounts { + unsafe { self.weak.ref_counts_unchecked() } + } + + pub fn strong_count(&self) -> &UnsafeCell { + unsafe { self.weak.strong_count_unchecked() } + } + + pub fn weak_count(&self) -> &UnsafeCell { + unsafe { self.weak.weak_count_unchecked() } + } +} + +impl RawRc { + unsafe fn from_weak_with_value(weak: RawWeak, value: T) -> Self { + unsafe { + weak.as_ptr().write(value); + + Self::from_weak(weak) + } + } + + pub fn try_new(value: T) -> Result + where + A: Allocator + Default, + { + RawWeak::try_new_uninit::<1>() + .map(|weak| unsafe { Self::from_weak_with_value(weak, value) }) + } + + pub fn try_new_in(value: T, alloc: A) -> Result + where + A: Allocator, + { + RawWeak::try_new_uninit_in::<1>(alloc) + .map(|weak| unsafe { Self::from_weak_with_value(weak, value) }) + } + + #[cfg(not(no_global_oom_handling))] + pub fn new(value: T) -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_in(value: T, alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<1>(alloc), value) } + } + + #[cfg(not(no_global_oom_handling))] + fn new_with(f: F) -> Self + where + A: Allocator + Default, + F: FnOnce() -> T, + { + let alloc = A::default(); + + unsafe { + let ptr = allocate_for_rc_with::(&alloc, &T::RC_LAYOUT, |ptr| { + ptr.cast().write(f()); + }); + + Self::from_raw_parts(ptr.cast(), alloc) + } + } + + #[cfg(not(no_global_oom_handling))] + pub unsafe fn new_cyclic(data_fn: F) -> Self + where + A: Allocator + Default, + F: FnOnce(&RawWeak) -> T, + R: RcOps, + { + unsafe { Self::new_cyclic_in::(data_fn, A::default()) } + } + + #[cfg(not(no_global_oom_handling))] + pub unsafe fn new_cyclic_in(data_fn: F, alloc: A) -> Self + where + A: Allocator, + F: FnOnce(&RawWeak) -> T, + R: RcOps, + { + let mut weak = RawWeak::new_uninit_in::<0>(alloc); + let guard = unsafe { GuardedWeak::::new(&mut weak) }; + let data = data_fn(&guard.weak); + + mem::forget(guard); + + unsafe { RawUniqueRc::from_weak_with_value(weak, data).into_rc::() } + } + + pub unsafe fn into_inner(self) -> Option + where + A: Allocator, + R: RcOps, + { + unsafe { + R::decrement_ref_count(self.strong_count()) + .then(|| self.weak.assume_init_into_inner::()) + } + } + + pub unsafe fn try_unwrap(self) -> Result> + where + A: Allocator, + R: RcOps, + { + unsafe { + if R::lock_strong_count(self.strong_count()) { + Ok(self.weak.assume_init_into_inner::()) + } else { + Err(self) + } + } + } + + pub unsafe fn unwrap_or_clone(self) -> T + where + T: Clone, + A: Allocator, + R: RcOps, + { + unsafe { + self.try_unwrap::().unwrap_or_else(|mut rc| { + let guard = GuardedRc::::new(&mut rc); + + T::clone(guard.rc.as_ref()) + }) + } + } +} + +impl RawRc, A> { + pub fn try_new_uninit() -> Result + where + A: Allocator + 
Default, + { + RawWeak::try_new_uninit::<1>().map(|weak| unsafe { Self::from_weak(weak) }) + } + + pub fn try_new_uninit_in(alloc: A) -> Result + where + A: Allocator, + { + RawWeak::try_new_uninit_in::<1>(alloc).map(|weak| unsafe { Self::from_weak(weak) }) + } + + pub fn try_new_zeroed() -> Result + where + A: Allocator + Default, + { + RawWeak::try_new_zeroed::<1>().map(|weak| unsafe { Self::from_weak(weak) }) + } + + pub fn try_new_zeroed_in(alloc: A) -> Result + where + A: Allocator, + { + RawWeak::try_new_zeroed_in::<1>(alloc).map(|weak| unsafe { Self::from_weak(weak) }) + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_uninit() -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak(RawWeak::new_uninit::<1>()) } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_uninit_in(alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak(RawWeak::new_uninit_in::<1>(alloc)) } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_zeroed() -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak(RawWeak::new_zeroed::<1>()) } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_zeroed_in(alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak(RawWeak::new_zeroed_in::<1>(alloc)) } + } + + pub unsafe fn assume_init(self) -> RawRc { + unsafe { self.cast() } + } +} + +impl RawRc<[T], A> { + #[cfg(not(no_global_oom_handling))] + unsafe fn from_iter_exact(iter: I, length: usize) -> Self + where + A: Allocator + Default, + I: Iterator, + { + struct Guard { + head: NonNull, + tail: NonNull, + } + + impl Drop for Guard { + fn drop(&mut self) { + unsafe { + let length = self.tail.sub_ptr(self.head); + + NonNull::<[T]>::slice_from_raw_parts(self.head, length).drop_in_place(); + } + } + } + + let rc_layout = RcLayout::of_slice::(length).unwrap(); + let alloc = A::default(); + + unsafe { + let ptr = allocate_for_rc_with::(&alloc, &rc_layout, |ptr| { + let ptr = ptr.cast::(); + let mut guard = Guard:: { head: ptr, tail: ptr }; + + iter.for_each(|value| { + guard.tail.write(value); + guard.tail = guard.tail.add(1); + }); + + mem::forget(guard); + }); + + Self::from_raw_parts(NonNull::slice_from_raw_parts(ptr.cast::(), length), alloc) + } + } +} + +impl RawRc<[MaybeUninit], A> { + #[cfg(not(no_global_oom_handling))] + pub fn new_uninit_slice(length: usize) -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak(RawWeak::new_uninit_slice::<1>(length)) } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_uninit_slice_in(length: usize, alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak(RawWeak::new_uninit_slice_in::<1>(length, alloc)) } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_zeroed_slice(length: usize) -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak(RawWeak::new_zeroed_slice::<1>(length)) } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_zeroed_slice_in(length: usize, alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak(RawWeak::new_zeroed_slice_in::<1>(length, alloc)) } + } + + pub unsafe fn assume_init(self) -> RawRc<[T], A> { + unsafe { self.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) } + } +} + +impl RawRc { + pub fn downcast(self) -> Result, Self> + where + T: Any, + { + if self.as_ref().is::() { Ok(unsafe { self.downcast_unchecked() }) } else { Err(self) } + } + + pub unsafe fn downcast_unchecked(self) -> RawRc + where + T: Any, + { + unsafe { self.cast() } + } +} + +#[cfg(not(no_sync))] +impl 
RawRc { + pub fn downcast(self) -> Result, Self> + where + T: Any, + { + if self.as_ref().is::() { Ok(unsafe { self.downcast_unchecked() }) } else { Err(self) } + } + + pub unsafe fn downcast_unchecked(self) -> RawRc + where + T: Any, + { + unsafe { self.cast() } + } +} + +impl AsRef for RawRc +where + T: ?Sized, +{ + fn as_ref(&self) -> &T { + unsafe { self.weak.as_ref_unchecked() } + } +} + +impl CoerceUnsized> for RawRc +where + T: ?Sized + Unsize, + U: ?Sized, +{ +} + +impl Debug for RawRc +where + T: Debug + ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + Debug::fmt(self.as_ref(), f) + } +} + +impl Display for RawRc +where + T: Display + ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + Display::fmt(self.as_ref(), f) + } +} + +impl DispatchFromDyn> for RawRc +where + T: ?Sized + Unsize, + U: ?Sized, +{ +} + +impl Pointer for RawRc +where + T: ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + Pointer::fmt(&self.as_ptr(), f) + } +} + +#[cfg(not(no_global_oom_handling))] +impl Default for RawRc +where + T: Default, + A: Allocator + Default, +{ + fn default() -> Self { + Self::new_with(T::default) + } +} + +#[cfg(not(no_global_oom_handling))] +impl Default for RawRc<[T], A> +where + A: Allocator + Default, +{ + fn default() -> Self { + RawRc::new([]) + } +} + +#[cfg(not(no_global_oom_handling))] +impl Default for RawRc +where + A: Allocator + Default, +{ + fn default() -> Self { + RawRc::from("") + } +} + +#[cfg(not(no_global_oom_handling))] +impl From for RawRc +where + A: Allocator + Default, +{ + fn from(value: T) -> Self { + Self::new(value) + } +} + +#[cfg(not(no_global_oom_handling))] +impl From> for RawRc +where + T: ?Sized, + A: Allocator, +{ + fn from(value: Box) -> Self { + let value_ref = &*value; + let alloc_ref = Box::allocator(&value); + + unsafe { + let rc_ptr = allocate_for_rc_with_value::(alloc_ref, value_ref); + let (box_ptr, alloc) = Box::into_raw_with_allocator(value); + + drop(Box::from_raw_in(box_ptr as *mut ManuallyDrop, &alloc)); + + Self::from_raw_parts(rc_ptr, alloc) + } + } +} + +#[cfg(not(no_global_oom_handling))] +trait SpecRawRcFromSlice { + fn spec_from_slice(slice: &[T]) -> Self; +} + +#[cfg(not(no_global_oom_handling))] +impl SpecRawRcFromSlice for RawRc<[T], A> +where + A: Allocator + Default, + T: Clone, +{ + default fn spec_from_slice(slice: &[T]) -> Self { + unsafe { Self::from_iter_exact(slice.iter().cloned(), slice.len()) } + } +} + +#[cfg(not(no_global_oom_handling))] +impl SpecRawRcFromSlice for RawRc<[T], A> +where + A: Allocator + Default, + T: Copy, +{ + fn spec_from_slice(slice: &[T]) -> Self { + let alloc = A::default(); + let ptr = allocate_for_rc_with_value::(&alloc, slice); + + unsafe { Self::from_raw_parts(ptr, alloc) } + } +} + +#[cfg(not(no_global_oom_handling))] +impl From<&[T]> for RawRc<[T], A> +where + A: Allocator + Default, + T: Clone, +{ + fn from(value: &[T]) -> Self { + Self::spec_from_slice(value) + } +} + +#[cfg(not(no_global_oom_handling))] +impl From<&mut [T]> for RawRc<[T], A> +where + A: Allocator + Default, + T: Clone, +{ + fn from(value: &mut [T]) -> Self { + Self::from(&*value) + } +} + +#[cfg(not(no_global_oom_handling))] +impl From<&str> for RawRc +where + A: Allocator + Default, +{ + fn from(value: &str) -> Self { + let rc_of_bytes = RawRc::<[u8], A>::from(value.as_bytes()); + + unsafe { rc_of_bytes.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) } + } +} + +#[cfg(not(no_global_oom_handling))] +impl From<&mut str> for RawRc +where + A: Allocator + 
Default, +{ + fn from(value: &mut str) -> Self { + Self::from(&*value) + } +} + +#[cfg(not(no_global_oom_handling))] +impl From for RawRc { + fn from(value: String) -> Self { + let rc_of_bytes = RawRc::<[u8], Global>::from(value.into_bytes()); + + unsafe { rc_of_bytes.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) } + } +} + +impl From> for RawRc<[u8], A> { + fn from(value: RawRc) -> Self { + unsafe { value.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) } + } +} + +#[cfg(not(no_global_oom_handling))] +impl From<[T; N]> for RawRc<[T], A> +where + A: Allocator + Default, +{ + fn from(value: [T; N]) -> Self { + RawRc::new(value) + } +} + +#[cfg(not(no_global_oom_handling))] +impl From> for RawRc<[T], A> +where + A: Allocator, +{ + fn from(value: Vec) -> Self { + let value_ref = &*value; + let alloc_ref = value.allocator(); + let rc_ptr = allocate_for_rc_with_value::(alloc_ref, value_ref); + let (vec_ptr, _length, capacity, alloc) = value.into_raw_parts_with_alloc(); + + unsafe { + drop(Vec::from_raw_parts_in(vec_ptr, 0, capacity, &alloc)); + + Self::from_raw_parts(rc_ptr, alloc) + } + } +} + +impl TryFrom> for RawRc<[T; N], A> { + type Error = RawRc<[T], A>; + + fn try_from(value: RawRc<[T], A>) -> Result { + if value.as_ref().len() == N { Ok(unsafe { value.cast() }) } else { Err(value) } + } +} + +#[cfg(not(no_global_oom_handling))] +trait SpecRawRcFromIter { + fn spec_from_iter(iter: I) -> Self; +} + +#[cfg(not(no_global_oom_handling))] +impl SpecRawRcFromIter for RawRc<[I::Item], Global> +where + I: Iterator, +{ + default fn spec_from_iter(iter: I) -> Self { + Self::from(iter.collect::>()) + } +} + +#[cfg(not(no_global_oom_handling))] +impl SpecRawRcFromIter for RawRc<[I::Item], Global> +where + I: TrustedLen, +{ + fn spec_from_iter(iter: I) -> Self { + // This is the case for a `TrustedLen` iterator. + + if let (low, Some(high)) = iter.size_hint() { + debug_assert_eq!( + low, + high, + "TrustedLen iterator's size hint is not exact: {:?}", + (low, high) + ); + + // SAFETY: We need to ensure that the iterator has an exact length and we have. + unsafe { Self::from_iter_exact(iter, low) } + } else { + // TrustedLen contract guarantees that `upper_bound == None` implies an iterator + // length exceeding `usize::MAX`. + // The default implementation would collect into a vec which would panic. + // Thus we panic here immediately without invoking `Vec` code. + panic!("capacity overflow"); + } + } +} + +#[cfg(not(no_global_oom_handling))] +impl FromIterator for RawRc<[T], Global> { + fn from_iter>(iter: I) -> Self { + Self::spec_from_iter(iter.into_iter()) + } +} + +impl Hash for RawRc +where + T: Hash + ?Sized, +{ + fn hash(&self, state: &mut H) { + T::hash(self.as_ref(), state); + } +} + +// Hack to allow specializing on `Eq` even though `Eq` has a method. 
+#[rustc_unsafe_specialization_marker] +trait MarkerEq: PartialEq {} + +impl MarkerEq for T where T: Eq {} + +trait SpecPartialEq { + fn spec_eq(&self, other: &Self) -> bool; + fn spec_ne(&self, other: &Self) -> bool; +} + +impl SpecPartialEq for RawRc +where + T: PartialEq + ?Sized, +{ + default fn spec_eq(&self, other: &Self) -> bool { + T::eq(self.as_ref(), other.as_ref()) + } + + default fn spec_ne(&self, other: &Self) -> bool { + T::ne(self.as_ref(), other.as_ref()) + } +} + +impl SpecPartialEq for RawRc +where + T: MarkerEq + ?Sized, +{ + fn spec_eq(&self, other: &Self) -> bool { + Self::ptr_eq(self, other) || T::eq(self.as_ref(), other.as_ref()) + } + + fn spec_ne(&self, other: &Self) -> bool { + Self::ptr_ne(self, other) && T::ne(self.as_ref(), other.as_ref()) + } +} + +impl PartialEq for RawRc +where + T: PartialEq + ?Sized, +{ + fn eq(&self, other: &Self) -> bool { + Self::spec_eq(self, other) + } + + fn ne(&self, other: &Self) -> bool { + Self::spec_ne(self, other) + } +} + +impl Eq for RawRc where T: Eq + ?Sized {} + +impl PartialOrd for RawRc +where + T: PartialOrd + ?Sized, +{ + fn partial_cmp(&self, other: &Self) -> Option { + T::partial_cmp(self.as_ref(), other.as_ref()) + } + + fn lt(&self, other: &Self) -> bool { + T::lt(self.as_ref(), other.as_ref()) + } + + fn le(&self, other: &Self) -> bool { + T::le(self.as_ref(), other.as_ref()) + } + + fn gt(&self, other: &Self) -> bool { + T::gt(self.as_ref(), other.as_ref()) + } + + fn ge(&self, other: &Self) -> bool { + T::ge(self.as_ref(), other.as_ref()) + } +} + +impl Ord for RawRc +where + T: Ord + ?Sized, +{ + fn cmp(&self, other: &Self) -> core::cmp::Ordering { + T::cmp(self.as_ref(), other.as_ref()) + } +} + +unsafe impl PinCoerceUnsized for RawRc +where + T: ?Sized, + A: Allocator, +{ +} + +#[repr(transparent)] +pub struct RawUniqueRc +where + T: ?Sized, +{ + weak: RawWeak, +} + +impl RawUniqueRc +where + T: ?Sized, +{ + pub unsafe fn downgrade(&self) -> RawWeak + where + A: Clone, + R: RcOps, + { + unsafe { self.weak.clone::() } + } + + pub unsafe fn drop(&mut self) + where + A: Allocator, + R: RcOps, + { + unsafe { self.weak.assume_init_drop::() }; + } + + pub unsafe fn into_rc(self) -> RawRc + where + R: RcOps, + { + unsafe { + R::unlock_strong_count(self.weak.strong_count_unchecked()); + + RawRc::from_weak(self.weak) + } + } +} + +impl RawUniqueRc { + #[cfg(not(no_global_oom_handling))] + unsafe fn from_weak_with_value(weak: RawWeak, value: T) -> Self { + unsafe { weak.as_ptr().write(value) }; + + Self { weak } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new(value: T) -> Self + where + A: Allocator + Default, + { + unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<0>(), value) } + } + + #[cfg(not(no_global_oom_handling))] + pub fn new_in(value: T, alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<0>(alloc), value) } + } +} + +impl AsRef for RawUniqueRc +where + T: ?Sized, +{ + fn as_ref(&self) -> &T { + unsafe { self.weak.as_ref_unchecked() } + } +} + +impl AsMut for RawUniqueRc +where + T: ?Sized, +{ + fn as_mut(&mut self) -> &mut T { + unsafe { self.weak.get_mut_unchecked() } + } +} + +impl CoerceUnsized> for RawUniqueRc +where + T: ?Sized + Unsize, + U: ?Sized, + A: Allocator, +{ +} + +impl Debug for RawUniqueRc +where + T: Debug + ?Sized, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + Debug::fmt(self.as_ref(), f) + } +} + +impl DispatchFromDyn> for RawUniqueRc +where + T: ?Sized + Unsize, + U: ?Sized, +{ +} diff --git 
a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index e014404eff35b..b0d6711233028 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -242,34 +242,26 @@ #![stable(feature = "rust1", since = "1.0.0")] use core::any::Any; -use core::cell::Cell; +use core::cell::UnsafeCell; #[cfg(not(no_global_oom_handling))] use core::clone::CloneToUninit; use core::cmp::Ordering; use core::hash::{Hash, Hasher}; -use core::intrinsics::abort; -#[cfg(not(no_global_oom_handling))] -use core::iter; -use core::marker::{PhantomData, Unsize}; -use core::mem::{self, ManuallyDrop, align_of_val_raw}; +use core::marker::Unsize; +use core::mem::{self, ManuallyDrop}; use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver}; use core::panic::{RefUnwindSafe, UnwindSafe}; #[cfg(not(no_global_oom_handling))] use core::pin::Pin; use core::pin::PinCoerceUnsized; -use core::ptr::{self, NonNull, drop_in_place}; -#[cfg(not(no_global_oom_handling))] -use core::slice::from_raw_parts_mut; -use core::{borrow, fmt, hint}; -#[cfg(test)] -use std::boxed::Box; +use core::ptr::{self, NonNull}; +use core::{borrow, fmt, hint, intrinsics}; -#[cfg(not(no_global_oom_handling))] -use crate::alloc::handle_alloc_error; -use crate::alloc::{AllocError, Allocator, Global, Layout}; +use crate::alloc::{AllocError, Allocator, Global}; use crate::borrow::{Cow, ToOwned}; -#[cfg(not(test))] +#[cfg(not(no_global_oom_handling))] use crate::boxed::Box; +use crate::raw_rc::{self, RawRc, RawUniqueRc, RawWeak}; #[cfg(not(no_global_oom_handling))] use crate::string::String; #[cfg(not(no_global_oom_handling))] @@ -278,23 +270,103 @@ use crate::vec::Vec; #[cfg(test)] mod tests; -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -#[repr(C)] -struct RcInner { - strong: Cell, - weak: Cell, - value: T, +#[cfg(not(no_global_oom_handling))] +fn weak_fn_to_raw_weak_fn(f: F) -> impl FnOnce(&RawWeak) -> T +where + F: FnOnce(&Weak) -> T, + A: Allocator, +{ + move |raw_weak: &RawWeak| f(unsafe { mem::transmute(raw_weak) }) } -/// Calculate layout for `RcInner` using the inner value's layout -fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const RcInner)`, but this created a misaligned - // reference (see #54908). - Layout::new::>().extend(layout).unwrap().0.pad_to_align() +enum RcOps {} + +impl raw_rc::RcOps for RcOps { + unsafe fn increment_ref_count(count: &UnsafeCell) { + let count = unsafe { &mut *count.get() }; + let strong = *count; + + // We insert an `assume` here to hint LLVM at an otherwise + // missed optimization. + // SAFETY: The reference count will never be zero when this is + // called. + unsafe { hint::assert_unchecked(strong != 0) }; + + let strong = count.wrapping_add(1); + + *count = strong; + + // We want to abort on overflow instead of dropping the value. + // Checking for overflow after the store instead of before + // allows for slightly better code generation. 
+ if intrinsics::unlikely(strong == 0) { + intrinsics::abort(); + } + } + + unsafe fn decrement_ref_count(count: &UnsafeCell) -> bool { + let count = unsafe { &mut *count.get() }; + + *count -= 1; + + *count == 0 + } + + unsafe fn upgrade(strong_count: &UnsafeCell) -> bool { + let strong_count = unsafe { &mut *strong_count.get() }; + + if *strong_count == 0 { + false + } else { + *strong_count += 1; + + true + } + } + + unsafe fn downgrade(weak_count: &UnsafeCell) { + unsafe { Self::increment_ref_count(weak_count) }; + } + + unsafe fn lock_strong_count(strong_count: &UnsafeCell) -> bool { + let strong_count = unsafe { &mut *strong_count.get() }; + + if *strong_count == 1 { + *strong_count = 0; + + true + } else { + false + } + } + + unsafe fn unlock_strong_count(strong_count: &UnsafeCell) { + unsafe { *strong_count.get() = 1 }; + } + + unsafe fn is_unique(strong_count: &UnsafeCell, weak_count: &UnsafeCell) -> bool { + unsafe { *strong_count.get() == 1 && *weak_count.get() == 1 } + } + + #[cfg(not(no_global_oom_handling))] + unsafe fn make_unique(rc: &mut RawRc, by_clone: F, by_move: G) + where + T: ?Sized, + F: FnOnce(&mut RawRc), + G: FnOnce(&mut RawRc), + { + let strong_count = unsafe { &mut *rc.strong_count().get() }; + + if *strong_count == 1 { + if unsafe { *rc.weak_count().get() } != 1 { + *strong_count = 0; + + by_move(rc); + } + } else { + by_clone(rc); + } + } } /// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference @@ -311,13 +383,12 @@ fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout { #[cfg_attr(not(test), rustc_diagnostic_item = "Rc")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_insignificant_dtor] +#[repr(transparent)] pub struct Rc< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - ptr: NonNull>, - phantom: PhantomData>, - alloc: A, + raw_rc: RawRc, } #[stable(feature = "rust1", since = "1.0.0")] @@ -342,58 +413,6 @@ impl, U: ?Sized, A: Allocator> CoerceUnsized> for #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Rc {} -impl Rc { - #[inline] - unsafe fn from_inner(ptr: NonNull>) -> Self { - unsafe { Self::from_inner_in(ptr, Global) } - } - - #[inline] - unsafe fn from_ptr(ptr: *mut RcInner) -> Self { - unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } - } -} - -impl Rc { - #[inline(always)] - fn inner(&self) -> &RcInner { - // This unsafety is ok because while this Rc is alive we're guaranteed - // that the inner pointer is valid. - unsafe { self.ptr.as_ref() } - } - - #[inline] - fn into_inner_with_allocator(this: Self) -> (NonNull>, A) { - let this = mem::ManuallyDrop::new(this); - (this.ptr, unsafe { ptr::read(&this.alloc) }) - } - - #[inline] - unsafe fn from_inner_in(ptr: NonNull>, alloc: A) -> Self { - Self { ptr, phantom: PhantomData, alloc } - } - - #[inline] - unsafe fn from_ptr_in(ptr: *mut RcInner, alloc: A) -> Self { - unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) } - } - - // Non-inlined part of `drop`. - #[inline(never)] - unsafe fn drop_slow(&mut self) { - // Reconstruct the "strong weak" pointer and drop it when this - // variable goes out of scope. This ensures that the memory is - // deallocated even if the destructor of `T` panics. - let _weak = Weak { ptr: self.ptr, alloc: &self.alloc }; - - // Destroy the contained object. - // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed. 
- unsafe { - ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value); - } - } -} - impl Rc { /// Constructs a new `Rc`. /// @@ -407,16 +426,7 @@ impl Rc { #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - unsafe { - Self::from_inner( - Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value })) - .into(), - ) - } + Self { raw_rc: RawRc::new(value) } } /// Constructs a new `Rc` while giving you a `Weak` to the allocation, @@ -476,7 +486,7 @@ impl Rc { where F: FnOnce(&Weak) -> T, { - Self::new_cyclic_in(data_fn, Global) + Self { raw_rc: unsafe { RawRc::new_cyclic::<_, RcOps>(weak_fn_to_raw_weak_fn(data_fn)) } } } /// Constructs a new `Rc` with uninitialized contents. @@ -501,13 +511,7 @@ impl Rc { #[stable(feature = "new_uninit", since = "1.82.0")] #[must_use] pub fn new_uninit() -> Rc> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - <*mut u8>::cast, - )) - } + Rc { raw_rc: RawRc::new_uninit() } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -534,13 +538,7 @@ impl Rc { #[unstable(feature = "new_zeroed_alloc", issue = "129396")] #[must_use] pub fn new_zeroed() -> Rc> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - <*mut u8>::cast, - )) - } + Rc { raw_rc: RawRc::new_zeroed() } } /// Constructs a new `Rc`, returning an error if the allocation fails @@ -556,20 +554,7 @@ impl Rc { /// ``` #[unstable(feature = "allocator_api", issue = "32838")] pub fn try_new(value: T) -> Result, AllocError> { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - unsafe { - Ok(Self::from_inner( - Box::leak(Box::try_new(RcInner { - strong: Cell::new(1), - weak: Cell::new(1), - value, - })?) - .into(), - )) - } + RawRc::try_new(value).map(|raw_rc| Self { raw_rc }) } /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails @@ -595,13 +580,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { - unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - <*mut u8>::cast, - )?)) - } + RawRc::try_new_uninit().map(|raw_rc| Rc { raw_rc }) } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -628,13 +607,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] //#[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { - unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - <*mut u8>::cast, - )?)) - } + RawRc::try_new_zeroed().map(|raw_rc| Rc { raw_rc }) } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `value` will be pinned in memory and unable to be moved. 
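The `RcOps` implementation above supplies the single-threaded count bookkeeping (plain `usize` arithmetic behind `UnsafeCell`), while the shared machinery lives in `raw_rc` and is generic over an `R: RcOps` parameter; the diffstat shows `sync.rs` changing substantially as well, presumably to build on the same core. The toy below only illustrates the general pattern of writing the logic once and plugging in either unsynchronized or atomic bookkeeping; `CountOps`, `LocalOps`, and `AtomicOps` are invented for this sketch, and the real trait in raw_rc.rs works on `&UnsafeCell<usize>` and has more operations (upgrade, lock/unlock, make_unique).

// Simplified illustration of the pattern: shared generic code calls a small
// trait for count bookkeeping, and each front end picks an implementation.
use std::cell::Cell;
use std::sync::atomic::{AtomicUsize, Ordering};

trait CountOps {
    type Count;
    fn new(value: usize) -> Self::Count;
    fn increment(count: &Self::Count);
    /// Returns true when the count reached zero.
    fn decrement(count: &Self::Count) -> bool;
}

// Single-threaded bookkeeping, in the spirit of rc.rs: plain loads and stores.
enum LocalOps {}

impl CountOps for LocalOps {
    type Count = Cell<usize>;
    fn new(value: usize) -> Self::Count {
        Cell::new(value)
    }
    fn increment(count: &Self::Count) {
        count.set(count.get() + 1);
    }
    fn decrement(count: &Self::Count) -> bool {
        let new = count.get() - 1;
        count.set(new);
        new == 0
    }
}

// Thread-safe bookkeeping in the spirit of sync.rs (the details there differ).
enum AtomicOps {}

impl CountOps for AtomicOps {
    type Count = AtomicUsize;
    fn new(value: usize) -> Self::Count {
        AtomicUsize::new(value)
    }
    fn increment(count: &Self::Count) {
        count.fetch_add(1, Ordering::Relaxed);
    }
    fn decrement(count: &Self::Count) -> bool {
        count.fetch_sub(1, Ordering::Release) == 1
    }
}

// Code written once, parameterized over the bookkeeping strategy.
fn retain_release<O: CountOps>(count: &O::Count) -> bool {
    O::increment(count);
    O::decrement(count)
}

fn main() {
    let local = LocalOps::new(1);
    let shared = AtomicOps::new(1);
    assert!(!retain_release::<LocalOps>(&local));
    assert!(!retain_release::<AtomicOps>(&shared));
}

Because the implementor is an uninhabited enum used purely as a type parameter (as `enum RcOps {}` is in rc.rs above), the calls monomorphize and inline, with no runtime dispatch.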
@@ -662,12 +635,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_in(value: T, alloc: A) -> Rc { - // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable. - // That would make code size bigger. - match Self::try_new_in(value, alloc) { - Ok(m) => m, - Err(_) => handle_alloc_error(Layout::new::>()), - } + Self { raw_rc: RawRc::new_in(value, alloc) } } /// Constructs a new `Rc` with uninitialized contents in the provided allocator. @@ -697,16 +665,7 @@ impl Rc { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_uninit_in(alloc: A) -> Rc, A> { - unsafe { - Rc::from_ptr_in( - Rc::allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate(layout), - <*mut u8>::cast, - ), - alloc, - ) - } + Rc { raw_rc: RawRc::new_uninit_in(alloc) } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -735,16 +694,7 @@ impl Rc { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_zeroed_in(alloc: A) -> Rc, A> { - unsafe { - Rc::from_ptr_in( - Rc::allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate_zeroed(layout), - <*mut u8>::cast, - ), - alloc, - ) - } + Rc { raw_rc: RawRc::new_zeroed_in(alloc) } } /// Constructs a new `Rc` in the given allocator while giving you a `Weak` to the allocation, @@ -782,47 +732,11 @@ impl Rc { where F: FnOnce(&Weak) -> T, { - // Construct the inner in the "uninitialized" state with a single - // weak reference. - let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in( - RcInner { - strong: Cell::new(0), - weak: Cell::new(1), - value: mem::MaybeUninit::::uninit(), + Self { + raw_rc: unsafe { + RawRc::new_cyclic_in::<_, RcOps>(weak_fn_to_raw_weak_fn(data_fn), alloc) }, - alloc, - )); - let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into(); - let init_ptr: NonNull> = uninit_ptr.cast(); - - let weak = Weak { ptr: init_ptr, alloc }; - - // It's important we don't give up ownership of the weak pointer, or - // else the memory might be freed by the time `data_fn` returns. If - // we really wanted to pass ownership, we could create an additional - // weak pointer for ourselves, but this would result in additional - // updates to the weak reference count which might not be necessary - // otherwise. - let data = data_fn(&weak); - - let strong = unsafe { - let inner = init_ptr.as_ptr(); - ptr::write(&raw mut (*inner).value, data); - - let prev_value = (*inner).strong.get(); - debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - (*inner).strong.set(1); - - // Strong references should collectively own a shared weak reference, - // so don't run the destructor for our old weak reference. - // Calling into_raw_with_allocator has the double effect of giving us back the allocator, - // and forgetting the weak reference. - let alloc = weak.into_raw_with_allocator().1; - - Rc::from_inner_in(init_ptr, alloc) - }; - - strong + } } /// Constructs a new `Rc` in the provided allocator, returning an error if the allocation @@ -841,15 +755,7 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new_in(value: T, alloc: A) -> Result { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. 
- let (ptr, alloc) = Box::into_unique(Box::try_new_in( - RcInner { strong: Cell::new(1), weak: Cell::new(1), value }, - alloc, - )?); - Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) }) + RawRc::try_new_in(value, alloc).map(|raw_rc| Self { raw_rc }) } /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an @@ -880,16 +786,7 @@ impl Rc { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn try_new_uninit_in(alloc: A) -> Result, A>, AllocError> { - unsafe { - Ok(Rc::from_ptr_in( - Rc::try_allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate(layout), - <*mut u8>::cast, - )?, - alloc, - )) - } + RawRc::try_new_uninit_in(alloc).map(|raw_rc| Rc { raw_rc }) } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -919,16 +816,7 @@ impl Rc { //#[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn try_new_zeroed_in(alloc: A) -> Result, A>, AllocError> { - unsafe { - Ok(Rc::from_ptr_in( - Rc::try_allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate_zeroed(layout), - <*mut u8>::cast, - )?, - alloc, - )) - } + RawRc::try_new_zeroed_in(alloc).map(|raw_rc| Rc { raw_rc }) } /// Constructs a new `Pin>` in the provided allocator. If `T` does not implement `Unpin`, then @@ -965,22 +853,7 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { - if Rc::strong_count(&this) == 1 { - let this = ManuallyDrop::new(this); - - let val: T = unsafe { ptr::read(&**this) }; // copy the contained object - let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator - - // Indicate to Weaks that they can't be promoted by decrementing - // the strong count, and then remove the implicit "strong weak" - // pointer while also handling drop logic by just crafting a - // fake Weak. - this.inner().dec_strong(); - let _weak = Weak { ptr: this.ptr, alloc }; - Ok(val) - } else { - Err(this) - } + unsafe { Rc::into_raw_rc(this).try_unwrap::().map_err(|raw_rc| Self { raw_rc }) } } /// Returns the inner value, if the `Rc` has exactly one strong reference. @@ -1016,7 +889,7 @@ impl Rc { #[inline] #[stable(feature = "rc_into_inner", since = "1.70.0")] pub fn into_inner(this: Self) -> Option { - Rc::try_unwrap(this).ok() + unsafe { RawRc::into_inner::(Rc::into_raw_rc(this)) } } } @@ -1046,7 +919,7 @@ impl Rc<[T]> { #[stable(feature = "new_uninit", since = "1.82.0")] #[must_use] pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) } + Rc { raw_rc: RawRc::new_uninit_slice(len) } } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -1073,16 +946,7 @@ impl Rc<[T]> { #[unstable(feature = "new_zeroed_alloc", issue = "129396")] #[must_use] pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem.cast::(), len) - as *mut RcInner<[mem::MaybeUninit]> - }, - )) - } + Rc { raw_rc: RawRc::new_zeroed_slice(len) } } /// Converts the reference-counted slice into a reference-counted array. 
@@ -1136,7 +1000,7 @@ impl Rc<[T], A> { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit], A> { - unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) } + Rc { raw_rc: RawRc::new_uninit_slice_in(len, alloc) } } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -1165,19 +1029,7 @@ impl Rc<[T], A> { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit], A> { - unsafe { - Rc::from_ptr_in( - Rc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| alloc.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem.cast::(), len) - as *mut RcInner<[mem::MaybeUninit]> - }, - ), - alloc, - ) - } + Rc { raw_rc: RawRc::new_zeroed_slice_in(len, alloc) } } } @@ -1213,8 +1065,7 @@ impl Rc, A> { #[stable(feature = "new_uninit", since = "1.82.0")] #[inline] pub unsafe fn assume_init(self) -> Rc { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - unsafe { Rc::from_inner_in(ptr.cast(), alloc) } + Rc { raw_rc: unsafe { Rc::into_raw_rc(self).assume_init() } } } } @@ -1253,8 +1104,7 @@ impl Rc<[mem::MaybeUninit], A> { #[stable(feature = "new_uninit", since = "1.82.0")] #[inline] pub unsafe fn assume_init(self) -> Rc<[T], A> { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) } + Rc { raw_rc: unsafe { Rc::into_raw_rc(self).assume_init() } } } } @@ -1323,7 +1173,7 @@ impl Rc { #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Self::from_raw_in(ptr, Global) } + Self { raw_rc: unsafe { RawRc::from_raw(NonNull::new_unchecked(ptr.cast_mut())) } } } /// Increments the strong reference count on the `Rc` associated with the @@ -1356,7 +1206,11 @@ impl Rc { #[inline] #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] pub unsafe fn increment_strong_count(ptr: *const T) { - unsafe { Self::increment_strong_count_in(ptr, Global) } + unsafe { + RawRc::::increment_strong_count::(NonNull::new_unchecked( + ptr.cast_mut(), + )) + }; } /// Decrements the strong reference count on the `Rc` associated with the @@ -1390,11 +1244,19 @@ impl Rc { #[inline] #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] pub unsafe fn decrement_strong_count(ptr: *const T) { - unsafe { Self::decrement_strong_count_in(ptr, Global) } + unsafe { + RawRc::::decrement_strong_count::(NonNull::new_unchecked( + ptr.cast_mut(), + )) + } } } impl Rc { + fn into_raw_rc(this: Self) -> RawRc { + unsafe { ptr::read(&ManuallyDrop::new(this).raw_rc) } + } + /// Returns a reference to the underlying allocator. /// /// Note: this is an associated function, which means that you have @@ -1403,7 +1265,7 @@ impl Rc { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn allocator(this: &Self) -> &A { - &this.alloc + this.raw_rc.allocator() } /// Consumes the `Rc`, returning the wrapped pointer. @@ -1426,8 +1288,7 @@ impl Rc { #[stable(feature = "rc_raw", since = "1.17.0")] #[rustc_never_returns_null_ptr] pub fn into_raw(this: Self) -> *const T { - let this = ManuallyDrop::new(this); - Self::as_ptr(&*this) + Rc::into_raw_rc(this).into_raw().as_ptr() } /// Consumes the `Rc`, returning the wrapped pointer and allocator. 
@@ -1451,11 +1312,9 @@ impl Rc { #[must_use = "losing the pointer will leak memory"] #[unstable(feature = "allocator_api", issue = "32838")] pub fn into_raw_with_allocator(this: Self) -> (*const T, A) { - let this = mem::ManuallyDrop::new(this); - let ptr = Self::as_ptr(&this); - // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped - let alloc = unsafe { ptr::read(&this.alloc) }; - (ptr, alloc) + let (ptr, alloc) = Rc::into_raw_rc(this).into_raw_parts(); + + (ptr.as_ptr(), alloc) } /// Provides a raw pointer to the data. @@ -1477,12 +1336,7 @@ impl Rc { #[stable(feature = "weak_into_raw", since = "1.45.0")] #[rustc_never_returns_null_ptr] pub fn as_ptr(this: &Self) -> *const T { - let ptr: *mut RcInner = NonNull::as_ptr(this.ptr); - - // SAFETY: This cannot go through Deref::deref or Rc::inner because - // this is required to retain raw/mut provenance such that e.g. `get_mut` can - // write through the pointer after the Rc is recovered through `from_raw`. - unsafe { &raw mut (*ptr).value } + this.raw_rc.as_ptr().as_ptr() } /// Constructs an `Rc` from a raw pointer in the provided allocator. @@ -1554,12 +1408,9 @@ impl Rc { /// ``` #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - let offset = unsafe { data_offset(ptr) }; - - // Reverse the offset to find the original RcInner. - let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner }; - - unsafe { Self::from_ptr_in(rc_ptr, alloc) } + unsafe { + Self { raw_rc: RawRc::from_raw_parts(NonNull::new_unchecked(ptr.cast_mut()), alloc) } + } } /// Creates a new [`Weak`] pointer to this allocation. @@ -1580,10 +1431,7 @@ impl Rc { where A: Clone, { - this.inner().inc_weak(); - // Make sure we do not create a dangling Weak - debug_assert!(!is_dangling(this.ptr.as_ptr())); - Weak { ptr: this.ptr, alloc: this.alloc.clone() } + Weak { raw_weak: unsafe { this.raw_rc.downgrade::() } } } /// Gets the number of [`Weak`] pointers to this allocation. @@ -1601,7 +1449,7 @@ impl Rc { #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { - this.inner().weak() - 1 + unsafe { *this.raw_rc.weak_count().get() - 1 } } /// Gets the number of strong (`Rc`) pointers to this allocation. @@ -1619,7 +1467,7 @@ impl Rc { #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { - this.inner().strong() + unsafe { *this.raw_rc.strong_count().get() } } /// Increments the strong reference count on the `Rc` associated with the @@ -1658,10 +1506,11 @@ impl Rc { where A: Clone, { - // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop - let rc = unsafe { mem::ManuallyDrop::new(Rc::::from_raw_in(ptr, alloc)) }; - // Now increase refcount, but don't drop new refcount either - let _rc_clone: mem::ManuallyDrop<_> = rc.clone(); + unsafe { + RawRc::::increment_strong_count::(NonNull::new_unchecked(ptr.cast_mut())) + }; + + drop(alloc); } /// Decrements the strong reference count on the `Rc` associated with the @@ -1698,14 +1547,12 @@ impl Rc { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) { - unsafe { drop(Rc::from_raw_in(ptr, alloc)) }; - } - - /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to - /// this allocation. 
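[Editorial note: `downgrade` and the two count getters now read straight out of the shared `RefCounts` header; note the `- 1` that still hides the implicit weak reference collectively owned by the strong pointers. User-visible behaviour, with the stable API:]

    use std::rc::Rc;

    let five = Rc::new(5);
    let weak_five = Rc::downgrade(&five);
    assert_eq!(Rc::strong_count(&five), 1);
    assert_eq!(Rc::weak_count(&five), 1);
    drop(weak_five);
    assert_eq!(Rc::weak_count(&five), 0);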
- #[inline] - fn is_unique(this: &Self) -> bool { - Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1 + unsafe { + RawRc::::decrement_strong_count_in::( + NonNull::new_unchecked(ptr.cast_mut()), + alloc, + ) + }; } /// Returns a mutable reference into the given `Rc`, if there are @@ -1735,7 +1582,7 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn get_mut(this: &mut Self) -> Option<&mut T> { - if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None } + unsafe { this.raw_rc.get_mut::() } } /// Returns a mutable reference into the given `Rc`, @@ -1801,9 +1648,7 @@ impl Rc { #[inline] #[unstable(feature = "get_mut_unchecked", issue = "63292")] pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { - // We are careful to *not* create a reference covering the "count" fields, as - // this would conflict with accesses to the reference counts (e.g. by `Weak`). - unsafe { &mut (*this.ptr.as_ptr()).value } + unsafe { this.raw_rc.get_mut_unchecked() } } #[inline] @@ -1824,7 +1669,7 @@ impl Rc { /// assert!(!Rc::ptr_eq(&five, &other_five)); /// ``` pub fn ptr_eq(this: &Self, other: &Self) -> bool { - ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr()) + RawRc::ptr_eq(&this.raw_rc, &other.raw_rc) } } @@ -1883,57 +1728,7 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn make_mut(this: &mut Self) -> &mut T { - let size_of_val = size_of_val::(&**this); - - if Rc::strong_count(this) != 1 { - // Gotta clone the data, there are other Rcs. - - let this_data_ref: &T = &**this; - // `in_progress` drops the allocation if we panic before finishing initializing it. - let mut in_progress: UniqueRcUninit = - UniqueRcUninit::new(this_data_ref, this.alloc.clone()); - - // Initialize with clone of this. - let initialized_clone = unsafe { - // Clone. If the clone panics, `in_progress` will be dropped and clean up. - this_data_ref.clone_to_uninit(in_progress.data_ptr().cast()); - // Cast type of pointer, now that it is initialized. - in_progress.into_rc() - }; - - // Replace `this` with newly constructed Rc. - *this = initialized_clone; - } else if Rc::weak_count(this) != 0 { - // Can just steal the data, all that's left is Weaks - - // We don't need panic-protection like the above branch does, but we might as well - // use the same mechanism. - let mut in_progress: UniqueRcUninit = - UniqueRcUninit::new(&**this, this.alloc.clone()); - unsafe { - // Initialize `in_progress` with move of **this. - // We have to express this in terms of bytes because `T: ?Sized`; there is no - // operation that just copies a value based on its `size_of_val()`. - ptr::copy_nonoverlapping( - ptr::from_ref(&**this).cast::(), - in_progress.data_ptr().cast::(), - size_of_val, - ); - - this.inner().dec_strong(); - // Remove implicit strong-weak ref (no need to craft a fake - // Weak here -- we know other Weaks can clean up for us) - this.inner().dec_weak(); - // Replace `this` with newly constructed Rc that has the moved data. - ptr::write(this, in_progress.into_rc()); - } - } - // This unsafety is ok because we're guaranteed that the pointer - // returned is the *only* pointer that will ever be returned to T. Our - // reference count is guaranteed to be 1 at this point, and we required - // the `Rc` itself to be `mut`, so we're returning the only possible - // reference to the allocation. 
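[Editorial note: `get_mut` becomes a thin call into `RawRc::get_mut::<RcOps>`, which is expected to keep the "single strong reference, no weak references" rule that the removed `is_unique` encoded. A stable-API illustration:]

    use std::rc::Rc;

    let mut x = Rc::new(3);
    *Rc::get_mut(&mut x).unwrap() = 4;
    assert_eq!(*x, 4);

    let y = Rc::clone(&x);
    // Shared now, so no unique access; both handles still alias one allocation.
    assert!(Rc::get_mut(&mut x).is_none());
    assert!(Rc::ptr_eq(&x, &y));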
- unsafe { &mut this.ptr.as_mut().value } + unsafe { this.raw_rc.make_mut::() } } } @@ -1969,7 +1764,7 @@ impl Rc { #[inline] #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")] pub fn unwrap_or_clone(this: Self) -> T { - Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone()) + unsafe { Rc::into_raw_rc(this).unwrap_or_clone::() } } } @@ -1995,13 +1790,9 @@ impl Rc { #[inline] #[stable(feature = "rc_downcast", since = "1.29.0")] pub fn downcast(self) -> Result, Self> { - if (*self).is::() { - unsafe { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - Ok(Rc::from_inner_in(ptr.cast(), alloc)) - } - } else { - Err(self) + match Rc::into_raw_rc(self).downcast::() { + Ok(raw_rc) => Ok(Rc { raw_rc }), + Err(raw_rc) => Err(Self { raw_rc }), } } @@ -2034,208 +1825,7 @@ impl Rc { #[inline] #[unstable(feature = "downcast_unchecked", issue = "90850")] pub unsafe fn downcast_unchecked(self) -> Rc { - unsafe { - let (ptr, alloc) = Rc::into_inner_with_allocator(self); - Rc::from_inner_in(ptr.cast(), alloc) - } - } -} - -impl Rc { - /// Allocates an `RcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided. - /// - /// The function `mem_to_rc_inner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcInner`. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner, - ) -> *mut RcInner { - let layout = rc_inner_layout_for_value_layout(value_layout); - unsafe { - Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner) - .unwrap_or_else(|_| handle_alloc_error(layout)) - } - } - - /// Allocates an `RcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided, - /// returning an error if allocation fails. - /// - /// The function `mem_to_rc_inner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcInner`. - #[inline] - unsafe fn try_allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner, - ) -> Result<*mut RcInner, AllocError> { - let layout = rc_inner_layout_for_value_layout(value_layout); - - // Allocate for the layout. - let ptr = allocate(layout)?; - - // Initialize the RcInner - let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr()); - unsafe { - debug_assert_eq!(Layout::for_value_raw(inner), layout); - - (&raw mut (*inner).strong).write(Cell::new(1)); - (&raw mut (*inner).weak).write(Cell::new(1)); - } - - Ok(inner) - } -} - -impl Rc { - /// Allocates an `RcInner` with sufficient space for an unsized inner value - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner { - // Allocate for the `RcInner` using the given value. 
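[Editorial note: the whole clone-on-write dance removed above collapses into `RawRc::make_mut::<RcOps>`, with `RcOps::make_unique` choosing between the by-clone and by-move paths. The documented semantics are preserved:]

    use std::rc::Rc;

    let mut data = Rc::new(5);
    *Rc::make_mut(&mut data) += 1;   // unique: mutate in place
    let other = Rc::clone(&data);
    *Rc::make_mut(&mut data) += 1;   // shared: clones the value first
    assert_eq!(*data, 7);
    assert_eq!(*other, 6);
    assert!(!Rc::ptr_eq(&data, &other));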
- unsafe { - Rc::::allocate_for_layout( - Layout::for_value_raw(ptr), - |layout| alloc.allocate(layout), - |mem| mem.with_metadata_of(ptr as *const RcInner), - ) - } - } - - #[cfg(not(no_global_oom_handling))] - fn from_box_in(src: Box) -> Rc { - unsafe { - let value_size = size_of_val(&*src); - let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src)); - - // Copy value as bytes - ptr::copy_nonoverlapping( - (&raw const *src) as *const u8, - (&raw mut (*ptr).value) as *mut u8, - value_size, - ); - - // Free the allocation without dropping its contents - let (bptr, alloc) = Box::into_raw_with_allocator(src); - let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop, alloc.by_ref()); - drop(src); - - Self::from_ptr_in(ptr, alloc) - } - } -} - -impl Rc<[T]> { - /// Allocates an `RcInner<[T]>` with the given length. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> { - unsafe { - Self::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut RcInner<[T]>, - ) - } - } - - /// Copy elements from slice into newly allocated `Rc<[T]>` - /// - /// Unsafe because the caller must either take ownership or bind `T: Copy` - #[cfg(not(no_global_oom_handling))] - unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { - unsafe { - let ptr = Self::allocate_for_slice(v.len()); - ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len()); - Self::from_ptr(ptr) - } - } - - /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size. - /// - /// Behavior is undefined should the size be wrong. - #[cfg(not(no_global_oom_handling))] - unsafe fn from_iter_exact(iter: impl Iterator, len: usize) -> Rc<[T]> { - // Panic guard while cloning T elements. - // In the event of a panic, elements that have been written - // into the new RcInner will be dropped, then the memory freed. - struct Guard { - mem: NonNull, - elems: *mut T, - layout: Layout, - n_elems: usize, - } - - impl Drop for Guard { - fn drop(&mut self) { - unsafe { - let slice = from_raw_parts_mut(self.elems, self.n_elems); - ptr::drop_in_place(slice); - - Global.deallocate(self.mem, self.layout); - } - } - } - - unsafe { - let ptr = Self::allocate_for_slice(len); - - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value_raw(ptr); - - // Pointer to first element - let elems = (&raw mut (*ptr).value) as *mut T; - - let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; - - for (i, item) in iter.enumerate() { - ptr::write(elems.add(i), item); - guard.n_elems += 1; - } - - // All clear. Forget the guard so it doesn't free the new RcInner. - mem::forget(guard); - - Self::from_ptr(ptr) - } - } -} - -impl Rc<[T], A> { - /// Allocates an `RcInner<[T]>` with the given length. - #[inline] - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> { - unsafe { - Rc::<[T]>::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| alloc.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut RcInner<[T]>, - ) - } - } -} - -#[cfg(not(no_global_oom_handling))] -/// Specialization trait used for `From<&[T]>`. 
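[Editorial note: the slice helpers deleted here (`allocate_for_slice`, `copy_from_slice`, `from_iter_exact`) back the `From`/`FromIterator` conversions that later hunks forward to `RawRc`. From the caller's point of view the conversions are unchanged:]

    use std::rc::Rc;

    let from_slice: Rc<[i32]> = Rc::from(&[1, 2, 3][..]);
    let from_vec: Rc<[i32]> = Rc::from(vec![1, 2, 3]);
    let from_iter: Rc<[i32]> = (1..=3).collect();
    assert_eq!(from_slice, from_vec);
    assert_eq!(from_vec, from_iter);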
-trait RcFromSlice { - fn from_slice(slice: &[T]) -> Self; -} - -#[cfg(not(no_global_oom_handling))] -impl RcFromSlice for Rc<[T]> { - #[inline] - default fn from_slice(v: &[T]) -> Self { - unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) } - } -} - -#[cfg(not(no_global_oom_handling))] -impl RcFromSlice for Rc<[T]> { - #[inline] - fn from_slice(v: &[T]) -> Self { - unsafe { Rc::copy_from_slice(v) } + Rc { raw_rc: unsafe { Rc::into_raw_rc(self).downcast_unchecked() } } } } @@ -2245,7 +1835,7 @@ impl Deref for Rc { #[inline(always)] fn deref(&self) -> &T { - &self.inner().value + self.raw_rc.as_ref() } } @@ -2298,12 +1888,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc { /// ``` #[inline] fn drop(&mut self) { - unsafe { - self.inner().dec_strong(); - if self.inner().strong() == 0 { - self.drop_slow(); - } - } + unsafe { self.raw_rc.drop::() }; } } @@ -2325,10 +1910,7 @@ impl Clone for Rc { /// ``` #[inline] fn clone(&self) -> Self { - unsafe { - self.inner().inc_strong(); - Self::from_inner_in(self.ptr, self.alloc.clone()) - } + Self { raw_rc: unsafe { self.raw_rc.clone::() } } } } @@ -2347,16 +1929,7 @@ impl Default for Rc { /// ``` #[inline] fn default() -> Rc { - unsafe { - Self::from_inner( - Box::leak(Box::write(Box::new_uninit(), RcInner { - strong: Cell::new(1), - weak: Cell::new(1), - value: T::default(), - })) - .into(), - ) - } + Self { raw_rc: RawRc::default() } } } @@ -2368,7 +1941,7 @@ impl Default for Rc { /// This may or may not share an allocation with other Rcs on the same thread. #[inline] fn default() -> Self { - Rc::from("") + Self { raw_rc: RawRc::default() } } } @@ -2380,53 +1953,7 @@ impl Default for Rc<[T]> { /// This may or may not share an allocation with other Rcs on the same thread. #[inline] fn default() -> Self { - let arr: [T; 0] = []; - Rc::from(arr) - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -trait RcEqIdent { - fn eq(&self, other: &Rc) -> bool; - fn ne(&self, other: &Rc) -> bool; -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl RcEqIdent for Rc { - #[inline] - default fn eq(&self, other: &Rc) -> bool { - **self == **other - } - - #[inline] - default fn ne(&self, other: &Rc) -> bool { - **self != **other - } -} - -// Hack to allow specializing on `Eq` even though `Eq` has a method. -#[rustc_unsafe_specialization_marker] -pub(crate) trait MarkerEq: PartialEq {} - -impl MarkerEq for T {} - -/// We're doing this specialization here, and not as a more general optimization on `&T`, because it -/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to -/// store large values, that are slow to clone, but also heavy to check for equality, causing this -/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to -/// the same value, than two `&T`s. -/// -/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. -#[stable(feature = "rust1", since = "1.0.0")] -impl RcEqIdent for Rc { - #[inline] - fn eq(&self, other: &Rc) -> bool { - Rc::ptr_eq(self, other) || **self == **other - } - - #[inline] - fn ne(&self, other: &Rc) -> bool { - !Rc::ptr_eq(self, other) && **self != **other + Self { raw_rc: RawRc::default() } } } @@ -2452,7 +1979,7 @@ impl PartialEq for Rc { /// ``` #[inline] fn eq(&self, other: &Rc) -> bool { - RcEqIdent::eq(self, other) + RawRc::eq(&self.raw_rc, &other.raw_rc) } /// Inequality for two `Rc`s. 
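[Editorial note: `Deref`, `Clone`, `Drop` and the comparison traits now forward directly to `raw_rc`; in particular `deref` is just `self.raw_rc.as_ref()`, which is where the "zero-cost `deref`" in the patch title comes from. Observable behaviour, for reference:]

    use std::rc::Rc;

    let a = Rc::new(String::from("shared"));
    let b = Rc::clone(&a);           // bumps the strong count, no deep copy
    assert_eq!(Rc::strong_count(&a), 2);
    assert_eq!(*a, *b);              // Deref + PartialEq on the pointed-to values
    drop(b);
    assert_eq!(Rc::strong_count(&a), 1);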
@@ -2474,7 +2001,7 @@ impl PartialEq for Rc { /// ``` #[inline] fn ne(&self, other: &Rc) -> bool { - RcEqIdent::ne(self, other) + RawRc::ne(&self.raw_rc, &other.raw_rc) } } @@ -2499,7 +2026,7 @@ impl PartialOrd for Rc { /// ``` #[inline(always)] fn partial_cmp(&self, other: &Rc) -> Option { - (**self).partial_cmp(&**other) + RawRc::partial_cmp(&self.raw_rc, &other.raw_rc) } /// Less-than comparison for two `Rc`s. @@ -2517,7 +2044,7 @@ impl PartialOrd for Rc { /// ``` #[inline(always)] fn lt(&self, other: &Rc) -> bool { - **self < **other + RawRc::lt(&self.raw_rc, &other.raw_rc) } /// 'Less than or equal to' comparison for two `Rc`s. @@ -2535,7 +2062,7 @@ impl PartialOrd for Rc { /// ``` #[inline(always)] fn le(&self, other: &Rc) -> bool { - **self <= **other + RawRc::le(&self.raw_rc, &other.raw_rc) } /// Greater-than comparison for two `Rc`s. @@ -2553,7 +2080,7 @@ impl PartialOrd for Rc { /// ``` #[inline(always)] fn gt(&self, other: &Rc) -> bool { - **self > **other + RawRc::gt(&self.raw_rc, &other.raw_rc) } /// 'Greater than or equal to' comparison for two `Rc`s. @@ -2571,7 +2098,7 @@ impl PartialOrd for Rc { /// ``` #[inline(always)] fn ge(&self, other: &Rc) -> bool { - **self >= **other + RawRc::ge(&self.raw_rc, &other.raw_rc) } } @@ -2593,35 +2120,35 @@ impl Ord for Rc { /// ``` #[inline] fn cmp(&self, other: &Rc) -> Ordering { - (**self).cmp(&**other) + RawRc::cmp(&self.raw_rc, &other.raw_rc) } } #[stable(feature = "rust1", since = "1.0.0")] impl Hash for Rc { fn hash(&self, state: &mut H) { - (**self).hash(state); + RawRc::hash(&self.raw_rc, state) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Display for Rc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&**self, f) + fmt::Display::fmt(&self.raw_rc, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for Rc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&**self, f) + fmt::Debug::fmt(&self.raw_rc, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Pointer for Rc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Pointer::fmt(&(&raw const **self), f) + fmt::Pointer::fmt(&self.raw_rc, f) } } @@ -2642,7 +2169,7 @@ impl From for Rc { /// assert_eq!(Rc::from(x), rc); /// ``` fn from(t: T) -> Self { - Rc::new(t) + Self { raw_rc: RawRc::from(t) } } } @@ -2663,7 +2190,7 @@ impl From<[T; N]> for Rc<[T]> { /// ``` #[inline] fn from(v: [T; N]) -> Rc<[T]> { - Rc::<[T; N]>::from(v) + Self { raw_rc: RawRc::from(v) } } } @@ -2682,7 +2209,7 @@ impl From<&[T]> for Rc<[T]> { /// ``` #[inline] fn from(v: &[T]) -> Rc<[T]> { - >::from_slice(v) + Self { raw_rc: RawRc::from(v) } } } @@ -2702,7 +2229,7 @@ impl From<&mut [T]> for Rc<[T]> { /// ``` #[inline] fn from(v: &mut [T]) -> Rc<[T]> { - Rc::from(&*v) + Self { raw_rc: RawRc::from(v) } } } @@ -2720,8 +2247,7 @@ impl From<&str> for Rc { /// ``` #[inline] fn from(v: &str) -> Rc { - let rc = Rc::<[u8]>::from(v.as_bytes()); - unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) } + Self { raw_rc: RawRc::from(v) } } } @@ -2741,7 +2267,7 @@ impl From<&mut str> for Rc { /// ``` #[inline] fn from(v: &mut str) -> Rc { - Rc::from(&*v) + Self { raw_rc: RawRc::from(v) } } } @@ -2760,7 +2286,7 @@ impl From for Rc { /// ``` #[inline] fn from(v: String) -> Rc { - Rc::from(&v[..]) + Self { raw_rc: RawRc::from(v) } } } @@ -2779,7 +2305,7 @@ impl From> for Rc { /// ``` #[inline] fn from(v: Box) -> Rc { - Rc::from_box_in(v) + Self { raw_rc: RawRc::from(v) } } } @@ -2798,18 +2324,7 @@ impl From> for 
Rc<[T], A> { /// ``` #[inline] fn from(v: Vec) -> Rc<[T], A> { - unsafe { - let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); - - let rc_ptr = Self::allocate_for_slice_in(len, &alloc); - ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len); - - // Create a `Vec` with length 0, to deallocate the buffer - // without dropping its contents or the allocator - let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc); - - Self::from_ptr_in(rc_ptr, alloc) - } + Self { raw_rc: RawRc::from(v) } } } @@ -2854,8 +2369,7 @@ impl From> for Rc<[u8]> { /// ``` #[inline] fn from(rc: Rc) -> Self { - // SAFETY: `str` has the same layout as `[u8]`. - unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) } + Self { raw_rc: RawRc::from(Rc::into_raw_rc(rc)) } } } @@ -2864,11 +2378,9 @@ impl TryFrom> for Rc<[T; N], A> { type Error = Rc<[T], A>; fn try_from(boxed_slice: Rc<[T], A>) -> Result { - if boxed_slice.len() == N { - let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice); - Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) }) - } else { - Err(boxed_slice) + match RawRc::try_from(Rc::into_raw_rc(boxed_slice)) { + Ok(raw_rc) => Ok(Self { raw_rc }), + Err(raw_rc) => Err(Rc { raw_rc }), } } } @@ -2915,47 +2427,7 @@ impl FromIterator for Rc<[T]> { /// # assert_eq!(&*evens, &*(0..10).collect::>()); /// ``` fn from_iter>(iter: I) -> Self { - ToRcSlice::to_rc_slice(iter.into_iter()) - } -} - -/// Specialization trait used for collecting into `Rc<[T]>`. -#[cfg(not(no_global_oom_handling))] -trait ToRcSlice: Iterator + Sized { - fn to_rc_slice(self) -> Rc<[T]>; -} - -#[cfg(not(no_global_oom_handling))] -impl> ToRcSlice for I { - default fn to_rc_slice(self) -> Rc<[T]> { - self.collect::>().into() - } -} - -#[cfg(not(no_global_oom_handling))] -impl> ToRcSlice for I { - fn to_rc_slice(self) -> Rc<[T]> { - // This is the case for a `TrustedLen` iterator. - let (low, high) = self.size_hint(); - if let Some(high) = high { - debug_assert_eq!( - low, - high, - "TrustedLen iterator's size hint is not exact: {:?}", - (low, high) - ); - - unsafe { - // SAFETY: We need to ensure that the iterator has an exact length and we have. - Rc::from_iter_exact(self, low) - } - } else { - // TrustedLen contract guarantees that `upper_bound == None` implies an iterator - // length exceeding `usize::MAX`. - // The default implementation would collect into a vec which would panic. - // Thus we panic here immediately without invoking `Vec` code. - panic!("capacity overflow"); - } + Self { raw_rc: RawRc::from_iter(iter) } } } @@ -2983,18 +2455,12 @@ impl> ToRcSlice for I { /// [`upgrade`]: Weak::upgrade #[stable(feature = "rc_weak", since = "1.4.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "RcWeak")] +#[repr(transparent)] pub struct Weak< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - // This is a `NonNull` to allow optimizing the size of this type in enums, - // but it is not necessarily a valid pointer. - // `Weak::new` sets this to `usize::MAX` so that it doesn’t need - // to allocate space on the heap. That's not a value a real pointer - // will ever have because RcInner has alignment at least 2. - // This is only possible when `T: Sized`; unsized `T` never dangle. 
- ptr: NonNull>, - alloc: A, + raw_weak: RawWeak, } #[stable(feature = "rc_weak", since = "1.4.0")] @@ -3027,12 +2493,7 @@ impl Weak { #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")] #[must_use] pub const fn new() -> Weak { - Weak { - ptr: unsafe { - NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) - }, - alloc: Global, - } + Self { raw_weak: RawWeak::new_dangling_in(Global) } } } @@ -3054,26 +2515,10 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn new_in(alloc: A) -> Weak { - Weak { - ptr: unsafe { - NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) - }, - alloc, - } + Self { raw_weak: RawWeak::new_dangling_in(alloc) } } } -pub(crate) fn is_dangling(ptr: *const T) -> bool { - (ptr.cast::<()>()).addr() == usize::MAX -} - -/// Helper type to allow accessing the reference counts without -/// making any assertions about the data field. -struct WeakInner<'a> { - weak: &'a Cell, - strong: &'a Cell, -} - impl Weak { /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. /// @@ -3120,16 +2565,20 @@ impl Weak { #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Self::from_raw_in(ptr, Global) } + Self { raw_weak: unsafe { RawWeak::from_raw(NonNull::new_unchecked(ptr.cast_mut())) } } } } impl Weak { + fn into_raw_weak(self) -> RawWeak { + unsafe { ptr::read(&ManuallyDrop::new(self).raw_weak) } + } + /// Returns a reference to the underlying allocator. #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn allocator(&self) -> &A { - &self.alloc + self.raw_weak.allocator() } /// Returns a raw pointer to the object `T` pointed to by this `Weak`. @@ -3160,18 +2609,7 @@ impl Weak { #[must_use] #[stable(feature = "rc_as_ptr", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { - let ptr: *mut RcInner = NonNull::as_ptr(self.ptr); - - if is_dangling(ptr) { - // If the pointer is dangling, we return the sentinel directly. This cannot be - // a valid payload address, as the payload is at least as aligned as RcInner (usize). - ptr as *const T - } else { - // SAFETY: if is_dangling returns false, then the pointer is dereferenceable. - // The payload may be dropped at this point, and we have to maintain provenance, - // so use raw pointer manipulation. - unsafe { &raw mut (*ptr).value } - } + self.raw_weak.as_ptr().as_ptr() } /// Consumes the `Weak` and turns it into a raw pointer. @@ -3204,7 +2642,7 @@ impl Weak { #[must_use = "losing the pointer will leak memory"] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn into_raw(self) -> *const T { - mem::ManuallyDrop::new(self).as_ptr() + self.into_raw_weak().into_raw().as_ptr() } /// Consumes the `Weak`, returning the wrapped pointer and allocator. @@ -3240,11 +2678,9 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn into_raw_with_allocator(self) -> (*const T, A) { - let this = mem::ManuallyDrop::new(self); - let result = this.as_ptr(); - // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped - let alloc = unsafe { ptr::read(&this.alloc) }; - (result, alloc) + let (ptr, alloc) = self.into_raw_weak().into_raw_parts(); + + (ptr.as_ptr(), alloc) } /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. 
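[Editorial note: `Weak::new` now builds on `RawWeak::new_dangling_in`, replacing the hand-rolled `usize::MAX` sentinel pointer; upgrading a dangling or expired weak still yields `None`:]

    use std::rc::{Rc, Weak};

    let empty: Weak<i32> = Weak::new();      // dangling, no allocation
    assert!(empty.upgrade().is_none());

    let strong = Rc::new(42);
    let weak = Rc::downgrade(&strong);
    assert_eq!(weak.upgrade().as_deref(), Some(&42));
    drop(strong);
    assert!(weak.upgrade().is_none());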
@@ -3292,22 +2728,11 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - // See Weak::as_ptr for context on how the input pointer is derived. - - let ptr = if is_dangling(ptr) { - // This is a dangling Weak. - ptr as *mut RcInner - } else { - // Otherwise, we're guaranteed the pointer came from a nondangling Weak. - // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. - let offset = unsafe { data_offset(ptr) }; - // Thus, we reverse the offset to get the whole RcInner. - // SAFETY: the pointer originated from a Weak, so this offset is safe. - unsafe { ptr.byte_sub(offset) as *mut RcInner } - }; - - // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc } + Self { + raw_weak: unsafe { + RawWeak::from_raw_parts(NonNull::new_unchecked(ptr.cast_mut()), alloc) + }, + } } /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying @@ -3340,16 +2765,7 @@ impl Weak { where A: Clone, { - let inner = self.inner()?; - - if inner.strong() == 0 { - None - } else { - unsafe { - inner.inc_strong(); - Some(Rc::from_inner_in(self.ptr, self.alloc.clone())) - } - } + self.raw_weak.upgrade::().map(|raw_rc| Rc { raw_rc }) } /// Gets the number of strong (`Rc`) pointers pointing to this allocation. @@ -3358,7 +2774,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn strong_count(&self) -> usize { - if let Some(inner) = self.inner() { inner.strong() } else { 0 } + self.raw_weak.strong_count().map_or(0, |count| unsafe { *count.get() }) } /// Gets the number of `Weak` pointers pointing to this allocation. @@ -3367,32 +2783,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - if let Some(inner) = self.inner() { - if inner.strong() > 0 { - inner.weak() - 1 // subtract the implicit weak ptr - } else { - 0 - } - } else { - 0 - } - } - - /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`, - /// (i.e., when this `Weak` was created by `Weak::new`). - #[inline] - fn inner(&self) -> Option> { - if is_dangling(self.ptr.as_ptr()) { - None - } else { - // We are careful to *not* create a reference covering the "data" field, as - // the field may be mutated concurrently (for example, if the last `Rc` - // is dropped, the data field will be dropped in-place). - Some(unsafe { - let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } - }) - } + self.raw_weak.weak_count().map_or(0, |count| unsafe { *count.get() } - 1) } /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if @@ -3438,7 +2829,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) + RawWeak::ptr_eq(&self.raw_weak, &other.raw_weak) } } @@ -3469,16 +2860,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak { /// assert!(other_weak_foo.upgrade().is_none()); /// ``` fn drop(&mut self) { - let inner = if let Some(inner) = self.inner() { inner } else { return }; - - inner.dec_weak(); - // the weak count starts at 1, and will only go to zero if all - // the strong pointers have disappeared. 
- if inner.weak() == 0 { - unsafe { - self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); - } - } + unsafe { self.raw_weak.drop::() }; } } @@ -3497,17 +2879,14 @@ impl Clone for Weak { /// ``` #[inline] fn clone(&self) -> Weak { - if let Some(inner) = self.inner() { - inner.inc_weak() - } - Weak { ptr: self.ptr, alloc: self.alloc.clone() } + Self { raw_weak: unsafe { self.raw_weak.clone::() } } } } #[stable(feature = "rc_weak", since = "1.4.0")] impl fmt::Debug for Weak { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "(Weak)") + fmt::Debug::fmt(&self.raw_weak, f) } } @@ -3527,154 +2906,27 @@ impl Default for Weak { /// assert!(empty.upgrade().is_none()); /// ``` fn default() -> Weak { - Weak::new() - } -} - -// NOTE: We checked_add here to deal with mem::forget safely. In particular -// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then -// you can free the allocation while outstanding Rcs (or Weaks) exist. -// We abort because this is such a degenerate scenario that we don't care about -// what happens -- no real program should ever experience this. -// -// This should have negligible overhead since you don't actually need to -// clone these much in Rust thanks to ownership and move-semantics. - -#[doc(hidden)] -trait RcInnerPtr { - fn weak_ref(&self) -> &Cell; - fn strong_ref(&self) -> &Cell; - - #[inline] - fn strong(&self) -> usize { - self.strong_ref().get() - } - - #[inline] - fn inc_strong(&self) { - let strong = self.strong(); - - // We insert an `assume` here to hint LLVM at an otherwise - // missed optimization. - // SAFETY: The reference count will never be zero when this is - // called. - unsafe { - hint::assert_unchecked(strong != 0); - } - - let strong = strong.wrapping_add(1); - self.strong_ref().set(strong); - - // We want to abort on overflow instead of dropping the value. - // Checking for overflow after the store instead of before - // allows for slightly better code generation. - if core::intrinsics::unlikely(strong == 0) { - abort(); - } - } - - #[inline] - fn dec_strong(&self) { - self.strong_ref().set(self.strong() - 1); - } - - #[inline] - fn weak(&self) -> usize { - self.weak_ref().get() - } - - #[inline] - fn inc_weak(&self) { - let weak = self.weak(); - - // We insert an `assume` here to hint LLVM at an otherwise - // missed optimization. - // SAFETY: The reference count will never be zero when this is - // called. - unsafe { - hint::assert_unchecked(weak != 0); - } - - let weak = weak.wrapping_add(1); - self.weak_ref().set(weak); - - // We want to abort on overflow instead of dropping the value. - // Checking for overflow after the store instead of before - // allows for slightly better code generation. 
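[Editorial note: the deleted `inc_strong`/`inc_weak` implement the overflow-to-abort guard described by the `mem::forget` comment above, roughly the pattern sketched below. This is a sketch of the removed logic, not the `raw_rc` code, which is expected to keep an equivalent check behind `RcOps::increment_ref_count`:]

    use std::cell::Cell;

    fn increment(count: &Cell<usize>) {
        // Check *after* the store, as the removed code does, for better codegen;
        // a wrap to zero means the count overflowed and we must abort rather
        // than risk a later use-after-free.
        let new = count.get().wrapping_add(1);
        count.set(new);
        if new == 0 {
            std::process::abort();
        }
    }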
- if core::intrinsics::unlikely(weak == 0) { - abort(); - } - } - - #[inline] - fn dec_weak(&self) { - self.weak_ref().set(self.weak() - 1); - } -} - -impl RcInnerPtr for RcInner { - #[inline(always)] - fn weak_ref(&self) -> &Cell { - &self.weak - } - - #[inline(always)] - fn strong_ref(&self) -> &Cell { - &self.strong - } -} - -impl<'a> RcInnerPtr for WeakInner<'a> { - #[inline(always)] - fn weak_ref(&self) -> &Cell { - self.weak - } - - #[inline(always)] - fn strong_ref(&self) -> &Cell { - self.strong + Self { raw_weak: RawWeak::default() } } } #[stable(feature = "rust1", since = "1.0.0")] impl borrow::Borrow for Rc { fn borrow(&self) -> &T { - &**self + self.raw_rc.as_ref() } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] impl AsRef for Rc { fn as_ref(&self) -> &T { - &**self + self.raw_rc.as_ref() } } #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Rc {} -/// Gets the offset within an `RcInner` for the payload behind a pointer. -/// -/// # Safety -/// -/// The pointer must point to (and have valid metadata for) a previously -/// valid instance of T, but the T is allowed to be dropped. -unsafe fn data_offset(ptr: *const T) -> usize { - // Align the unsized value to the end of the RcInner. - // Because RcInner is repr(C), it will always be the last field in memory. - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of align_of_val_raw; this is an implementation - // detail of the language that must not be relied upon outside of std. - unsafe { data_offset_align(align_of_val_raw(ptr)) } -} - -#[inline] -fn data_offset_align(align: usize) -> usize { - let layout = Layout::new::>(); - layout.size() + layout.padding_needed_for(align) -} - /// A uniquely owned [`Rc`]. /// /// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong @@ -3712,13 +2964,12 @@ fn data_offset_align(align: usize) -> usize { /// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data, /// including fallible or async constructors. #[unstable(feature = "unique_rc_arc", issue = "112566")] +#[repr(transparent)] pub struct UniqueRc< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - ptr: NonNull>, - phantom: PhantomData>, - alloc: A, + raw_unique_rc: RawUniqueRc, } // Not necessary for correctness since `UniqueRc` contains `NonNull`, @@ -3979,7 +3230,7 @@ impl UniqueRc { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn new(value: T) -> Self { - Self::new_in(value, Global) + Self { raw_unique_rc: RawUniqueRc::new(value) } } } @@ -3993,17 +3244,7 @@ impl UniqueRc { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn new_in(value: T, alloc: A) -> Self { - let (ptr, alloc) = Box::into_unique(Box::new_in( - RcInner { - strong: Cell::new(0), - // keep one weak reference so if all the weak pointers that are created are dropped - // the UniqueRc still stays valid. - weak: Cell::new(1), - value, - }, - alloc, - )); - Self { ptr: ptr.into(), phantom: PhantomData, alloc } + Self { raw_unique_rc: RawUniqueRc::new_in(value, alloc) } } } @@ -4017,18 +3258,8 @@ impl UniqueRc { /// references. #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn into_rc(this: Self) -> Rc { - let mut this = ManuallyDrop::new(this); - - // Move the allocator out. 
- // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in - // a `ManuallyDrop`. - let alloc: A = unsafe { ptr::read(&this.alloc) }; - - // SAFETY: This pointer was allocated at creation time so we know it is valid. - unsafe { - // Convert our weak reference into a strong reference - this.ptr.as_mut().strong.set(1); - Rc::from_inner_in(this.ptr, alloc) + Rc { + raw_rc: unsafe { ptr::read(&ManuallyDrop::new(this).raw_unique_rc).into_rc::() }, } } } @@ -4040,12 +3271,7 @@ impl UniqueRc { /// to a [`Rc`] using [`UniqueRc::into_rc`]. #[unstable(feature = "unique_rc_arc", issue = "112566")] pub fn downgrade(this: &Self) -> Weak { - // SAFETY: This pointer was allocated at creation time and we guarantee that we only have - // one strong reference before converting to a regular Rc. - unsafe { - this.ptr.as_ref().inc_weak(); - } - Weak { ptr: this.ptr, alloc: this.alloc.clone() } + Weak { raw_weak: unsafe { this.raw_unique_rc.downgrade::() } } } } @@ -4054,99 +3280,20 @@ impl Deref for UniqueRc { type Target = T; fn deref(&self) -> &T { - // SAFETY: This pointer was allocated at creation time so we know it is valid. - unsafe { &self.ptr.as_ref().value } + self.raw_unique_rc.as_ref() } } #[unstable(feature = "unique_rc_arc", issue = "112566")] impl DerefMut for UniqueRc { fn deref_mut(&mut self) -> &mut T { - // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we - // have unique ownership and therefore it's safe to make a mutable reference because - // `UniqueRc` owns the only strong reference to itself. - unsafe { &mut (*self.ptr.as_ptr()).value } + self.raw_unique_rc.as_mut() } } #[unstable(feature = "unique_rc_arc", issue = "112566")] unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc { fn drop(&mut self) { - unsafe { - // destroy the contained object - drop_in_place(DerefMut::deref_mut(self)); - - // remove the implicit "strong weak" pointer now that we've destroyed the contents. - self.ptr.as_ref().dec_weak(); - - if self.ptr.as_ref().weak() == 0 { - self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); - } - } - } -} - -/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,** -/// but will deallocate it (without dropping the value) when dropped. -/// -/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic. -/// It is nearly a duplicate of `UniqueRc, A>` except that it allows `T: !Sized`, -/// which `MaybeUninit` does not. -#[cfg(not(no_global_oom_handling))] -struct UniqueRcUninit { - ptr: NonNull>, - layout_for_value: Layout, - alloc: Option, -} - -#[cfg(not(no_global_oom_handling))] -impl UniqueRcUninit { - /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it. - fn new(for_value: &T, alloc: A) -> UniqueRcUninit { - let layout = Layout::for_value(for_value); - let ptr = unsafe { - Rc::allocate_for_layout( - layout, - |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner), - |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner), - ) - }; - Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) } - } - - /// Returns the pointer to be written into to initialize the [`Rc`]. - fn data_ptr(&mut self) -> *mut T { - let offset = data_offset_align(self.layout_for_value.align()); - unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T } - } - - /// Upgrade this into a normal [`Rc`]. 
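[Editorial note: `UniqueRc` keeps its "weaks exist but cannot upgrade yet" behaviour while delegating to `RawUniqueRc`. A usage sketch of the still-unstable API (nightly, `#![feature(unique_rc_arc)]`), not part of the patch:]

    #![feature(unique_rc_arc)]
    use std::rc::{Rc, UniqueRc, Weak};

    fn main() {
        let mut unique = UniqueRc::new(0_i32);
        let weak: Weak<i32> = UniqueRc::downgrade(&unique);
        assert!(weak.upgrade().is_none()); // no strong reference exists yet
        *unique = 42;                      // DerefMut: ownership is still unique
        let rc: Rc<i32> = UniqueRc::into_rc(unique);
        assert_eq!(weak.upgrade().as_deref(), Some(&42));
        assert_eq!(*rc, 42);
    }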
- /// - /// # Safety - /// - /// The data must have been initialized (by writing to [`Self::data_ptr()`]). - unsafe fn into_rc(self) -> Rc { - let mut this = ManuallyDrop::new(self); - let ptr = this.ptr; - let alloc = this.alloc.take().unwrap(); - - // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible - // for having initialized the data. - unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) } - } -} - -#[cfg(not(no_global_oom_handling))] -impl Drop for UniqueRcUninit { - fn drop(&mut self) { - // SAFETY: - // * new() produced a pointer safe to deallocate. - // * We own the pointer unless into_rc() was called, which forgets us. - unsafe { - self.alloc.take().unwrap().deallocate( - self.ptr.cast(), - rc_inner_layout_for_value_layout(self.layout_for_value), - ); - } + unsafe { self.raw_unique_rc.drop::() }; } } diff --git a/library/alloc/src/rc/tests.rs b/library/alloc/src/rc/tests.rs index 2210a7c24c06a..a49dcf7eee3e6 100644 --- a/library/alloc/src/rc/tests.rs +++ b/library/alloc/src/rc/tests.rs @@ -61,16 +61,25 @@ fn weak_self_cyclic() { #[test] fn is_unique() { + fn rc_is_unique(rc: &Rc) -> bool { + unsafe { + ::is_unique( + rc.raw_rc.strong_count(), + rc.raw_rc.weak_count(), + ) + } + } + let x = Rc::new(3); - assert!(Rc::is_unique(&x)); + assert!(rc_is_unique(&x)); let y = x.clone(); - assert!(!Rc::is_unique(&x)); + assert!(!rc_is_unique(&x)); drop(y); - assert!(Rc::is_unique(&x)); + assert!(rc_is_unique(&x)); let w = Rc::downgrade(&x); - assert!(!Rc::is_unique(&x)); + assert!(!rc_is_unique(&x)); drop(w); - assert!(Rc::is_unique(&x)); + assert!(rc_is_unique(&x)); } #[test] diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index b34a6d3f660c8..b51c7d009b9fd 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -9,36 +9,46 @@ //! `#[cfg(target_has_atomic = "ptr")]`. 
use core::any::Any; +use core::cell::UnsafeCell; #[cfg(not(no_global_oom_handling))] use core::clone::CloneToUninit; use core::cmp::Ordering; use core::hash::{Hash, Hasher}; -use core::intrinsics::abort; +use core::marker::Unsize; #[cfg(not(no_global_oom_handling))] -use core::iter; -use core::marker::{PhantomData, Unsize}; -use core::mem::{self, ManuallyDrop, align_of_val_raw}; +use core::mem::MaybeUninit; +use core::mem::{self, ManuallyDrop}; use core::ops::{CoerceUnsized, Deref, DerefPure, DispatchFromDyn, LegacyReceiver}; use core::panic::{RefUnwindSafe, UnwindSafe}; use core::pin::{Pin, PinCoerceUnsized}; use core::ptr::{self, NonNull}; -#[cfg(not(no_global_oom_handling))] -use core::slice::from_raw_parts_mut; -use core::sync::atomic; use core::sync::atomic::Ordering::{Acquire, Relaxed, Release}; -use core::{borrow, fmt, hint}; +use core::sync::atomic::{self, AtomicUsize}; +use core::{borrow, fmt, hint, intrinsics}; #[cfg(not(no_global_oom_handling))] -use crate::alloc::handle_alloc_error; -use crate::alloc::{AllocError, Allocator, Global, Layout}; +use crate::alloc::Layout; +use crate::alloc::{AllocError, Allocator, Global}; use crate::borrow::{Cow, ToOwned}; +#[cfg(not(no_global_oom_handling))] use crate::boxed::Box; -use crate::rc::is_dangling; +#[cfg(not(no_global_oom_handling))] +use crate::raw_rc::RefCounts; +use crate::raw_rc::{self, RawRc, RawWeak}; #[cfg(not(no_global_oom_handling))] use crate::string::String; #[cfg(not(no_global_oom_handling))] use crate::vec::Vec; +#[cfg(not(no_global_oom_handling))] +fn weak_fn_to_raw_weak_fn(f: F) -> impl FnOnce(&RawWeak) -> T +where + F: FnOnce(&Weak) -> T, + A: Allocator, +{ + move |raw_weak: &RawWeak| f(unsafe { mem::transmute(raw_weak) }) +} + /// A soft limit on the amount of references that may be made to an `Arc`. /// /// Going above this limit will abort your program (although not @@ -70,6 +80,206 @@ macro_rules! acquire { }; } +enum RcOps {} + +impl raw_rc::RcOps for RcOps { + unsafe fn increment_ref_count(count: &UnsafeCell) { + let count = unsafe { AtomicUsize::from_ptr(count.get()) }; + + // Using a relaxed ordering is alright here, as knowledge of the + // original reference prevents other threads from erroneously deleting + // the object. + // + // As explained in the [Boost documentation][1], Increasing the + // reference counter can always be done with memory_order_relaxed: New + // references to an object can only be formed from an existing + // reference, and passing an existing reference from one thread to + // another must already provide any required synchronization. + // + // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) + let old_size = count.fetch_add(1, Relaxed); + + // However we need to guard against massive refcounts in case someone is `mem::forget`ing + // Arcs. If we don't do this the count can overflow and users will use-after free. This + // branch will never be taken in any realistic program. We abort because such a program is + // incredibly degenerate, and we don't care to support it. + // + // This check is not 100% water-proof: we error when the refcount grows beyond `isize::MAX`. + // But we do that check *after* having done the increment, so there is a chance here that + // the worst already happened and we actually do overflow the `usize` counter. However, that + // requires the counter to grow from `isize::MAX` to `usize::MAX` between the increment + // above and the `abort` below, which seems exceedingly unlikely. 
+ // + // This is a global invariant, and also applies when using a compare-exchange loop to increment + // counters in other methods. + // Otherwise, the counter could be brought to an almost-overflow using a compare-exchange loop, + // and then overflow using a few `fetch_add`s. + if old_size > MAX_REFCOUNT { + intrinsics::abort(); + } + } + + unsafe fn decrement_ref_count(count: &UnsafeCell) -> bool { + let count = unsafe { AtomicUsize::from_ptr(count.get()) }; + + if count.fetch_sub(1, Release) == 1 { + acquire!(count); + + true + } else { + false + } + } + + unsafe fn upgrade(strong_count: &UnsafeCell) -> bool { + let strong_count = unsafe { AtomicUsize::from_ptr(strong_count.get()) }; + + #[inline] + fn checked_increment(n: usize) -> Option { + // Any write of 0 we can observe leaves the field in permanently zero state. + if n == 0 { + return None; + } + // See comments in `Arc::clone` for why we do this (for `mem::forget`). + assert!(n <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR); + Some(n + 1) + } + + // We use a CAS loop to increment the strong count instead of a + // fetch_add as this function should never take the reference count + // from zero to one. + // + // Relaxed is fine for the failure case because we don't have any expectations about the new state. + // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner + // value can be initialized after `Weak` references have already been created. In that case, we + // expect to observe the fully initialized value. + strong_count.fetch_update(Acquire, Relaxed, checked_increment).is_ok() + } + + unsafe fn downgrade(weak_count: &UnsafeCell) { + let weak_count = unsafe { AtomicUsize::from_ptr(weak_count.get()) }; + + // This Relaxed is OK because we're checking the value in the CAS + // below. + let mut cur = weak_count.load(Relaxed); + + loop { + // check if the weak counter is currently "locked"; if so, spin. + if cur == usize::MAX { + hint::spin_loop(); + cur = weak_count.load(Relaxed); + + continue; + } + + // We can't allow the refcount to increase much past `MAX_REFCOUNT`. + assert!(cur <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR); + + // NOTE: this code currently ignores the possibility of overflow + // into usize::MAX; in general both Rc and Arc need to be adjusted + // to deal with overflow. + + // Unlike with Clone(), we need this to be an Acquire read to + // synchronize with the write coming from `is_unique`, so that the + // events prior to that write happen before this read. + match weak_count.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { + Ok(_) => break, + Err(old) => cur = old, + } + } + } + + unsafe fn lock_strong_count(strong_count: &UnsafeCell) -> bool { + let strong_count = unsafe { AtomicUsize::from_ptr(strong_count.get()) }; + + match strong_count.compare_exchange(1, 0, Relaxed, Relaxed) { + Ok(_) => { + acquire!(strong_count); + + true + } + Err(_) => false, + } + } + + unsafe fn unlock_strong_count(strong_count: &UnsafeCell) { + let strong_count = unsafe { AtomicUsize::from_ptr(strong_count.get()) }; + + strong_count.store(1, Release); + } + + unsafe fn is_unique(strong_count: &UnsafeCell, weak_count: &UnsafeCell) -> bool { + let strong_count = unsafe { AtomicUsize::from_ptr(strong_count.get()) }; + let weak_count = unsafe { AtomicUsize::from_ptr(weak_count.get()) }; + + // lock the weak pointer count if we appear to be the sole weak pointer + // holder. 
+ // + // The acquire label here ensures a happens-before relationship with any + // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements + // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded + // weak ref was never dropped, the CAS here will fail so we do not care to synchronize. + if weak_count.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { + // This needs to be an `Acquire` to synchronize with the decrement of the `strong` + // counter in `drop` -- the only access that happens when any but the last reference + // is being dropped. + let unique = strong_count.load(Acquire) == 1; + + // The release write here synchronizes with a read in `downgrade`, + // effectively preventing the above read of `strong` from happening + // after the write. + weak_count.store(1, Release); // release the lock + unique + } else { + false + } + } + + #[cfg(not(no_global_oom_handling))] + unsafe fn make_unique(rc: &mut raw_rc::RawRc, by_clone: F, by_move: G) + where + T: ?Sized, + F: FnOnce(&mut raw_rc::RawRc), + G: FnOnce(&mut raw_rc::RawRc), + { + let ref_counts = rc.ref_counts(); + let strong_count = unsafe { AtomicUsize::from_ptr(ref_counts.strong.get()) }; + let weak_count = unsafe { AtomicUsize::from_ptr(ref_counts.weak.get()) }; + + // Note that we hold both a strong reference and a weak reference. + // Thus, releasing our strong reference only will not, by itself, cause + // the memory to be deallocated. + // + // Use Acquire to ensure that we see any writes to `weak` that happen + // before release writes (i.e., decrements) to `strong`. Since we hold a + // weak count, there's no chance the ArcInner itself could be + // deallocated. + if strong_count.compare_exchange(1, 0, Acquire, Relaxed).is_ok() { + if weak_count.load(Relaxed) == 1 { + // We were the sole reference of either kind; bump back up the + // strong ref count. + strong_count.store(1, Release); + } else { + // Relaxed suffices in the above because this is fundamentally an + // optimization: we are always racing with weak pointers being + // dropped. Worst case, we end up allocated a new Arc unnecessarily. + + // We removed the last strong ref, but there are additional weak + // refs remaining. We'll move the contents to a new Arc, and + // invalidate the other weak refs. + + // Note that it is not possible for the read of `weak` to yield + // usize::MAX (i.e., locked), since the weak count can only be + // locked by a thread with a strong reference. + + by_move(rc); + } + } else { + by_clone(rc); + } + } +} + /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically /// Reference Counted'. 
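[Editorial note: the atomic `RcOps` implementation above concentrates all of `Arc`'s ordering decisions in one place. A stripped-down sketch of the two core operations, assuming `MAX_REFCOUNT` is `isize::MAX as usize` and that `acquire!` amounts to an `Acquire` fence, as in the existing `sync.rs`:]

    use std::sync::atomic::{fence, AtomicUsize, Ordering::{Acquire, Relaxed, Release}};

    const MAX_REFCOUNT: usize = isize::MAX as usize;

    fn increment(count: &AtomicUsize) {
        // Relaxed is enough: an existing reference already provides the
        // synchronization needed to hand a new reference to another thread.
        if count.fetch_add(1, Relaxed) > MAX_REFCOUNT {
            std::process::abort();
        }
    }

    fn decrement(count: &AtomicUsize) -> bool {
        // Release on the way down; the thread that reaches zero takes an
        // Acquire fence before it is allowed to destroy the shared state.
        if count.fetch_sub(1, Release) == 1 {
            fence(Acquire);
            true
        } else {
            false
        }
    }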
/// @@ -240,9 +450,7 @@ pub struct Arc< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - ptr: NonNull>, - phantom: PhantomData>, - alloc: A, + raw_rc: RawRc, } #[stable(feature = "rust1", since = "1.0.0")] @@ -259,34 +467,6 @@ impl, U: ?Sized, A: Allocator> CoerceUnsized> fo #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Arc {} -impl Arc { - unsafe fn from_inner(ptr: NonNull>) -> Self { - unsafe { Self::from_inner_in(ptr, Global) } - } - - unsafe fn from_ptr(ptr: *mut ArcInner) -> Self { - unsafe { Self::from_ptr_in(ptr, Global) } - } -} - -impl Arc { - #[inline] - fn into_inner_with_allocator(this: Self) -> (NonNull>, A) { - let this = mem::ManuallyDrop::new(this); - (this.ptr, unsafe { ptr::read(&this.alloc) }) - } - - #[inline] - unsafe fn from_inner_in(ptr: NonNull>, alloc: A) -> Self { - Self { ptr, phantom: PhantomData, alloc } - } - - #[inline] - unsafe fn from_ptr_in(ptr: *mut ArcInner, alloc: A) -> Self { - unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) } - } -} - /// `Weak` is a version of [`Arc`] that holds a non-owning reference to the /// managed allocation. /// @@ -315,14 +495,7 @@ pub struct Weak< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, > { - // This is a `NonNull` to allow optimizing the size of this type in enums, - // but it is not necessarily a valid pointer. - // `Weak::new` sets this to `usize::MAX` so that it doesn’t need - // to allocate space on the heap. That's not a value a real pointer - // will ever have because RcInner has alignment at least 2. - // This is only possible when `T: Sized`; unsized `T` never dangle. - ptr: NonNull>, - alloc: A, + raw_weak: RawWeak, } #[stable(feature = "arc_weak", since = "1.4.0")] @@ -338,37 +511,10 @@ impl, U: ?Sized> DispatchFromDyn> for Weak {} #[stable(feature = "arc_weak", since = "1.4.0")] impl fmt::Debug for Weak { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "(Weak)") + fmt::Debug::fmt(&self.raw_weak, f) } } -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -#[repr(C)] -struct ArcInner { - strong: atomic::AtomicUsize, - - // the value usize::MAX acts as a sentinel for temporarily "locking" the - // ability to upgrade weak pointers or downgrade strong ones; this is used - // to avoid races in `make_mut` and `get_mut`. - weak: atomic::AtomicUsize, - - data: T, -} - -/// Calculate layout for `ArcInner` using the inner value's layout -fn arcinner_layout_for_value_layout(layout: Layout) -> Layout { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const ArcInner)`, but this created a misaligned - // reference (see #54908). - Layout::new::>().extend(layout).unwrap().0.pad_to_align() -} - -unsafe impl Send for ArcInner {} -unsafe impl Sync for ArcInner {} - impl Arc { /// Constructs a new `Arc`. 
/// @@ -383,14 +529,7 @@ impl Arc { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(data: T) -> Arc { - // Start the weak pointer count as 1 which is the weak pointer that's - // held by all the strong pointers (kinda), see std/rc.rs for more info - let x: Box<_> = Box::new(ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, - }); - unsafe { Self::from_inner(Box::leak(x).into()) } + Self { raw_rc: RawRc::new(data) } } /// Constructs a new `Arc` while giving you a `Weak` to the allocation, @@ -451,7 +590,7 @@ impl Arc { where F: FnOnce(&Weak) -> T, { - Self::new_cyclic_in(data_fn, Global) + Self { raw_rc: unsafe { RawRc::new_cyclic::<_, RcOps>(weak_fn_to_raw_weak_fn(data_fn)) } } } /// Constructs a new `Arc` with uninitialized contents. @@ -477,13 +616,7 @@ impl Arc { #[stable(feature = "new_uninit", since = "1.82.0")] #[must_use] pub fn new_uninit() -> Arc> { - unsafe { - Arc::from_ptr(Arc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - <*mut u8>::cast, - )) - } + Arc { raw_rc: RawRc::new_uninit() } } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -511,13 +644,7 @@ impl Arc { #[unstable(feature = "new_zeroed_alloc", issue = "129396")] #[must_use] pub fn new_zeroed() -> Arc> { - unsafe { - Arc::from_ptr(Arc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - <*mut u8>::cast, - )) - } + Arc { raw_rc: RawRc::new_zeroed() } } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then @@ -550,14 +677,7 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new(data: T) -> Result, AllocError> { - // Start the weak pointer count as 1 which is the weak pointer that's - // held by all the strong pointers (kinda), see std/rc.rs for more info - let x: Box<_> = Box::try_new(ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, - })?; - unsafe { Ok(Self::from_inner(Box::leak(x).into())) } + RawRc::try_new(data).map(|raw_rc| Self { raw_rc }) } /// Constructs a new `Arc` with uninitialized contents, returning an error @@ -584,13 +704,7 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { - unsafe { - Ok(Arc::from_ptr(Arc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - <*mut u8>::cast, - )?)) - } + RawRc::try_new_uninit().map(|raw_rc| Arc { raw_rc }) } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -617,13 +731,7 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { - unsafe { - Ok(Arc::from_ptr(Arc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - <*mut u8>::cast, - )?)) - } + RawRc::try_new_zeroed().map(|raw_rc| Arc { raw_rc }) } } @@ -644,18 +752,7 @@ impl Arc { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "allocator_api", issue = "32838")] pub fn new_in(data: T, alloc: A) -> Arc { - // Start the weak pointer count as 1 which is the weak pointer that's - // held by all the strong pointers (kinda), see std/rc.rs for more info - let x = Box::new_in( - ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, - }, - alloc, - ); - let (ptr, alloc) = Box::into_unique(x); - unsafe 
{ Self::from_inner_in(ptr.into(), alloc) } + Self { raw_rc: RawRc::new_in(data, alloc) } } /// Constructs a new `Arc` with uninitialized contents in the provided allocator. @@ -685,16 +782,7 @@ impl Arc { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_uninit_in(alloc: A) -> Arc, A> { - unsafe { - Arc::from_ptr_in( - Arc::allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate(layout), - <*mut u8>::cast, - ), - alloc, - ) - } + Arc { raw_rc: RawRc::new_uninit_in(alloc) } } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -723,16 +811,7 @@ impl Arc { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn new_zeroed_in(alloc: A) -> Arc, A> { - unsafe { - Arc::from_ptr_in( - Arc::allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate_zeroed(layout), - <*mut u8>::cast, - ), - alloc, - ) - } + Arc { raw_rc: RawRc::new_zeroed_in(alloc) } } /// Constructs a new `Arc` in the given allocator while giving you a `Weak` to the allocation, @@ -771,60 +850,11 @@ impl Arc { where F: FnOnce(&Weak) -> T, { - // Construct the inner in the "uninitialized" state with a single - // weak reference. - let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in( - ArcInner { - strong: atomic::AtomicUsize::new(0), - weak: atomic::AtomicUsize::new(1), - data: mem::MaybeUninit::::uninit(), + Self { + raw_rc: unsafe { + RawRc::new_cyclic_in::<_, RcOps>(weak_fn_to_raw_weak_fn(data_fn), alloc) }, - alloc, - )); - let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into(); - let init_ptr: NonNull> = uninit_ptr.cast(); - - let weak = Weak { ptr: init_ptr, alloc }; - - // It's important we don't give up ownership of the weak pointer, or - // else the memory might be freed by the time `data_fn` returns. If - // we really wanted to pass ownership, we could create an additional - // weak pointer for ourselves, but this would result in additional - // updates to the weak reference count which might not be necessary - // otherwise. - let data = data_fn(&weak); - - // Now we can properly initialize the inner value and turn our weak - // reference into a strong reference. - let strong = unsafe { - let inner = init_ptr.as_ptr(); - ptr::write(&raw mut (*inner).data, data); - - // The above write to the data field must be visible to any threads which - // observe a non-zero strong count. Therefore we need at least "Release" ordering - // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`. - // - // "Acquire" ordering is not required. When considering the possible behaviors - // of `data_fn` we only need to look at what it could do with a reference to a - // non-upgradeable `Weak`: - // - It can *clone* the `Weak`, increasing the weak reference count. - // - It can drop those clones, decreasing the weak reference count (but never to zero). - // - // These side effects do not impact us in any way, and no other side effects are - // possible with safe code alone. - let prev_value = (*inner).strong.fetch_add(1, Release); - debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - - // Strong references should collectively own a shared weak reference, - // so don't run the destructor for our old weak reference. - // Calling into_raw_with_allocator has the double effect of giving us back the allocator, - // and forgetting the weak reference. 
- let alloc = weak.into_raw_with_allocator().1; - - Arc::from_inner_in(init_ptr, alloc) - }; - - strong + } } /// Constructs a new `Pin>` in the provided allocator. If `T` does not implement `Unpin`, @@ -867,18 +897,7 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new_in(data: T, alloc: A) -> Result, AllocError> { - // Start the weak pointer count as 1 which is the weak pointer that's - // held by all the strong pointers (kinda), see std/rc.rs for more info - let x = Box::try_new_in( - ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, - }, - alloc, - )?; - let (ptr, alloc) = Box::into_unique(x); - Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) }) + RawRc::try_new_in(data, alloc).map(|raw_rc| Self { raw_rc }) } /// Constructs a new `Arc` with uninitialized contents, in the provided allocator, returning an @@ -909,16 +928,7 @@ impl Arc { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn try_new_uninit_in(alloc: A) -> Result, A>, AllocError> { - unsafe { - Ok(Arc::from_ptr_in( - Arc::try_allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate(layout), - <*mut u8>::cast, - )?, - alloc, - )) - } + RawRc::try_new_uninit_in(alloc).map(|raw_rc| Arc { raw_rc }) } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -948,16 +958,7 @@ impl Arc { // #[unstable(feature = "new_uninit", issue = "63291")] #[inline] pub fn try_new_zeroed_in(alloc: A) -> Result, A>, AllocError> { - unsafe { - Ok(Arc::from_ptr_in( - Arc::try_allocate_for_layout( - Layout::new::(), - |layout| alloc.allocate_zeroed(layout), - <*mut u8>::cast, - )?, - alloc, - )) - } + RawRc::try_new_zeroed_in(alloc).map(|raw_rc| Arc { raw_rc }) } /// Returns the inner value, if the `Arc` has exactly one strong reference. /// @@ -995,20 +996,10 @@ impl Arc { #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { - if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() { - return Err(this); + match unsafe { RawRc::try_unwrap::(Arc::into_raw_rc(this)) } { + Ok(value) => Ok(value), + Err(raw_rc) => Err(Self { raw_rc }), } - - acquire!(this.inner().strong); - - let this = ManuallyDrop::new(this); - let elem: T = unsafe { ptr::read(&this.ptr.as_ref().data) }; - let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator - - // Make a weak pointer to clean up the implicit strong-weak reference - let _weak = Weak { ptr: this.ptr, alloc }; - - Ok(elem) } /// Returns the inner value, if the `Arc` has exactly one strong reference. @@ -1110,30 +1101,7 @@ impl Arc { #[inline] #[stable(feature = "arc_into_inner", since = "1.70.0")] pub fn into_inner(this: Self) -> Option { - // Make sure that the ordinary `Drop` implementation isn’t called as well - let mut this = mem::ManuallyDrop::new(this); - - // Following the implementation of `drop` and `drop_slow` - if this.inner().strong.fetch_sub(1, Release) != 1 { - return None; - } - - acquire!(this.inner().strong); - - // SAFETY: This mirrors the line - // - // unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) }; - // - // in `drop_slow`. Instead of dropping the value behind the pointer, - // it is read and eventually returned; `ptr::read` has the same - // safety conditions as `ptr::drop_in_place`. 
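// Illustrative sketch, not taken from the patch: the removed `try_unwrap` body above
// gates sole ownership on a CAS of the strong count from 1 to 0, and the same check
// presumably now sits behind `RawRc::try_unwrap`. A stand-alone version of that gate
// (the function name is hypothetical):

use core::sync::atomic::{fence, AtomicUsize, Ordering::{Acquire, Relaxed}};

/// Returns `true` if the caller held the only strong reference and may now move the
/// value out. Relaxed suffices for the CAS itself; the Acquire fence orders all prior
/// uses of the value before the caller takes ownership.
fn try_take_sole_ownership(strong: &AtomicUsize) -> bool {
    if strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
        return false;
    }
    fence(Acquire);
    true
}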
- - let inner = unsafe { ptr::read(Self::get_mut_unchecked(&mut this)) }; - let alloc = unsafe { ptr::read(&this.alloc) }; - - drop(Weak { ptr: this.ptr, alloc }); - - Some(inner) + unsafe { Arc::into_raw_rc(this).into_inner::() } } } @@ -1164,7 +1132,7 @@ impl Arc<[T]> { #[stable(feature = "new_uninit", since = "1.82.0")] #[must_use] pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit]> { - unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) } + Arc { raw_rc: RawRc::new_uninit_slice(len) } } /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being @@ -1192,16 +1160,7 @@ impl Arc<[T]> { #[unstable(feature = "new_zeroed_alloc", issue = "129396")] #[must_use] pub fn new_zeroed_slice(len: usize) -> Arc<[mem::MaybeUninit]> { - unsafe { - Arc::from_ptr(Arc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem as *mut T, len) - as *mut ArcInner<[mem::MaybeUninit]> - }, - )) - } + Arc { raw_rc: RawRc::new_zeroed_slice(len) } } /// Converts the reference-counted slice into a reference-counted array. @@ -1255,7 +1214,7 @@ impl Arc<[T], A> { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_uninit_slice_in(len: usize, alloc: A) -> Arc<[mem::MaybeUninit], A> { - unsafe { Arc::from_ptr_in(Arc::allocate_for_slice_in(len, &alloc), alloc) } + Arc { raw_rc: RawRc::new_uninit_slice_in(len, alloc) } } /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being @@ -1283,19 +1242,7 @@ impl Arc<[T], A> { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Arc<[mem::MaybeUninit], A> { - unsafe { - Arc::from_ptr_in( - Arc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| alloc.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem.cast::(), len) - as *mut ArcInner<[mem::MaybeUninit]> - }, - ), - alloc, - ) - } + Arc { raw_rc: RawRc::new_zeroed_slice_in(len, alloc) } } } @@ -1332,8 +1279,7 @@ impl Arc, A> { #[must_use = "`self` will be dropped if the result is not used"] #[inline] pub unsafe fn assume_init(self) -> Arc { - let (ptr, alloc) = Arc::into_inner_with_allocator(self); - unsafe { Arc::from_inner_in(ptr.cast(), alloc) } + Arc { raw_rc: unsafe { Arc::into_raw_rc(self).assume_init() } } } } @@ -1373,8 +1319,7 @@ impl Arc<[mem::MaybeUninit], A> { #[must_use = "`self` will be dropped if the result is not used"] #[inline] pub unsafe fn assume_init(self) -> Arc<[T], A> { - let (ptr, alloc) = Arc::into_inner_with_allocator(self); - unsafe { Arc::from_ptr_in(ptr.as_ptr() as _, alloc) } + Arc { raw_rc: unsafe { Arc::into_raw_rc(self).assume_init() } } } } @@ -1441,7 +1386,7 @@ impl Arc { #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Arc::from_raw_in(ptr, Global) } + unsafe { Self { raw_rc: RawRc::from_raw(NonNull::new_unchecked(ptr.cast_mut())) } } } /// Increments the strong reference count on the `Arc` associated with the @@ -1475,7 +1420,11 @@ impl Arc { #[inline] #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] pub unsafe fn increment_strong_count(ptr: *const T) { - unsafe { Arc::increment_strong_count_in(ptr, Global) } + unsafe { + RawRc::::increment_strong_count::(NonNull::new_unchecked( + ptr.cast_mut(), + )) + }; } /// Decrements the strong reference count on the `Arc` associated with the @@ -1511,11 
+1460,27 @@ impl Arc { #[inline] #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] pub unsafe fn decrement_strong_count(ptr: *const T) { - unsafe { Arc::decrement_strong_count_in(ptr, Global) } + unsafe { + RawRc::::decrement_strong_count::(NonNull::new_unchecked( + ptr.cast_mut(), + )) + }; } } impl Arc { + fn into_raw_rc(this: Arc) -> RawRc { + unsafe { ptr::read(&ManuallyDrop::new(this).raw_rc) } + } + + fn raw_strong_count(&self) -> &AtomicUsize { + unsafe { AtomicUsize::from_ptr(self.raw_rc.strong_count().get()) } + } + + fn raw_weak_count(&self) -> &AtomicUsize { + unsafe { AtomicUsize::from_ptr(self.raw_rc.weak_count().get()) } + } + /// Returns a reference to the underlying allocator. /// /// Note: this is an associated function, which means that you have @@ -1524,7 +1489,7 @@ impl Arc { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn allocator(this: &Self) -> &A { - &this.alloc + this.raw_rc.allocator() } /// Consumes the `Arc`, returning the wrapped pointer. @@ -1547,8 +1512,7 @@ impl Arc { #[stable(feature = "rc_raw", since = "1.17.0")] #[rustc_never_returns_null_ptr] pub fn into_raw(this: Self) -> *const T { - let this = ManuallyDrop::new(this); - Self::as_ptr(&*this) + Arc::into_raw_rc(this).into_raw().as_ptr() } /// Consumes the `Arc`, returning the wrapped pointer and allocator. @@ -1572,11 +1536,9 @@ impl Arc { #[must_use = "losing the pointer will leak memory"] #[unstable(feature = "allocator_api", issue = "32838")] pub fn into_raw_with_allocator(this: Self) -> (*const T, A) { - let this = mem::ManuallyDrop::new(this); - let ptr = Self::as_ptr(&this); - // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped - let alloc = unsafe { ptr::read(&this.alloc) }; - (ptr, alloc) + let (ptr, alloc) = Arc::into_raw_rc(this).into_raw_parts(); + + (ptr.as_ptr(), alloc) } /// Provides a raw pointer to the data. @@ -1599,12 +1561,7 @@ impl Arc { #[stable(feature = "rc_as_ptr", since = "1.45.0")] #[rustc_never_returns_null_ptr] pub fn as_ptr(this: &Self) -> *const T { - let ptr: *mut ArcInner = NonNull::as_ptr(this.ptr); - - // SAFETY: This cannot go through Deref::deref or RcInnerPtr::inner because - // this is required to retain raw/mut provenance such that e.g. `get_mut` can - // write through the pointer after the Rc is recovered through `from_raw`. - unsafe { &raw mut (*ptr).data } + this.raw_rc.as_ptr().as_ptr() } /// Constructs an `Arc` from a raw pointer. @@ -1677,13 +1634,8 @@ impl Arc { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - unsafe { - let offset = data_offset(ptr); - - // Reverse the offset to find the original ArcInner. - let arc_ptr = ptr.byte_sub(offset) as *mut ArcInner; - - Self::from_ptr_in(arc_ptr, alloc) + Self { + raw_rc: unsafe { RawRc::from_raw_parts(NonNull::new_unchecked(ptr.cast_mut()), alloc) }, } } @@ -1705,38 +1657,8 @@ impl Arc { where A: Clone, { - // This Relaxed is OK because we're checking the value in the CAS - // below. - let mut cur = this.inner().weak.load(Relaxed); - - loop { - // check if the weak counter is currently "locked"; if so, spin. - if cur == usize::MAX { - hint::spin_loop(); - cur = this.inner().weak.load(Relaxed); - continue; - } - - // We can't allow the refcount to increase much past `MAX_REFCOUNT`. 
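// Illustrative sketch, not taken from the patch: the `raw_strong_count` and
// `raw_weak_count` helpers added above layer `AtomicUsize` views over the plain
// `UnsafeCell<usize>` counters, and the debugger providers later in this patch locate
// those counters by stepping back one header from the value pointer. A self-contained
// model of that access pattern, using `Counters` as a stand-in for the real header type:

use core::cell::UnsafeCell;
use core::sync::atomic::{AtomicUsize, Ordering::Relaxed};

/// Stand-in for the shared counter header; only the field names matter here.
struct Counters {
    weak: UnsafeCell<usize>,
    strong: UnsafeCell<usize>,
}

/// Assumed precondition: `value_ptr` points at the value of a live reference-counted
/// allocation whose counter header sits immediately before the value.
unsafe fn load_strong_count(value_ptr: *const u8) -> usize {
    unsafe {
        let counters = value_ptr.cast::<Counters>().sub(1);
        // Viewing the cell through `AtomicUsize::from_ptr` is only sound if every
        // concurrent access to the counter is also atomic.
        AtomicUsize::from_ptr((*counters).strong.get()).load(Relaxed)
    }
}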
- assert!(cur <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR); - - // NOTE: this code currently ignores the possibility of overflow - // into usize::MAX; in general both Rc and Arc need to be adjusted - // to deal with overflow. - - // Unlike with Clone(), we need this to be an Acquire read to - // synchronize with the write coming from `is_unique`, so that the - // events prior to that write happen before this read. - match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { - Ok(_) => { - // Make sure we do not create a dangling Weak - debug_assert!(!is_dangling(this.ptr.as_ptr())); - return Weak { ptr: this.ptr, alloc: this.alloc.clone() }; - } - Err(old) => cur = old, - } - } - } + Weak { raw_weak: unsafe { this.raw_rc.downgrade::() } } + } /// Gets the number of [`Weak`] pointers to this allocation. /// @@ -1762,7 +1684,7 @@ impl Arc { #[must_use] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { - let cnt = this.inner().weak.load(Relaxed); + let cnt = this.raw_weak_count().load(Relaxed); // If the weak count is currently locked, the value of the // count was 0 just before taking the lock. if cnt == usize::MAX { 0 } else { cnt - 1 } @@ -1792,7 +1714,7 @@ impl Arc { #[must_use] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { - this.inner().strong.load(Relaxed) + this.raw_strong_count().load(Relaxed) } /// Increments the strong reference count on the `Arc` associated with the @@ -1833,10 +1755,11 @@ impl Arc { where A: Clone, { - // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop - let arc = unsafe { mem::ManuallyDrop::new(Arc::from_raw_in(ptr, alloc)) }; - // Now increase refcount, but don't drop new refcount either - let _arc_clone: mem::ManuallyDrop<_> = arc.clone(); + unsafe { + RawRc::::increment_strong_count::(NonNull::new_unchecked(ptr.cast_mut())) + }; + + drop(alloc); } /// Decrements the strong reference count on the `Arc` associated with the @@ -1876,33 +1799,12 @@ impl Arc { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) { - unsafe { drop(Arc::from_raw_in(ptr, alloc)) }; - } - - #[inline] - fn inner(&self) -> &ArcInner { - // This unsafety is ok because while this arc is alive we're guaranteed - // that the inner pointer is valid. Furthermore, we know that the - // `ArcInner` structure itself is `Sync` because the inner data is - // `Sync` as well, so we're ok loaning out an immutable pointer to these - // contents. - unsafe { self.ptr.as_ref() } - } - - // Non-inlined part of `drop`. - #[inline(never)] - unsafe fn drop_slow(&mut self) { - // Drop the weak ref collectively held by all strong references when this - // variable goes out of scope. This ensures that the memory is deallocated - // even if the destructor of `T` panics. - // Take a reference to `self.alloc` instead of cloning because 1. it'll last long - // enough, and 2. you should be able to drop `Arc`s with unclonable allocators - let _weak = Weak { ptr: self.ptr, alloc: &self.alloc }; - - // Destroy the data at this time, even though we must not free the box - // allocation itself (there might still be weak pointers lying around). - // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed. 
- unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) }; + unsafe { + RawRc::::decrement_strong_count_in::( + NonNull::new_unchecked(ptr.cast_mut()), + alloc, + ) + }; } /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to @@ -1926,215 +1828,7 @@ impl Arc { #[must_use] #[stable(feature = "ptr_eq", since = "1.17.0")] pub fn ptr_eq(this: &Self, other: &Self) -> bool { - ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr()) - } -} - -impl Arc { - /// Allocates an `ArcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided. - /// - /// The function `mem_to_arcinner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `ArcInner`. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> *mut ArcInner { - let layout = arcinner_layout_for_value_layout(value_layout); - - let ptr = allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout)); - - unsafe { Self::initialize_arcinner(ptr, layout, mem_to_arcinner) } - } - - /// Allocates an `ArcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided, - /// returning an error if allocation fails. - /// - /// The function `mem_to_arcinner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `ArcInner`. - unsafe fn try_allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> Result<*mut ArcInner, AllocError> { - let layout = arcinner_layout_for_value_layout(value_layout); - - let ptr = allocate(layout)?; - - let inner = unsafe { Self::initialize_arcinner(ptr, layout, mem_to_arcinner) }; - - Ok(inner) - } - - unsafe fn initialize_arcinner( - ptr: NonNull<[u8]>, - layout: Layout, - mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> *mut ArcInner { - let inner = mem_to_arcinner(ptr.as_non_null_ptr().as_ptr()); - debug_assert_eq!(unsafe { Layout::for_value_raw(inner) }, layout); - - unsafe { - (&raw mut (*inner).strong).write(atomic::AtomicUsize::new(1)); - (&raw mut (*inner).weak).write(atomic::AtomicUsize::new(1)); - } - - inner - } -} - -impl Arc { - /// Allocates an `ArcInner` with sufficient space for an unsized inner value. - #[inline] - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut ArcInner { - // Allocate for the `ArcInner` using the given value. - unsafe { - Arc::allocate_for_layout( - Layout::for_value_raw(ptr), - |layout| alloc.allocate(layout), - |mem| mem.with_metadata_of(ptr as *const ArcInner), - ) - } - } - - #[cfg(not(no_global_oom_handling))] - fn from_box_in(src: Box) -> Arc { - unsafe { - let value_size = size_of_val(&*src); - let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src)); - - // Copy value as bytes - ptr::copy_nonoverlapping( - (&raw const *src) as *const u8, - (&raw mut (*ptr).data) as *mut u8, - value_size, - ); - - // Free the allocation without dropping its contents - let (bptr, alloc) = Box::into_raw_with_allocator(src); - let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop, alloc.by_ref()); - drop(src); - - Self::from_ptr_in(ptr, alloc) - } - } -} - -impl Arc<[T]> { - /// Allocates an `ArcInner<[T]>` with the given length. 
- #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> { - unsafe { - Self::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut ArcInner<[T]>, - ) - } - } - - /// Copy elements from slice into newly allocated `Arc<[T]>` - /// - /// Unsafe because the caller must either take ownership or bind `T: Copy`. - #[cfg(not(no_global_oom_handling))] - unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { - unsafe { - let ptr = Self::allocate_for_slice(v.len()); - - ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).data) as *mut T, v.len()); - - Self::from_ptr(ptr) - } - } - - /// Constructs an `Arc<[T]>` from an iterator known to be of a certain size. - /// - /// Behavior is undefined should the size be wrong. - #[cfg(not(no_global_oom_handling))] - unsafe fn from_iter_exact(iter: impl Iterator, len: usize) -> Arc<[T]> { - // Panic guard while cloning T elements. - // In the event of a panic, elements that have been written - // into the new ArcInner will be dropped, then the memory freed. - struct Guard { - mem: NonNull, - elems: *mut T, - layout: Layout, - n_elems: usize, - } - - impl Drop for Guard { - fn drop(&mut self) { - unsafe { - let slice = from_raw_parts_mut(self.elems, self.n_elems); - ptr::drop_in_place(slice); - - Global.deallocate(self.mem, self.layout); - } - } - } - - unsafe { - let ptr = Self::allocate_for_slice(len); - - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value_raw(ptr); - - // Pointer to first element - let elems = (&raw mut (*ptr).data) as *mut T; - - let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; - - for (i, item) in iter.enumerate() { - ptr::write(elems.add(i), item); - guard.n_elems += 1; - } - - // All clear. Forget the guard so it doesn't free the new ArcInner. - mem::forget(guard); - - Self::from_ptr(ptr) - } - } -} - -impl Arc<[T], A> { - /// Allocates an `ArcInner<[T]>` with the given length. - #[inline] - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut ArcInner<[T]> { - unsafe { - Arc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| alloc.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut ArcInner<[T]>, - ) - } - } -} - -/// Specialization trait used for `From<&[T]>`. -#[cfg(not(no_global_oom_handling))] -trait ArcFromSlice { - fn from_slice(slice: &[T]) -> Self; -} - -#[cfg(not(no_global_oom_handling))] -impl ArcFromSlice for Arc<[T]> { - #[inline] - default fn from_slice(v: &[T]) -> Self { - unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) } - } -} - -#[cfg(not(no_global_oom_handling))] -impl ArcFromSlice for Arc<[T]> { - #[inline] - fn from_slice(v: &[T]) -> Self { - unsafe { Arc::copy_from_slice(v) } + RawRc::ptr_eq(&this.raw_rc, &other.raw_rc) } } @@ -2156,39 +1850,7 @@ impl Clone for Arc { /// ``` #[inline] fn clone(&self) -> Arc { - // Using a relaxed ordering is alright here, as knowledge of the - // original reference prevents other threads from erroneously deleting - // the object. - // - // As explained in the [Boost documentation][1], Increasing the - // reference counter can always be done with memory_order_relaxed: New - // references to an object can only be formed from an existing - // reference, and passing an existing reference from one thread to - // another must already provide any required synchronization. 
- // - // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) - let old_size = self.inner().strong.fetch_add(1, Relaxed); - - // However we need to guard against massive refcounts in case someone is `mem::forget`ing - // Arcs. If we don't do this the count can overflow and users will use-after free. This - // branch will never be taken in any realistic program. We abort because such a program is - // incredibly degenerate, and we don't care to support it. - // - // This check is not 100% water-proof: we error when the refcount grows beyond `isize::MAX`. - // But we do that check *after* having done the increment, so there is a chance here that - // the worst already happened and we actually do overflow the `usize` counter. However, that - // requires the counter to grow from `isize::MAX` to `usize::MAX` between the increment - // above and the `abort` below, which seems exceedingly unlikely. - // - // This is a global invariant, and also applies when using a compare-exchange loop to increment - // counters in other methods. - // Otherwise, the counter could be brought to an almost-overflow using a compare-exchange loop, - // and then overflow using a few `fetch_add`s. - if old_size > MAX_REFCOUNT { - abort(); - } - - unsafe { Self::from_inner_in(self.ptr, self.alloc.clone()) } + Self { raw_rc: unsafe { self.raw_rc.clone::() } } } } @@ -2198,7 +1860,7 @@ impl Deref for Arc { #[inline] fn deref(&self) -> &T { - &self.inner().data + self.raw_rc.as_ref() } } @@ -2269,75 +1931,7 @@ impl Arc { #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] pub fn make_mut(this: &mut Self) -> &mut T { - let size_of_val = mem::size_of_val::(&**this); - - // Note that we hold both a strong reference and a weak reference. - // Thus, releasing our strong reference only will not, by itself, cause - // the memory to be deallocated. - // - // Use Acquire to ensure that we see any writes to `weak` that happen - // before release writes (i.e., decrements) to `strong`. Since we hold a - // weak count, there's no chance the ArcInner itself could be - // deallocated. - if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { - // Another strong pointer exists, so we must clone. - - let this_data_ref: &T = &**this; - // `in_progress` drops the allocation if we panic before finishing initializing it. - let mut in_progress: UniqueArcUninit = - UniqueArcUninit::new(this_data_ref, this.alloc.clone()); - - let initialized_clone = unsafe { - // Clone. If the clone panics, `in_progress` will be dropped and clean up. - this_data_ref.clone_to_uninit(in_progress.data_ptr().cast()); - // Cast type of pointer, now that it is initialized. - in_progress.into_arc() - }; - *this = initialized_clone; - } else if this.inner().weak.load(Relaxed) != 1 { - // Relaxed suffices in the above because this is fundamentally an - // optimization: we are always racing with weak pointers being - // dropped. Worst case, we end up allocated a new Arc unnecessarily. - - // We removed the last strong ref, but there are additional weak - // refs remaining. We'll move the contents to a new Arc, and - // invalidate the other weak refs. - - // Note that it is not possible for the read of `weak` to yield - // usize::MAX (i.e., locked), since the weak count can only be - // locked by a thread with a strong reference. - - // Materialize our own implicit weak pointer, so that it can clean - // up the ArcInner as needed. 
- let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() }; - - // Can just steal the data, all that's left is Weaks - // - // We don't need panic-protection like the above branch does, but we might as well - // use the same mechanism. - let mut in_progress: UniqueArcUninit = - UniqueArcUninit::new(&**this, this.alloc.clone()); - unsafe { - // Initialize `in_progress` with move of **this. - // We have to express this in terms of bytes because `T: ?Sized`; there is no - // operation that just copies a value based on its `size_of_val()`. - ptr::copy_nonoverlapping( - ptr::from_ref(&**this).cast::(), - in_progress.data_ptr().cast::(), - size_of_val, - ); - - ptr::write(this, in_progress.into_arc()); - } - } else { - // We were the sole reference of either kind; bump back up the - // strong ref count. - this.inner().strong.store(1, Release); - } - - // As with `get_mut()`, the unsafety is ok because our reference was - // either unique to begin with, or became one upon cloning the contents. - unsafe { Self::get_mut_unchecked(this) } + unsafe { this.raw_rc.make_mut::() } } } @@ -2373,7 +1967,7 @@ impl Arc { #[inline] #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")] pub fn unwrap_or_clone(this: Self) -> T { - Arc::try_unwrap(this).unwrap_or_else(|arc| (*arc).clone()) + unsafe { Arc::into_raw_rc(this).unwrap_or_clone::() } } } @@ -2405,16 +1999,7 @@ impl Arc { #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] pub fn get_mut(this: &mut Self) -> Option<&mut T> { - if this.is_unique() { - // This unsafety is ok because we're guaranteed that the pointer - // returned is the *only* pointer that will ever be returned to T. Our - // reference count is guaranteed to be 1 at this point, and we required - // the Arc itself to be `mut`, so we're returning the only possible - // reference to the inner data. - unsafe { Some(Arc::get_mut_unchecked(this)) } - } else { - None - } + unsafe { this.raw_rc.get_mut::() } } /// Returns a mutable reference into the given `Arc`, @@ -2480,37 +2065,7 @@ impl Arc { #[inline] #[unstable(feature = "get_mut_unchecked", issue = "63292")] pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { - // We are careful to *not* create a reference covering the "count" fields, as - // this would alias with concurrent access to the reference counts (e.g. by `Weak`). - unsafe { &mut (*this.ptr.as_ptr()).data } - } - - /// Determine whether this is the unique reference (including weak refs) to - /// the underlying data. - /// - /// Note that this requires locking the weak ref count. - fn is_unique(&mut self) -> bool { - // lock the weak pointer count if we appear to be the sole weak pointer - // holder. - // - // The acquire label here ensures a happens-before relationship with any - // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements - // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded - // weak ref was never dropped, the CAS here will fail so we do not care to synchronize. - if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { - // This needs to be an `Acquire` to synchronize with the decrement of the `strong` - // counter in `drop` -- the only access that happens when any but the last reference - // is being dropped. - let unique = self.inner().strong.load(Acquire) == 1; - - // The release write here synchronizes with a read in `downgrade`, - // effectively preventing the above read of `strong` from happening - // after the write. 
- self.inner().weak.store(1, Release); // release the lock - unique - } else { - false - } + unsafe { this.raw_rc.get_mut_unchecked() } } } @@ -2543,54 +2098,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Arc { /// ``` #[inline] fn drop(&mut self) { - // Because `fetch_sub` is already atomic, we do not need to synchronize - // with other threads unless we are going to delete the object. This - // same logic applies to the below `fetch_sub` to the `weak` count. - if self.inner().strong.fetch_sub(1, Release) != 1 { - return; - } - - // This fence is needed to prevent reordering of use of the data and - // deletion of the data. Because it is marked `Release`, the decreasing - // of the reference count synchronizes with this `Acquire` fence. This - // means that use of the data happens before decreasing the reference - // count, which happens before this fence, which happens before the - // deletion of the data. - // - // As explained in the [Boost documentation][1], - // - // > It is important to enforce any possible access to the object in one - // > thread (through an existing reference) to *happen before* deleting - // > the object in a different thread. This is achieved by a "release" - // > operation after dropping a reference (any access to the object - // > through this reference must obviously happened before), and an - // > "acquire" operation before deleting the object. - // - // In particular, while the contents of an Arc are usually immutable, it's - // possible to have interior writes to something like a Mutex. Since a - // Mutex is not acquired when it is deleted, we can't rely on its - // synchronization logic to make writes in thread A visible to a destructor - // running in thread B. - // - // Also note that the Acquire fence here could probably be replaced with an - // Acquire load, which could improve performance in highly-contended - // situations. See [2]. - // - // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) - // [2]: (https://github.com/rust-lang/rust/pull/41714) - acquire!(self.inner().strong); - - // Make sure we aren't trying to "drop" the shared static for empty slices - // used by Default::default. - debug_assert!( - !ptr::addr_eq(self.ptr.as_ptr(), &STATIC_INNER_SLICE.inner), - "Arcs backed by a static should never reach a strong count of 0. 
\ - Likely decrement_strong_count or from_raw were called too many times.", - ); - - unsafe { - self.drop_slow(); - } + unsafe { self.raw_rc.drop::() }; } } @@ -2619,13 +2127,9 @@ impl Arc { where T: Any + Send + Sync, { - if (*self).is::() { - unsafe { - let (ptr, alloc) = Arc::into_inner_with_allocator(self); - Ok(Arc::from_inner_in(ptr.cast(), alloc)) - } - } else { - Err(self) + match Arc::into_raw_rc(self).downcast::() { + Ok(raw_rc) => Ok(Arc { raw_rc }), + Err(raw_rc) => Err(Self { raw_rc }), } } @@ -2661,10 +2165,7 @@ impl Arc { where T: Any + Send + Sync, { - unsafe { - let (ptr, alloc) = Arc::into_inner_with_allocator(self); - Arc::from_inner_in(ptr.cast(), alloc) - } + Arc { raw_rc: unsafe { Arc::into_raw_rc(self).downcast_unchecked() } } } } @@ -2687,12 +2188,7 @@ impl Weak { #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")] #[must_use] pub const fn new() -> Weak { - Weak { - ptr: unsafe { - NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) - }, - alloc: Global, - } + Self { raw_weak: RawWeak::new_dangling_in(Global) } } } @@ -2717,22 +2213,10 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn new_in(alloc: A) -> Weak { - Weak { - ptr: unsafe { - NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) - }, - alloc, - } + Self { raw_weak: RawWeak::new_dangling_in(alloc) } } } -/// Helper type to allow accessing the reference counts without -/// making any assertions about the data field. -struct WeakInner<'a> { - weak: &'a atomic::AtomicUsize, - strong: &'a atomic::AtomicUsize, -} - impl Weak { /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. /// @@ -2778,16 +2262,26 @@ impl Weak { #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Weak::from_raw_in(ptr, Global) } + Self { raw_weak: unsafe { RawWeak::from_raw(NonNull::new_unchecked(ptr.cast_mut())) } } } } impl Weak { + fn into_raw_weak(self) -> RawWeak { + unsafe { ptr::read(&ManuallyDrop::new(self).raw_weak) } + } + + fn raw_strong_count(&self) -> Option<&AtomicUsize> { + self.raw_weak + .strong_count() + .map(|strong_count| unsafe { AtomicUsize::from_ptr(strong_count.get()) }) + } + /// Returns a reference to the underlying allocator. #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn allocator(&self) -> &A { - &self.alloc + self.raw_weak.allocator() } /// Returns a raw pointer to the object `T` pointed to by this `Weak`. @@ -2818,18 +2312,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { - let ptr: *mut ArcInner = NonNull::as_ptr(self.ptr); - - if is_dangling(ptr) { - // If the pointer is dangling, we return the sentinel directly. This cannot be - // a valid payload address, as the payload is at least as aligned as ArcInner (usize). - ptr as *const T - } else { - // SAFETY: if is_dangling returns false, then the pointer is dereferenceable. - // The payload may be dropped at this point, and we have to maintain provenance, - // so use raw pointer manipulation. - unsafe { &raw mut (*ptr).data } - } + self.raw_weak.as_ptr().as_ptr() } /// Consumes the `Weak` and turns it into a raw pointer. 
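The `Clone` and `Drop` implementations above now forward to `RawRc` together with `Arc`'s atomic reference-count operations, so the memory-ordering argument from the removed bodies presumably lives behind that abstraction. The sketch below is not taken from the patch; it restates the two strong-count operations under the same rules, with illustrative function names and `MAX_REFCOUNT` standing in for the existing `isize::MAX` overflow guard.

use core::sync::atomic::{fence, AtomicUsize, Ordering::{Acquire, Relaxed, Release}};

const MAX_REFCOUNT: usize = isize::MAX as usize;

/// Mirrors the removed `Clone for Arc` body: a Relaxed `fetch_add` suffices because a
/// new reference can only be created from an existing one, but the previous value is
/// still checked so that leaked clones cannot overflow the counter.
fn increment_strong(strong: &AtomicUsize) {
    if strong.fetch_add(1, Relaxed) > MAX_REFCOUNT {
        std::process::abort();
    }
}

/// Mirrors the removed `Drop for Arc` body: returns `true` when the last strong
/// reference was just released. The Release decrement and the Acquire fence together
/// ensure every use of the value happens before its destruction.
fn decrement_strong(strong: &AtomicUsize) -> bool {
    if strong.fetch_sub(1, Release) != 1 {
        return false;
    }
    fence(Acquire);
    true
}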
@@ -2862,7 +2345,7 @@ impl Weak { #[must_use = "losing the pointer will leak memory"] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn into_raw(self) -> *const T { - ManuallyDrop::new(self).as_ptr() + self.into_raw_weak().into_raw().as_ptr() } /// Consumes the `Weak`, returning the wrapped pointer and allocator. @@ -2897,11 +2380,9 @@ impl Weak { #[must_use = "losing the pointer will leak memory"] #[unstable(feature = "allocator_api", issue = "32838")] pub fn into_raw_with_allocator(self) -> (*const T, A) { - let this = mem::ManuallyDrop::new(self); - let result = this.as_ptr(); - // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped - let alloc = unsafe { ptr::read(&this.alloc) }; - (result, alloc) + let (ptr, alloc) = self.into_raw_weak().into_raw_parts(); + + (ptr.as_ptr(), alloc) } /// Converts a raw pointer previously created by [`into_raw`] back into `Weak` in the provided @@ -2949,22 +2430,11 @@ impl Weak { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { - // See Weak::as_ptr for context on how the input pointer is derived. - - let ptr = if is_dangling(ptr) { - // This is a dangling Weak. - ptr as *mut ArcInner - } else { - // Otherwise, we're guaranteed the pointer came from a nondangling Weak. - // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. - let offset = unsafe { data_offset(ptr) }; - // Thus, we reverse the offset to get the whole RcInner. - // SAFETY: the pointer originated from a Weak, so this offset is safe. - unsafe { ptr.byte_sub(offset) as *mut ArcInner } - }; - - // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc } + Self { + raw_weak: unsafe { + RawWeak::from_raw_parts(NonNull::new_unchecked(ptr.cast_mut()), alloc) + }, + } } } @@ -2999,31 +2469,7 @@ impl Weak { where A: Clone, { - #[inline] - fn checked_increment(n: usize) -> Option { - // Any write of 0 we can observe leaves the field in permanently zero state. - if n == 0 { - return None; - } - // See comments in `Arc::clone` for why we do this (for `mem::forget`). - assert!(n <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR); - Some(n + 1) - } - - // We use a CAS loop to increment the strong count instead of a - // fetch_add as this function should never take the reference count - // from zero to one. - // - // Relaxed is fine for the failure case because we don't have any expectations about the new state. - // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner - // value can be initialized after `Weak` references have already been created. In that case, we - // expect to observe the fully initialized value. - if self.inner()?.strong.fetch_update(Acquire, Relaxed, checked_increment).is_ok() { - // SAFETY: pointer is not null, verified in checked_increment - unsafe { Some(Arc::from_inner_in(self.ptr, self.alloc.clone())) } - } else { - None - } + self.raw_weak.upgrade::().map(|raw_rc| Arc { raw_rc }) } /// Gets the number of strong (`Arc`) pointers pointing to this allocation. 
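`Weak::upgrade` above shrinks to a call into `RawWeak::upgrade`; the removed body spells out the invariant any atomic implementation has to keep: the strong count may only be increased when it is not already zero, and the successful increment must be Acquire so a value initialized through `new_cyclic` is visible to the upgrader. A sketch of that step, not taken from the patch, with illustrative names:

use core::sync::atomic::{AtomicUsize, Ordering::{Acquire, Relaxed}};

/// Refuses to resurrect a value whose strong count has already reached zero.
/// (The removed body additionally asserts against `MAX_REFCOUNT` overflow.)
fn checked_increment(n: usize) -> Option<usize> {
    if n == 0 { None } else { Some(n + 1) }
}

/// Returns `true` if a new strong reference was created.
fn try_upgrade(strong: &AtomicUsize) -> bool {
    strong.fetch_update(Acquire, Relaxed, checked_increment).is_ok()
}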
@@ -3032,7 +2478,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn strong_count(&self) -> usize { - if let Some(inner) = self.inner() { inner.strong.load(Relaxed) } else { 0 } + self.raw_strong_count().map_or(0, |strong_count| strong_count.load(Relaxed)) } /// Gets an approximation of the number of `Weak` pointers pointing to this @@ -3049,9 +2495,9 @@ impl Weak { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - if let Some(inner) = self.inner() { - let weak = inner.weak.load(Acquire); - let strong = inner.strong.load(Relaxed); + if let Some(ref_counts) = self.raw_weak.ref_counts() { + let weak = unsafe { AtomicUsize::from_ptr(ref_counts.weak.get()) }.load(Acquire); + let strong = unsafe { AtomicUsize::from_ptr(ref_counts.strong.get()) }.load(Relaxed); if strong == 0 { 0 } else { @@ -3067,21 +2513,6 @@ impl Weak { } } - /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`, - /// (i.e., when this `Weak` was created by `Weak::new`). - #[inline] - fn inner(&self) -> Option> { - let ptr = self.ptr.as_ptr(); - if is_dangling(ptr) { - None - } else { - // We are careful to *not* create a reference covering the "data" field, as - // the field may be mutated concurrently (for example, if the last `Arc` - // is dropped, the data field will be dropped in-place). - Some(unsafe { WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } }) - } - } - /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if /// both don't point to any allocation (because they were created with `Weak::new()`). However, /// this function ignores the metadata of `dyn Trait` pointers. @@ -3127,7 +2558,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) + RawWeak::ptr_eq(&self.raw_weak, &other.raw_weak) } } @@ -3146,20 +2577,7 @@ impl Clone for Weak { /// ``` #[inline] fn clone(&self) -> Weak { - if let Some(inner) = self.inner() { - // See comments in Arc::clone() for why this is relaxed. This can use a - // fetch_add (ignoring the lock) because the weak count is only locked - // where are *no other* weak pointers in existence. (So we can't be - // running this code in that case). - let old_size = inner.weak.fetch_add(1, Relaxed); - - // See comments in Arc::clone() for why we do this (for mem::forget). - if old_size > MAX_REFCOUNT { - abort(); - } - } - - Weak { ptr: self.ptr, alloc: self.alloc.clone() } + Self { raw_weak: unsafe { self.raw_weak.clone::() } } } } @@ -3180,7 +2598,7 @@ impl Default for Weak { /// assert!(empty.upgrade().is_none()); /// ``` fn default() -> Weak { - Weak::new() + Self { raw_weak: RawWeak::default() } } } @@ -3211,69 +2629,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak { /// assert!(other_weak_foo.upgrade().is_none()); /// ``` fn drop(&mut self) { - // If we find out that we were the last weak pointer, then its time to - // deallocate the data entirely. See the discussion in Arc::drop() about - // the memory orderings - // - // It's not necessary to check for the locked state here, because the - // weak count can only be locked if there was precisely one weak ref, - // meaning that drop could only subsequently run ON that remaining weak - // ref, which can only happen after the lock is released. 
- let inner = if let Some(inner) = self.inner() { inner } else { return }; - - if inner.weak.fetch_sub(1, Release) == 1 { - acquire!(inner.weak); - - // Make sure we aren't trying to "deallocate" the shared static for empty slices - // used by Default::default. - debug_assert!( - !ptr::addr_eq(self.ptr.as_ptr(), &STATIC_INNER_SLICE.inner), - "Arc/Weaks backed by a static should never be deallocated. \ - Likely decrement_strong_count or from_raw were called too many times.", - ); - - unsafe { - self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) - } - } - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -trait ArcEqIdent { - fn eq(&self, other: &Arc) -> bool; - fn ne(&self, other: &Arc) -> bool; -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl ArcEqIdent for Arc { - #[inline] - default fn eq(&self, other: &Arc) -> bool { - **self == **other - } - #[inline] - default fn ne(&self, other: &Arc) -> bool { - **self != **other - } -} - -/// We're doing this specialization here, and not as a more general optimization on `&T`, because it -/// would otherwise add a cost to all equality checks on refs. We assume that `Arc`s are used to -/// store large values, that are slow to clone, but also heavy to check for equality, causing this -/// cost to pay off more easily. It's also more likely to have two `Arc` clones, that point to -/// the same value, than two `&T`s. -/// -/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. -#[stable(feature = "rust1", since = "1.0.0")] -impl ArcEqIdent for Arc { - #[inline] - fn eq(&self, other: &Arc) -> bool { - Arc::ptr_eq(self, other) || **self == **other - } - - #[inline] - fn ne(&self, other: &Arc) -> bool { - !Arc::ptr_eq(self, other) && **self != **other + unsafe { self.raw_weak.drop::() }; } } @@ -3298,7 +2654,7 @@ impl PartialEq for Arc { /// ``` #[inline] fn eq(&self, other: &Arc) -> bool { - ArcEqIdent::eq(self, other) + RawRc::eq(&self.raw_rc, &other.raw_rc) } /// Inequality for two `Arc`s. @@ -3319,7 +2675,7 @@ impl PartialEq for Arc { /// ``` #[inline] fn ne(&self, other: &Arc) -> bool { - ArcEqIdent::ne(self, other) + RawRc::ne(&self.raw_rc, &other.raw_rc) } } @@ -3340,7 +2696,7 @@ impl PartialOrd for Arc { /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6))); /// ``` fn partial_cmp(&self, other: &Arc) -> Option { - (**self).partial_cmp(&**other) + RawRc::partial_cmp(&self.raw_rc, &other.raw_rc) } /// Less-than comparison for two `Arc`s. @@ -3357,7 +2713,7 @@ impl PartialOrd for Arc { /// assert!(five < Arc::new(6)); /// ``` fn lt(&self, other: &Arc) -> bool { - *(*self) < *(*other) + RawRc::lt(&self.raw_rc, &other.raw_rc) } /// 'Less than or equal to' comparison for two `Arc`s. @@ -3374,7 +2730,7 @@ impl PartialOrd for Arc { /// assert!(five <= Arc::new(5)); /// ``` fn le(&self, other: &Arc) -> bool { - *(*self) <= *(*other) + RawRc::le(&self.raw_rc, &other.raw_rc) } /// Greater-than comparison for two `Arc`s. @@ -3391,7 +2747,7 @@ impl PartialOrd for Arc { /// assert!(five > Arc::new(4)); /// ``` fn gt(&self, other: &Arc) -> bool { - *(*self) > *(*other) + RawRc::gt(&self.raw_rc, &other.raw_rc) } /// 'Greater than or equal to' comparison for two `Arc`s. 
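Equality and ordering now go through `RawRc`, replacing the removed `ArcEqIdent` specialization above. Whether `RawRc::eq` keeps the pointer-identity fast path is not visible in this hunk; the idea itself only needs `T: Eq`, because a `PartialEq`-only type may be irreflexive (for example `f64::NAN != f64::NAN`), in which case pointer equality must not imply value equality. A small free-standing illustration:

use std::sync::Arc;

/// For `T: Eq`, two `Arc`s backed by the same allocation must compare equal, so the
/// cheap pointer check can short-circuit a potentially expensive value comparison.
fn arc_eq_fast_path<T: Eq>(a: &Arc<T>, b: &Arc<T>) -> bool {
    Arc::ptr_eq(a, b) || **a == **b
}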
@@ -3408,7 +2764,7 @@ impl PartialOrd for Arc { /// assert!(five >= Arc::new(5)); /// ``` fn ge(&self, other: &Arc) -> bool { - *(*self) >= *(*other) + RawRc::ge(&self.raw_rc, &other.raw_rc) } } #[stable(feature = "rust1", since = "1.0.0")] @@ -3428,7 +2784,7 @@ impl Ord for Arc { /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6))); /// ``` fn cmp(&self, other: &Arc) -> Ordering { - (**self).cmp(&**other) + RawRc::cmp(&self.raw_rc, &other.raw_rc) } } #[stable(feature = "rust1", since = "1.0.0")] @@ -3437,21 +2793,21 @@ impl Eq for Arc {} #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Display for Arc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&**self, f) + fmt::Display::fmt(&self.raw_rc, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for Arc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&**self, f) + fmt::Debug::fmt(&self.raw_rc, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Pointer for Arc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Pointer::fmt(&(&raw const **self), f) + fmt::Pointer::fmt(&self.raw_rc, f) } } @@ -3469,19 +2825,17 @@ impl Default for Arc { /// assert_eq!(*x, 0); /// ``` fn default() -> Arc { - unsafe { - Self::from_inner( - Box::leak(Box::write(Box::new_uninit(), ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data: T::default(), - })) - .into(), - ) - } + Self { raw_rc: RawRc::default() } } } +#[cfg(not(no_global_oom_handling))] +const MAX_STATIC_INNER_SLICE_ALIGNMENT: usize = 16; + +#[cfg(not(no_global_oom_handling))] +const STATIC_INNER_PADDING: usize = + Layout::new::().padding_needed_for(MAX_STATIC_INNER_SLICE_ALIGNMENT); + /// Struct to hold the static `ArcInner` used for empty `Arc` as /// returned by `Default::default`. /// @@ -3489,19 +2843,22 @@ impl Default for Arc { /// * `repr(align(16))` so we can use it for `[T]` with `align_of::() <= 16`. /// * `repr(C)` so `inner` is at offset 0 (and thus guaranteed to actually be aligned to 16). /// * `[u8; 1]` (to be initialized with 0) so it can be used for `Arc`. +#[cfg(not(no_global_oom_handling))] #[repr(C, align(16))] struct SliceArcInnerForStatic { - inner: ArcInner<[u8; 1]>, + padding: MaybeUninit<[RefCounts; STATIC_INNER_PADDING / mem::size_of::()]>, + ref_counts: RefCounts, + value: [u8; 1], } + #[cfg(not(no_global_oom_handling))] -const MAX_STATIC_INNER_SLICE_ALIGNMENT: usize = 16; +unsafe impl Sync for SliceArcInnerForStatic {} +#[cfg(not(no_global_oom_handling))] static STATIC_INNER_SLICE: SliceArcInnerForStatic = SliceArcInnerForStatic { - inner: ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data: [0], - }, + padding: MaybeUninit::uninit(), + ref_counts: RefCounts { weak: UnsafeCell::new(1), strong: UnsafeCell::new(1) }, + value: [0], }; #[cfg(not(no_global_oom_handling))] @@ -3514,8 +2871,8 @@ impl Default for Arc { fn default() -> Self { let arc: Arc<[u8]> = Default::default(); debug_assert!(core::str::from_utf8(&*arc).is_ok()); - let (ptr, alloc) = Arc::into_inner_with_allocator(arc); - unsafe { Arc::from_ptr_in(ptr.as_ptr() as *mut ArcInner, alloc) } + let (ptr, alloc) = Arc::into_raw_with_allocator(arc); + unsafe { Arc::from_raw_in(ptr as *mut str, alloc) } } } @@ -3527,14 +2884,13 @@ impl Default for Arc { /// This may or may not share an allocation with other Arcs. 
#[inline] fn default() -> Self { - use core::ffi::CStr; - let inner: NonNull> = NonNull::from(&STATIC_INNER_SLICE.inner); - let inner: NonNull> = - NonNull::new(inner.as_ptr() as *mut ArcInner).unwrap(); - // `this` semantically is the Arc "owned" by the static, so make sure not to drop it. - let this: mem::ManuallyDrop> = - unsafe { mem::ManuallyDrop::new(Arc::from_inner(inner)) }; - (*this).clone() + unsafe { + let ptr = NonNull::from(core::ffi::CStr::from_bytes_with_nul_unchecked( + &STATIC_INNER_SLICE.value, + )); + + Self { raw_rc: RawRc::from_raw(ptr).clone::() } + } } } @@ -3547,28 +2903,25 @@ impl Default for Arc<[T]> { #[inline] fn default() -> Self { if mem::align_of::() <= MAX_STATIC_INNER_SLICE_ALIGNMENT { - // We take a reference to the whole struct instead of the ArcInner<[u8; 1]> inside it so - // we don't shrink the range of bytes the ptr is allowed to access under Stacked Borrows. - // (Miri complains on 32-bit targets with Arc<[Align16]> otherwise.) - // (Note that NonNull::from(&STATIC_INNER_SLICE.inner) is fine under Tree Borrows.) - let inner: NonNull = NonNull::from(&STATIC_INNER_SLICE); - let inner: NonNull> = inner.cast(); - // `this` semantically is the Arc "owned" by the static, so make sure not to drop it. - let this: mem::ManuallyDrop> = - unsafe { mem::ManuallyDrop::new(Arc::from_inner(inner)) }; - return (*this).clone(); + unsafe { + let ptr = NonNull::slice_from_raw_parts( + NonNull::from(&STATIC_INNER_SLICE.value).cast(), + 0, + ); + + return Self { raw_rc: RawRc::from_raw(ptr).clone::() }; + } } // If T's alignment is too large for the static, make a new unique allocation. - let arr: [T; 0] = []; - Arc::from(arr) + Self { raw_rc: RawRc::default() } } } #[stable(feature = "rust1", since = "1.0.0")] impl Hash for Arc { fn hash(&self, state: &mut H) { - (**self).hash(state) + self.raw_rc.hash(state); } } @@ -3590,7 +2943,7 @@ impl From for Arc { /// assert_eq!(Arc::from(x), arc); /// ``` fn from(t: T) -> Self { - Arc::new(t) + Self { raw_rc: RawRc::from(t) } } } @@ -3611,7 +2964,7 @@ impl From<[T; N]> for Arc<[T]> { /// ``` #[inline] fn from(v: [T; N]) -> Arc<[T]> { - Arc::<[T; N]>::from(v) + Self { raw_rc: RawRc::from(v) } } } @@ -3630,7 +2983,7 @@ impl From<&[T]> for Arc<[T]> { /// ``` #[inline] fn from(v: &[T]) -> Arc<[T]> { - >::from_slice(v) + Self { raw_rc: RawRc::from(v) } } } @@ -3650,7 +3003,7 @@ impl From<&mut [T]> for Arc<[T]> { /// ``` #[inline] fn from(v: &mut [T]) -> Arc<[T]> { - Arc::from(&*v) + Self { raw_rc: RawRc::from(v) } } } @@ -3668,8 +3021,7 @@ impl From<&str> for Arc { /// ``` #[inline] fn from(v: &str) -> Arc { - let arc = Arc::<[u8]>::from(v.as_bytes()); - unsafe { Arc::from_raw(Arc::into_raw(arc) as *const str) } + Self { raw_rc: RawRc::from(v) } } } @@ -3689,7 +3041,7 @@ impl From<&mut str> for Arc { /// ``` #[inline] fn from(v: &mut str) -> Arc { - Arc::from(&*v) + Self { raw_rc: RawRc::from(v) } } } @@ -3708,7 +3060,7 @@ impl From for Arc { /// ``` #[inline] fn from(v: String) -> Arc { - Arc::from(&v[..]) + Self { raw_rc: RawRc::from(v) } } } @@ -3727,7 +3079,7 @@ impl From> for Arc { /// ``` #[inline] fn from(v: Box) -> Arc { - Arc::from_box_in(v) + Self { raw_rc: RawRc::from(v) } } } @@ -3746,18 +3098,7 @@ impl From> for Arc<[T], A> { /// ``` #[inline] fn from(v: Vec) -> Arc<[T], A> { - unsafe { - let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); - - let rc_ptr = Self::allocate_for_slice_in(len, &alloc); - ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).data) as *mut T, len); - - // Create a `Vec` with length 
0, to deallocate the buffer - // without dropping its contents or the allocator - let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc); - - Self::from_ptr_in(rc_ptr, alloc) - } + Self { raw_rc: RawRc::from(v) } } } @@ -3802,8 +3143,7 @@ impl From> for Arc<[u8]> { /// ``` #[inline] fn from(rc: Arc) -> Self { - // SAFETY: `str` has the same layout as `[u8]`. - unsafe { Arc::from_raw(Arc::into_raw(rc) as *const [u8]) } + Self { raw_rc: RawRc::from(Arc::into_raw_rc(rc)) } } } @@ -3812,11 +3152,9 @@ impl TryFrom> for Arc<[T; N], A> { type Error = Arc<[T], A>; fn try_from(boxed_slice: Arc<[T], A>) -> Result { - if boxed_slice.len() == N { - let (ptr, alloc) = Arc::into_inner_with_allocator(boxed_slice); - Ok(unsafe { Arc::from_inner_in(ptr.cast(), alloc) }) - } else { - Err(boxed_slice) + match RawRc::try_from(Arc::into_raw_rc(boxed_slice)) { + Ok(raw_rc) => Ok(Self { raw_rc }), + Err(raw_rc) => Err(Arc { raw_rc }), } } } @@ -3863,152 +3201,27 @@ impl FromIterator for Arc<[T]> { /// # assert_eq!(&*evens, &*(0..10).collect::>()); /// ``` fn from_iter>(iter: I) -> Self { - ToArcSlice::to_arc_slice(iter.into_iter()) - } -} - -#[cfg(not(no_global_oom_handling))] -/// Specialization trait used for collecting into `Arc<[T]>`. -trait ToArcSlice: Iterator + Sized { - fn to_arc_slice(self) -> Arc<[T]>; -} - -#[cfg(not(no_global_oom_handling))] -impl> ToArcSlice for I { - default fn to_arc_slice(self) -> Arc<[T]> { - self.collect::>().into() - } -} - -#[cfg(not(no_global_oom_handling))] -impl> ToArcSlice for I { - fn to_arc_slice(self) -> Arc<[T]> { - // This is the case for a `TrustedLen` iterator. - let (low, high) = self.size_hint(); - if let Some(high) = high { - debug_assert_eq!( - low, - high, - "TrustedLen iterator's size hint is not exact: {:?}", - (low, high) - ); - - unsafe { - // SAFETY: We need to ensure that the iterator has an exact length and we have. - Arc::from_iter_exact(self, low) - } - } else { - // TrustedLen contract guarantees that `upper_bound == None` implies an iterator - // length exceeding `usize::MAX`. - // The default implementation would collect into a vec which would panic. - // Thus we panic here immediately without invoking `Vec` code. - panic!("capacity overflow"); - } + Self { raw_rc: RawRc::from_iter(iter) } } } #[stable(feature = "rust1", since = "1.0.0")] impl borrow::Borrow for Arc { fn borrow(&self) -> &T { - &**self + self.raw_rc.as_ref() } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] impl AsRef for Arc { fn as_ref(&self) -> &T { - &**self + self.raw_rc.as_ref() } } #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Arc {} -/// Gets the offset within an `ArcInner` for the payload behind a pointer. -/// -/// # Safety -/// -/// The pointer must point to (and have valid metadata for) a previously -/// valid instance of T, but the T is allowed to be dropped. -unsafe fn data_offset(ptr: *const T) -> usize { - // Align the unsized value to the end of the ArcInner. - // Because RcInner is repr(C), it will always be the last field in memory. - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of align_of_val_raw; this is an implementation - // detail of the language that must not be relied upon outside of std. 
- unsafe { data_offset_align(align_of_val_raw(ptr)) } -} - -#[inline] -fn data_offset_align(align: usize) -> usize { - let layout = Layout::new::>(); - layout.size() + layout.padding_needed_for(align) -} - -/// A unique owning pointer to an [`ArcInner`] **that does not imply the contents are initialized,** -/// but will deallocate it (without dropping the value) when dropped. -/// -/// This is a helper for [`Arc::make_mut()`] to ensure correct cleanup on panic. -#[cfg(not(no_global_oom_handling))] -struct UniqueArcUninit { - ptr: NonNull>, - layout_for_value: Layout, - alloc: Option, -} - -#[cfg(not(no_global_oom_handling))] -impl UniqueArcUninit { - /// Allocates an ArcInner with layout suitable to contain `for_value` or a clone of it. - fn new(for_value: &T, alloc: A) -> UniqueArcUninit { - let layout = Layout::for_value(for_value); - let ptr = unsafe { - Arc::allocate_for_layout( - layout, - |layout_for_arcinner| alloc.allocate(layout_for_arcinner), - |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const ArcInner), - ) - }; - Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) } - } - - /// Returns the pointer to be written into to initialize the [`Arc`]. - fn data_ptr(&mut self) -> *mut T { - let offset = data_offset_align(self.layout_for_value.align()); - unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T } - } - - /// Upgrade this into a normal [`Arc`]. - /// - /// # Safety - /// - /// The data must have been initialized (by writing to [`Self::data_ptr()`]). - unsafe fn into_arc(self) -> Arc { - let mut this = ManuallyDrop::new(self); - let ptr = this.ptr.as_ptr(); - let alloc = this.alloc.take().unwrap(); - - // SAFETY: The pointer is valid as per `UniqueArcUninit::new`, and the caller is responsible - // for having initialized the data. - unsafe { Arc::from_ptr_in(ptr, alloc) } - } -} - -#[cfg(not(no_global_oom_handling))] -impl Drop for UniqueArcUninit { - fn drop(&mut self) { - // SAFETY: - // * new() produced a pointer safe to deallocate. - // * We own the pointer unless into_arc() was called, which forgets us. 
- unsafe { - self.alloc.take().unwrap().deallocate( - self.ptr.cast(), - arcinner_layout_for_value_layout(self.layout_for_value), - ); - } - } -} - #[stable(feature = "arc_error", since = "1.52.0")] impl core::error::Error for Arc { #[allow(deprecated, deprecated_in_future)] diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index 34bb5c39909e4..19ecf51c169f1 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -182,14 +182,29 @@ def display_hint(): return "array" +_REF_COUNTS_PTR_TYPE = None + + +def _get_ref_counts_ptr_type(): + global _REF_COUNTS_PTR_TYPE + + if _REF_COUNTS_PTR_TYPE is None: + _REF_COUNTS_PTR_TYPE = gdb.lookup_type("alloc::raw_rc::RefCounts").pointer() + + return _REF_COUNTS_PTR_TYPE + + class StdRcProvider(printer_base): def __init__(self, valobj, is_atomic=False): self._valobj = valobj self._is_atomic = is_atomic - self._ptr = unwrap_unique_or_non_null(valobj["ptr"]) - self._value = self._ptr["data" if is_atomic else "value"] - self._strong = self._ptr["strong"]["v" if is_atomic else "value"]["value"] - self._weak = self._ptr["weak"]["v" if is_atomic else "value"]["value"] - 1 + self._ptr = unwrap_unique_or_non_null(valobj["raw_rc"]["weak"]["ptr"]) + self._value = self._ptr.dereference() + + ref_counts_ptr = self._ptr.reinterpret_cast(_get_ref_counts_ptr_type()) - 1 + + self._strong = ref_counts_ptr["strong"]["value"] + self._weak = ref_counts_ptr["weak"]["value"] - 1 def to_string(self): if self._is_atomic: diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index 2f32ed833af1e..5bf31a47ab557 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -1,6 +1,7 @@ import sys from lldb import ( + SBAddress, SBData, SBError, SBValue, @@ -675,6 +676,18 @@ def StdRcSummaryProvider(valobj: SBValue, _dict: LLDBOpaque) -> str: return "strong={}, weak={}".format(strong, weak) +_REF_COUNTS_TYPE = None + + +def _get_or_init_ref_counts_type(target): + global _REF_COUNTS_TYPE + + if _REF_COUNTS_TYPE is None: + _REF_COUNTS_TYPE = target.FindFirstType("alloc::raw_rc::RefCounts") + + return _REF_COUNTS_TYPE + + class StdRcSyntheticProvider: """Pretty-printer for alloc::rc::Rc and alloc::sync::Arc @@ -694,21 +707,33 @@ class StdRcSyntheticProvider: def __init__(self, valobj: SBValue, _dict: LLDBOpaque, is_atomic: bool = False): self.valobj = valobj - self.ptr = unwrap_unique_or_non_null(self.valobj.GetChildMemberWithName("ptr")) + ptr = ( + self.valobj.GetChildMemberWithName("raw_rc") + .GetChildMemberWithName("weak") + .GetChildMemberWithName("ptr") + .GetChildMemberWithName("pointer") + ) - self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value") + self.value = ptr.deref.Clone("value") - self.strong = ( - self.ptr.GetChildMemberWithName("strong") - .GetChildAtIndex(0) - .GetChildMemberWithName("value") - ) - self.weak = ( - self.ptr.GetChildMemberWithName("weak") - .GetChildAtIndex(0) - .GetChildMemberWithName("value") + target = valobj.GetTarget() + ref_counts_type = _get_or_init_ref_counts_type(target) + ref_counts_address = ptr.GetValueAsUnsigned() - ref_counts_type.size + + ref_counts_value = target.CreateValueFromAddress( + "ref_counts", + SBAddress(ref_counts_address, target), + ref_counts_type, ) + self.strong = ref_counts_value.GetChildMemberWithName( + "strong" + ).GetChildMemberWithName("value") + + self.weak = ref_counts_value.GetChildMemberWithName( + "weak" + ).GetChildMemberWithName("value") + self.value_builder = ValueBuilder(valobj) self.update() diff --git 
a/src/etc/natvis/liballoc.natvis b/src/etc/natvis/liballoc.natvis index 1528a8b1226ca..b8835b5cb9d47 100644 --- a/src/etc/natvis/liballoc.natvis +++ b/src/etc/natvis/liballoc.natvis @@ -73,117 +73,116 @@ --> - {ptr.pointer->value} + {*raw_rc.weak.ptr.pointer} - ptr.pointer->value - ptr.pointer->strong - ptr.pointer->weak + *raw_rc.weak.ptr.pointer + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer - 1)->weak - ptr.pointer.pointer->strong - ptr.pointer.pointer->weak + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.pointer - 1)->weak - {{ len={ptr.pointer.length} }} + {{ len={raw_rc.weak.ptr.pointer.length} }} - ptr.pointer.length - ptr.pointer.data_ptr->strong - ptr.pointer.data_ptr->weak + raw_rc.weak.ptr.pointer.length + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.data_ptr - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.data_ptr - 1)->weak - ptr.pointer.length - - ($T1*)(((size_t*)ptr.pointer.data_ptr) + 2) + raw_rc.weak.ptr.pointer.length + ($T1*)raw_rc.weak.ptr.pointer.data_ptr - {ptr.pointer->value} + {*raw_weak.ptr.pointer} - ptr.pointer->value - ptr.pointer->strong - ptr.pointer->weak + *raw_weak.ptr.pointer + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer - 1)->weak - ptr.pointer.pointer->strong - ptr.pointer.pointer->weak + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.pointer - 1)->weak - {{ len={ptr.pointer.length} }} + {{ len={raw_weak.ptr.pointer.length} }} - ptr.pointer.length - ptr.pointer.data_ptr->strong - ptr.pointer.data_ptr->weak + raw_weak.ptr.pointer.length + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.data_ptr - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.data_ptr - 1)->weak - ptr.pointer.length - ($T1*)(((size_t*)ptr.pointer.data_ptr) + 2) + raw_weak.ptr.pointer.length + ($T1*)raw_weak.ptr.pointer.data_ptr - {ptr.pointer->data} + {*raw_rc.weak.ptr.pointer} - ptr.pointer->data - ptr.pointer->strong - ptr.pointer->weak + *raw_rc.weak.ptr.pointer + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer - 1)->weak - ptr.pointer.pointer->strong - ptr.pointer.pointer->weak + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.pointer - 1)->weak - {{ len={ptr.pointer.length} }} + {{ len={raw_rc.weak.ptr.pointer.length} }} - ptr.pointer.length - ptr.pointer.data_ptr->strong - ptr.pointer.data_ptr->weak + raw_rc.weak.ptr.pointer.length + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.data_ptr - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_rc.weak.ptr.pointer.data_ptr - 1)->weak - ptr.pointer.length - ($T1*)(((size_t*)ptr.pointer.data_ptr) + 2) + raw_rc.weak.ptr.pointer.length + ($T1*)raw_rc.weak.ptr.pointer.data_ptr - {ptr.pointer->data} + {*raw_weak.ptr.pointer} - ptr.pointer->data - ptr.pointer->strong - ptr.pointer->weak + *raw_weak.ptr.pointer + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer - 1)->weak - ptr.pointer.pointer->strong - ptr.pointer.pointer->weak + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.pointer - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.pointer - 1)->weak - {{ 
len={ptr.pointer.length} }} + {{ len={raw_weak.ptr.pointer.length} }} - ptr.pointer.length - ptr.pointer.data_ptr->strong - ptr.pointer.data_ptr->weak + raw_weak.ptr.pointer.length + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.data_ptr - 1)->strong + ((alloc::raw_rc::RefCounts *)raw_weak.ptr.pointer.data_ptr - 1)->weak - ptr.pointer.length - ($T1*)(((size_t*)ptr.pointer.data_ptr) + 2) + raw_weak.ptr.pointer.length + ($T1*)raw_weak.ptr.pointer.data_ptr diff --git a/src/tools/miri/tests/fail/memleak_rc.stderr b/src/tools/miri/tests/fail/memleak_rc.stderr index df12eeed6ac64..2e8492867b3a4 100644 --- a/src/tools/miri/tests/fail/memleak_rc.stderr +++ b/src/tools/miri/tests/fail/memleak_rc.stderr @@ -1,10 +1,14 @@ error: memory leaked: ALLOC (Rust heap, SIZE, ALIGN), allocated here: - --> RUSTLIB/alloc/src/rc.rs:LL:CC + --> RUSTLIB/alloc/src/raw_rc.rs:LL:CC | -LL | Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value })) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | alloc.allocate(rc_layout.allocation_layout), + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: BACKTRACE: + = note: inside `alloc::raw_rc::allocate_uninit_for_rc::` at RUSTLIB/alloc/src/raw_rc.rs:LL:CC + = note: inside `alloc::raw_rc::RawWeak::>, std::alloc::Global>::new_uninit_in::` at RUSTLIB/alloc/src/raw_rc.rs:LL:CC + = note: inside `alloc::raw_rc::RawWeak::>, std::alloc::Global>::new_uninit::` at RUSTLIB/alloc/src/raw_rc.rs:LL:CC + = note: inside `alloc::raw_rc::RawRc::>, std::alloc::Global>::new` at RUSTLIB/alloc/src/raw_rc.rs:LL:CC = note: inside `std::rc::Rc::>>::new` at RUSTLIB/alloc/src/rc.rs:LL:CC note: inside `main` --> tests/fail/memleak_rc.rs:LL:CC diff --git a/tests/codegen/issues/issue-111603.rs b/tests/codegen/issues/issue-111603.rs index 41bfb493ff580..e09af6f11081b 100644 --- a/tests/codegen/issues/issue-111603.rs +++ b/tests/codegen/issues/issue-111603.rs @@ -20,8 +20,7 @@ pub fn new_from_array(x: u64) -> Arc<[u64]> { // CHECK-LABEL: @new_uninit #[no_mangle] pub fn new_uninit(x: u64) -> Arc<[u64; 1000]> { - // CHECK: call alloc::sync::arcinner_layout_for_value_layout - // CHECK-NOT: call alloc::sync::arcinner_layout_for_value_layout + // CHECK-NOT: call {{?}}::from_value_layout let mut arc = Arc::new_uninit(); unsafe { Arc::get_mut_unchecked(&mut arc) }.write([x; 1000]); unsafe { arc.assume_init() } @@ -30,8 +29,8 @@ pub fn new_uninit(x: u64) -> Arc<[u64; 1000]> { // CHECK-LABEL: @new_uninit_slice #[no_mangle] pub fn new_uninit_slice(x: u64) -> Arc<[u64]> { - // CHECK: call alloc::sync::arcinner_layout_for_value_layout - // CHECK-NOT: call alloc::sync::arcinner_layout_for_value_layout + // CHECK: call {{?}}::from_value_layout + // CHECK-NOT: call {{?}}::from_value_layout let mut arc = Arc::new_uninit_slice(1000); for elem in unsafe { Arc::get_mut_unchecked(&mut arc) } { elem.write(x); diff --git a/tests/codegen/placement-new.rs b/tests/codegen/placement-new.rs index 0ec2b6a6f20e7..a437164a926f0 100644 --- a/tests/codegen/placement-new.rs +++ b/tests/codegen/placement-new.rs @@ -22,9 +22,11 @@ pub fn box_default_inplace() -> Box<(String, String)> { #[no_mangle] pub fn rc_default_inplace() -> Rc<(String, String)> { // CHECK-NOT: alloca - // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc( + // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc(i[[#BITS:]] // CHECK-NOT: call void @llvm.memcpy - // CHECK: ret ptr [[RC]] + // CHECK: [[DATA:%.*]] = getelementptr inbounds i8, ptr [[RC]], i[[#BITS]] [[#div(BITS,4)]] + // CHECK-NOT: call void 
@llvm.memcpy + // CHECK: ret ptr [[DATA]] Rc::default() } @@ -32,8 +34,10 @@ pub fn rc_default_inplace() -> Rc<(String, String)> { #[no_mangle] pub fn arc_default_inplace() -> Arc<(String, String)> { // CHECK-NOT: alloca - // CHECK: [[ARC:%.*]] = {{.*}}call {{.*}}__rust_alloc( + // CHECK: [[RC:%.*]] = {{.*}}call {{.*}}__rust_alloc(i[[#BITS:]] + // CHECK-NOT: call void @llvm.memcpy + // CHECK: [[DATA:%.*]] = getelementptr inbounds i8, ptr [[RC]], i[[#BITS]] [[#div(BITS,4)]] // CHECK-NOT: call void @llvm.memcpy - // CHECK: ret ptr [[ARC]] + // CHECK: ret ptr [[DATA]] Arc::default() } diff --git a/tests/debuginfo/rc_arc.rs b/tests/debuginfo/rc_arc.rs index f636c60702cde..8d4cd8cdceb72 100644 --- a/tests/debuginfo/rc_arc.rs +++ b/tests/debuginfo/rc_arc.rs @@ -19,7 +19,7 @@ // lldb-command:v rc // lldb-check:[...] strong=11, weak=1 { value = 111 } // lldb-command:v arc -// lldb-check:[...] strong=21, weak=1 { data = 222 } +// lldb-check:[...] strong=21, weak=1 { value = 222 } // === CDB TESTS ================================================================================== @@ -27,39 +27,39 @@ // cdb-command:dx rc,d // cdb-check:rc,d : 111 [Type: alloc::rc::Rc] -// cdb-check: [Reference count] : 11 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 11 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx weak_rc,d // cdb-check:weak_rc,d : 111 [Type: alloc::rc::Weak] -// cdb-check: [Reference count] : 11 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 11 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx arc,d // cdb-check:arc,d : 222 [Type: alloc::sync::Arc] -// cdb-check: [Reference count] : 21 [Type: core::sync::atomic::AtomicUsize] -// cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] +// cdb-check: [Reference count] : 21 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx weak_arc,d // cdb-check:weak_arc,d : 222 [Type: alloc::sync::Weak] -// cdb-check: [Reference count] : 21 [Type: core::sync::atomic::AtomicUsize] -// cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] +// cdb-check: [Reference count] : 21 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx dyn_rc,d // cdb-check:dyn_rc,d [Type: alloc::rc::Rc,alloc::alloc::Global>] -// cdb-check: [Reference count] : 31 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 31 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx dyn_rc_weak,d // cdb-check:dyn_rc_weak,d [Type: alloc::rc::Weak,alloc::alloc::Global>] -// cdb-check: [Reference count] : 31 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 31 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx slice_rc,d // cdb-check:slice_rc,d : { len=3 } [Type: alloc::rc::Rc,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] -// cdb-check: [Reference count] : 41 [Type: core::cell::Cell] -// cdb-check: [Weak reference 
count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 41 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-check: [0] : 1 [Type: u32] // cdb-check: [1] : 2 [Type: u32] // cdb-check: [2] : 3 [Type: u32] @@ -67,27 +67,27 @@ // cdb-command:dx slice_rc_weak,d // cdb-check:slice_rc_weak,d : { len=3 } [Type: alloc::rc::Weak,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] -// cdb-check: [Reference count] : 41 [Type: core::cell::Cell] -// cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] +// cdb-check: [Reference count] : 41 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-check: [0] : 1 [Type: u32] // cdb-check: [1] : 2 [Type: u32] // cdb-check: [2] : 3 [Type: u32] // cdb-command:dx dyn_arc,d // cdb-check:dyn_arc,d [Type: alloc::sync::Arc,alloc::alloc::Global>] -// cdb-check: [Reference count] : 51 [Type: core::sync::atomic::AtomicUsize] -// cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] +// cdb-check: [Reference count] : 51 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx dyn_arc_weak,d // cdb-check:dyn_arc_weak,d [Type: alloc::sync::Weak,alloc::alloc::Global>] -// cdb-check: [Reference count] : 51 [Type: core::sync::atomic::AtomicUsize] -// cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] +// cdb-check: [Reference count] : 51 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-command:dx slice_arc,d // cdb-check:slice_arc,d : { len=3 } [Type: alloc::sync::Arc,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] -// cdb-check: [Reference count] : 61 [Type: core::sync::atomic::AtomicUsize] -// cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] +// cdb-check: [Reference count] : 61 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-check: [0] : 4 [Type: u32] // cdb-check: [1] : 5 [Type: u32] // cdb-check: [2] : 6 [Type: u32] @@ -95,8 +95,8 @@ // cdb-command:dx slice_arc_weak,d // cdb-check:slice_arc_weak,d : { len=3 } [Type: alloc::sync::Weak,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] -// cdb-check: [Reference count] : 61 [Type: core::sync::atomic::AtomicUsize] -// cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] +// cdb-check: [Reference count] : 61 [Type: core::cell::UnsafeCell] +// cdb-check: [Weak reference count] : 2 [Type: core::cell::UnsafeCell] // cdb-check: [0] : 4 [Type: u32] // cdb-check: [1] : 5 [Type: u32] // cdb-check: [2] : 6 [Type: u32] diff --git a/tests/debuginfo/strings-and-strs.rs b/tests/debuginfo/strings-and-strs.rs index 3d6589db34b88..264e61b4ff6fb 100644 --- a/tests/debuginfo/strings-and-strs.rs +++ b/tests/debuginfo/strings-and-strs.rs @@ -19,7 +19,7 @@ // gdb-check:$4 = ("Hello", "World") // gdb-command:print str_in_rc -// gdb-check:$5 = alloc::rc::Rc<&str, alloc::alloc::Global> {ptr: core::ptr::non_null::NonNull> {pointer: 0x[...]}, phantom: core::marker::PhantomData>, alloc: alloc::alloc::Global} +// gdb-check:$5 = alloc::rc::Rc<&str, alloc::alloc::Global> {raw_rc: alloc::raw_rc::RawRc<&str, alloc::alloc::Global> {weak: alloc::raw_rc::RawWeak<&str, alloc::alloc::Global> {ptr: core::ptr::non_null::NonNull<&str> {pointer: 0x[...]}, alloc: alloc::alloc::Global}, 
_phantom_data: core::marker::PhantomData<&str>}} // === LLDB TESTS ================================================================================== // lldb-command:run @@ -38,7 +38,6 @@ // lldb-command:v str_in_rc // lldb-check:(alloc::rc::Rc<&str, alloc::alloc::Global>) str_in_rc = strong=1, weak=0 { value = "Hello" { [0] = 'H' [1] = 'e' [2] = 'l' [3] = 'l' [4] = 'o' } } - #![allow(unused_variables)] #![feature(omit_gdb_pretty_printer_section)] #![omit_gdb_pretty_printer_section] From e37e38b8373efddb07f720e9047a7cb8f8ef25bd Mon Sep 17 00:00:00 2001 From: EFanZh Date: Tue, 7 Jan 2025 02:14:34 +0800 Subject: [PATCH 2/2] Adding docs --- library/alloc/src/raw_rc.rs | 312 ++++++++++++++++++++++++++---------- library/alloc/src/rc.rs | 21 +-- library/alloc/src/sync.rs | 27 ++-- 3 files changed, 251 insertions(+), 109 deletions(-) diff --git a/library/alloc/src/raw_rc.rs b/library/alloc/src/raw_rc.rs index 3d3cdce64a79b..c378f66822048 100644 --- a/library/alloc/src/raw_rc.rs +++ b/library/alloc/src/raw_rc.rs @@ -1,3 +1,24 @@ +//! Base implementation for `rc::{Rc, Weak}` and `sync::{Arc, Weak}`. +//! +//! The memory layout of an reference counted allocation is designed so that the reference counts +//! has fixed offsets to the value. In this way, many operations on reference counted pointers can +//! share the same code in order to reduce binary size. +//! +//! This is done by using a layout like the following structure: +//! +//! ```ignore (illustrative) +//! #[repr(C)] +//! struct RcAllocation { +//! padding: MaybeUninit<[u8; const { align_of::().saturating_sub(size_of::()) }]>, +//! ref_counts: RefCounts, +//! value: T, +//! } +//! ``` +//! +//! In this way, for all `T`: +//! `mem::offset_of!(RcAllocation, value) - mem::offset_of!(RcAllocation, ref_counts)` +//! always equal to `size_of::()`. + use core::alloc::{AllocError, Allocator, Layout, LayoutError}; use core::any::Any; use core::cell::UnsafeCell; @@ -27,55 +48,124 @@ use crate::string::String; #[cfg(not(no_global_oom_handling))] use crate::vec::Vec; -pub trait RcOps { +/// A trait for `rc` and `sync` module to inject their concrete implementations of reference +/// count operations. +pub unsafe trait RcOps: Sized { + /// Increment strong or weak reference pointers. Used by `{RawRc,RawWeak}::clone`. + /// + /// # Safety + /// + /// - `count` should only be handled by the same `RcOps` implementation. + /// - The value of `count` should be non-zero. unsafe fn increment_ref_count(count: &UnsafeCell); + + /// Decrement strong or weak reference pointers, returns whether the reference count becomes + /// zero after decrementing. Used by `{RawRc,RawWeak}::drop`. + /// + /// # Safety + /// + /// - `count` should only be handled by the same `RcOps` implementation. + /// - The value of `count` should be non-zero. unsafe fn decrement_ref_count(count: &UnsafeCell) -> bool; + /// Increment `strong_count` if and only if `strong_count` is non-zero, returns whether + /// incrementing is performed. Used by `RawWeak::upgrade`. + /// + /// # Safety + /// + /// - `strong_count` should only be handled by the same `RcOps` implementation. unsafe fn upgrade(strong_count: &UnsafeCell) -> bool; + + /// Increment `weak_count`. This is required instead of `increment_ref_count` because `Arc` + /// requires additional synchronization with `is_unique`. + /// + /// # Safety + /// + /// - `weak_count` should only be handled by the same `RcOps` implementation. + /// - Caller should provide a `weak_count` value from a `RawRc` object. 
unsafe fn downgrade(weak_count: &UnsafeCell); + /// Decrement `strong_count` if and only if `strong_count` is 1, returns true if decrementing + /// is performed. Used by `RawRc::try_unwrap`. + /// + /// # Safety + /// + /// - `strong_count` should only be handled by the same `RcOps` implementation. unsafe fn lock_strong_count(strong_count: &UnsafeCell) -> bool; + + /// Set `strong_count` to 1. + /// + /// # Safety + /// + /// - `strong_count` should only be handled by the same `RcOps` implementation. + /// - `strong_count` is 0. unsafe fn unlock_strong_count(strong_count: &UnsafeCell); + /// Returns whether both `strong_count` are 1 and `weak_count` is 1. Used by `RawRc::get_mut`. + /// + /// # Safety + /// + /// - Both `strong_count` and `weak_count` should only be handled by the same `RcOps` + /// implementation. unsafe fn is_unique(strong_count: &UnsafeCell, weak_count: &UnsafeCell) -> bool; + /// Makes `rc` the sole owner of a value by: + /// + /// - If both strong count and weak count are 1, nothing will be done because caller is + /// already the sole owner of the value. + /// - If strong count is 1 and weak count is greater than 1, implementor will first + /// decrement both strong count and weak count, then `MakeMut::by_move` be called + /// to notify the caller moving is needed in order to make caller the sole owner. + /// - If strong count is greater than 1, `Make::by_clone` will be called to notify the caller + /// cloning is needed in order to make caller the sole owner. + /// + /// # Safety + /// + /// - The reference counts in `MakeMut` should only be handled by the same `RcOps` + /// implementation. #[cfg(not(no_global_oom_handling))] - unsafe fn make_unique(rc: &mut RawRc, by_clone: F, by_move: G) + unsafe fn make_mut(make_mut: MakeMut<'_, T, A, Self>) where - T: ?Sized, - F: FnOnce(&mut RawRc), - G: FnOnce(&mut RawRc); + T: CloneToUninit + ?Sized, + A: Allocator; } +/// Stores the strong and weak reference counts to a shared value. pub struct RefCounts { + /// Weak reference count (plus one if there are non-zero strong reference count). pub weak: UnsafeCell, + /// Strong reference count. pub strong: UnsafeCell, } impl RefCounts { + /// Creates a `RefCounts` with weak count of `1` and strong count of `strong_count`. pub const fn new(strong_cont: usize) -> Self { Self { weak: UnsafeCell::new(1), strong: UnsafeCell::new(strong_cont) } } } -const _: () = assert!(RefCounts::LAYOUT.size().is_power_of_two()); - +/// Describes the allocation of a reference counted value. struct RcLayout { + /// The layout of the allocation. allocation_layout: Layout, - allocation_offset_bytes: usize, + /// The offset of the value from beginning of the allocation. + value_offset_bytes: usize, } impl RcLayout { const fn from_value_layout(value_layout: Layout) -> Result { match RefCounts::LAYOUT.extend(value_layout) { - Ok((unaligned_allocation_layout, allocation_offset_bytes)) => Ok(Self { - allocation_layout: unaligned_allocation_layout.pad_to_align(), - allocation_offset_bytes, - }), + Ok((unaligned_allocation_layout, value_offset_bytes)) => { + Ok(Self { allocation_layout: unaligned_allocation_layout, value_offset_bytes }) + } Err(error) => Err(error), } } + /// # Safety + /// + /// - `RcLayout::from(value_layout)` must return `Ok(...)`. 
const unsafe fn from_value_layout_unchecked(value_layout: Layout) -> Self { match Self::from_value_layout(value_layout) { Ok(rc_layout) => rc_layout, @@ -91,6 +181,10 @@ impl RcLayout { Self::from_value_layout(Layout::for_value(value_ref)) } + /// # Safety + /// + /// - `value_ptr` points to a value that is contained in a reference counted allocation. + /// - `value_ptr` contains correct metadata for the memory layout of `T`. const unsafe fn from_value_ptr_unchecked(value_ptr: NonNull) -> Self where T: ?Sized, @@ -147,7 +241,7 @@ unsafe fn init_rc_allocation( rc_layout: &RcLayout, ) -> NonNull<()> { let allocation_ptr = allocation_ptr.cast::<()>(); - let value_ptr = unsafe { allocation_ptr.byte_add(rc_layout.allocation_offset_bytes) }; + let value_ptr = unsafe { allocation_ptr.byte_add(rc_layout.value_offset_bytes) }; let ref_counts = const { RefCounts::new(STRONG_COUNT) }; unsafe { ref_counts_ptr_from_value_ptr(value_ptr).write(ref_counts) }; @@ -239,6 +333,8 @@ where } } +/// Allocate a memory block for storing a reference counted value according to `rc_layout` and +/// initialize the value with `f`. If `f` panics, the allocated memory will be deallocated. #[cfg(not(no_global_oom_handling))] fn allocate_for_rc_with( alloc: &A, @@ -277,6 +373,10 @@ where ptr } +/// # Safety +/// +/// - Memory starts with `ptr` is valid to read `size` bytes. +/// - `rc_layout` has enough space for storing a value of `size` bytes. #[cfg(not(no_global_oom_handling))] unsafe fn allocate_for_rc_with_bytes( alloc: &A, @@ -341,12 +441,16 @@ where { unsafe { alloc.deallocate( - ptr.cast().byte_sub(rc_layout.allocation_offset_bytes), + ptr.cast().byte_sub(rc_layout.value_offset_bytes), rc_layout.allocation_layout, ); } } +fn is_dangling(ptr: NonNull<()>) -> bool { + ptr.addr() == NonZeroUsize::MAX +} + struct GuardedWeak<'a, T, A, R> where T: ?Sized, @@ -411,6 +515,69 @@ where } } +#[cfg(not(no_global_oom_handling))] +pub struct MakeMut<'a, T, A, R> +where + T: ?Sized, +{ + rc: &'a mut RawRc, + _phantom_data: PhantomData, +} + +#[cfg(not(no_global_oom_handling))] +impl<'a, T, A, R> MakeMut<'a, T, A, R> +where + T: ?Sized, +{ + pub fn ref_counts(&self) -> &RefCounts { + self.rc.ref_counts() + } + + /// # Safety + /// + /// - strong count in ref counts has been set to 0. 
+ pub unsafe fn by_move(self) + where + A: Allocator, + R: RcOps, + { + let (ptr_ref, alloc) = self.rc.borrow_raw_parts(); + let old_ptr = *ptr_ref; + + unsafe { + let mut weak = RawWeak::from_raw_parts(old_ptr, &*alloc); + let guard = GuardedWeak::::new(&mut weak); + let new_ptr = allocate_for_rc_with_value_unchecked::(alloc, old_ptr.as_ref()); + + *ptr_ref = new_ptr; + + drop(guard); + } + } + + pub fn by_clone(self) + where + T: CloneToUninit, + A: Allocator, + R: RcOps, + { + let (ptr_ref, alloc) = self.rc.borrow_raw_parts(); + let old_ptr = *ptr_ref; + + unsafe { + let rc_layout = RcLayout::from_value_ptr_unchecked(old_ptr); + + let new_ptr = allocate_for_rc_with::(alloc, &rc_layout, |new_ptr| { + T::clone_to_uninit(old_ptr.as_ref(), new_ptr.as_ptr().cast()); + }); + + *ptr_ref = NonNull::new_unchecked(new_ptr.as_ptr().with_metadata_of(old_ptr.as_ptr())); + + RawRc::from_raw_parts(old_ptr, &*alloc).drop::(); + } + } +} + pub struct RawWeak where T: ?Sized, @@ -480,20 +647,23 @@ where A: Clone, R: RcOps, { - unsafe { - if !self.is_dangling() { - R::increment_ref_count(self.weak_count_unchecked()); + unsafe fn inner(ptr: NonNull<()>, alloc: &A) -> A + where + A: Clone, + R: RcOps, + { + if !is_dangling(ptr) { + unsafe { R::increment_ref_count(weak_count_ptr_from_value_ptr(ptr).as_ref()) }; } - self.clone_without_increment_weak_count() + alloc.clone() } - } - unsafe fn clone_without_increment_weak_count(&self) -> Self - where - A: Clone, - { - unsafe { Self::from_raw_parts(self.ptr, self.alloc.clone()) } + unsafe { + let alloc = inner::(self.ptr.cast(), &self.alloc); + + Self::from_raw_parts(self.ptr, alloc) + } } pub unsafe fn drop(&mut self) @@ -501,7 +671,7 @@ where A: Allocator, R: RcOps, { - if !self.is_dangling() { + if !is_dangling(self.ptr.cast()) { unsafe { self.drop_unchecked::() }; } } @@ -532,10 +702,6 @@ where (self.ptr, self.alloc) } - pub fn is_dangling(&self) -> bool { - self.ptr.addr() == NonZeroUsize::MAX - } - pub fn ptr_eq(&self, other: &Self) -> bool { ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) } @@ -546,16 +712,15 @@ where #[cfg(not(no_sync))] pub fn ref_counts(&self) -> Option<&RefCounts> { - (!self.is_dangling()).then(|| unsafe { self.ref_counts_unchecked() }) + (!is_dangling(self.ptr.cast())).then(|| unsafe { self.ref_counts_unchecked() }) } - #[cfg(not(no_sync))] unsafe fn ref_counts_unchecked(&self) -> &RefCounts { unsafe { ref_counts_ptr_from_value_ptr(self.ptr.cast()).as_ref() } } pub fn strong_count(&self) -> Option<&UnsafeCell> { - (!self.is_dangling()).then(|| unsafe { self.strong_count_unchecked() }) + (!is_dangling(self.ptr.cast())).then(|| unsafe { self.strong_count_unchecked() }) } unsafe fn strong_count_unchecked(&self) -> &UnsafeCell { @@ -563,7 +728,7 @@ where } pub fn weak_count(&self) -> Option<&UnsafeCell> { - (!self.is_dangling()).then(|| unsafe { self.weak_count_unchecked() }) + (!is_dangling(self.ptr.cast())).then(|| unsafe { self.weak_count_unchecked() }) } unsafe fn weak_count_unchecked(&self) -> &UnsafeCell { @@ -575,7 +740,7 @@ where A: Clone, R: RcOps, { - if self.is_dangling() { None } else { unsafe { self.upgrade_unchecked::() } } + if is_dangling(self.ptr.cast()) { None } else { unsafe { self.upgrade_unchecked::() } } } unsafe fn upgrade_unchecked(&self) -> Option> @@ -830,10 +995,21 @@ where A: Clone, R: RcOps, { + unsafe fn inner(ptr: NonNull<()>, alloc: &A) -> A + where + A: Clone, + R: RcOps, + { + unsafe { R::increment_ref_count(strong_count_ptr_from_value_ptr(ptr).as_ref()) }; + + alloc.clone() + } + unsafe { - 
R::increment_ref_count(self.strong_count()); + let ptr = self.as_ptr(); + let alloc = inner::(ptr.cast(), self.allocator()); - Self::from_weak(self.weak.clone_without_increment_weak_count()) + Self::from_raw_parts(ptr, alloc) } } @@ -860,10 +1036,21 @@ where A: Clone, R: RcOps, { + unsafe fn inner(ptr: NonNull<()>, alloc: &A) -> A + where + A: Clone, + R: RcOps, + { + unsafe { R::downgrade(weak_count_ptr_from_value_ptr(ptr).as_ref()) }; + + alloc.clone() + } + unsafe { - R::downgrade(self.weak_count()); + let ptr = self.as_ptr(); + let alloc = inner::(ptr.cast(), self.allocator()); - self.weak.clone_without_increment_weak_count() + RawWeak::from_raw_parts(ptr, alloc) } } @@ -909,47 +1096,6 @@ where self.weak.into_raw_parts() } - #[cfg(not(no_global_oom_handling))] - unsafe fn make_unique_by_clone(&mut self) - where - T: CloneToUninit, - A: Allocator, - R: RcOps, - { - let (ptr_ref, alloc) = self.borrow_raw_parts(); - let old_ptr = *ptr_ref; - - unsafe { - let rc_layout = RcLayout::from_value_ptr_unchecked(old_ptr); - - let new_ptr = allocate_for_rc_with::(alloc, &rc_layout, |new_ptr| { - T::clone_to_uninit(old_ptr.as_ref(), new_ptr.as_ptr().cast()); - }); - - *ptr_ref = NonNull::new_unchecked(new_ptr.as_ptr().with_metadata_of(old_ptr.as_ptr())); - - RawRc::from_raw_parts(old_ptr, &*alloc).drop::(); - } - } - - #[cfg(not(no_global_oom_handling))] - unsafe fn make_unique_by_move(&mut self) - where - A: Allocator, - R: RcOps, - { - let (ptr_ref, alloc) = self.borrow_raw_parts(); - let old_ptr = *ptr_ref; - - unsafe { - let new_ptr = allocate_for_rc_with_value_unchecked::(alloc, old_ptr.as_ref()); - - *ptr_ref = new_ptr; - - RawWeak::from_raw_parts(old_ptr, &*alloc).drop_unchecked::(); - } - } - #[cfg(not(no_global_oom_handling))] pub unsafe fn make_mut(&mut self) -> &mut T where @@ -958,11 +1104,7 @@ where R: RcOps, { unsafe { - R::make_unique( - self, - |this| this.make_unique_by_clone::(), - |this| this.make_unique_by_move::(), - ); + R::make_mut(MakeMut { rc: self, _phantom_data: PhantomData }); self.get_mut_unchecked() } @@ -976,7 +1118,7 @@ where RawWeak::ptr_ne(&self.weak, &other.weak) } - #[cfg(all(not(no_global_oom_handling), not(no_sync)))] + #[cfg(not(no_global_oom_handling))] pub fn ref_counts(&self) -> &RefCounts { unsafe { self.weak.ref_counts_unchecked() } } diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index b0d6711233028..701a2262571f7 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -261,6 +261,8 @@ use crate::alloc::{AllocError, Allocator, Global}; use crate::borrow::{Cow, ToOwned}; #[cfg(not(no_global_oom_handling))] use crate::boxed::Box; +#[cfg(not(no_global_oom_handling))] +use crate::raw_rc::MakeMut; use crate::raw_rc::{self, RawRc, RawUniqueRc, RawWeak}; #[cfg(not(no_global_oom_handling))] use crate::string::String; @@ -281,7 +283,7 @@ where enum RcOps {} -impl raw_rc::RcOps for RcOps { +unsafe impl raw_rc::RcOps for RcOps { unsafe fn increment_ref_count(count: &UnsafeCell) { let count = unsafe { &mut *count.get() }; let strong = *count; @@ -349,22 +351,23 @@ impl raw_rc::RcOps for RcOps { } #[cfg(not(no_global_oom_handling))] - unsafe fn make_unique(rc: &mut RawRc, by_clone: F, by_move: G) + unsafe fn make_mut(make_mut: MakeMut<'_, T, A, Self>) where - T: ?Sized, - F: FnOnce(&mut RawRc), - G: FnOnce(&mut RawRc), + T: CloneToUninit + ?Sized, + A: Allocator, { - let strong_count = unsafe { &mut *rc.strong_count().get() }; + let ref_counts = make_mut.ref_counts(); + let strong_count = unsafe { &mut *ref_counts.strong.get() }; if 
*strong_count == 1 { - if unsafe { *rc.weak_count().get() } != 1 { + if unsafe { *ref_counts.weak.get() } != 1 { *strong_count = 0; - by_move(rc); + // SAFETY: We have set strong count to 0. + unsafe { make_mut.by_move() }; } } else { - by_clone(rc); + make_mut.by_clone(); } } } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index b51c7d009b9fd..7073686ebebe5 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -26,13 +26,13 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release}; use core::sync::atomic::{self, AtomicUsize}; use core::{borrow, fmt, hint, intrinsics}; -#[cfg(not(no_global_oom_handling))] -use crate::alloc::Layout; use crate::alloc::{AllocError, Allocator, Global}; use crate::borrow::{Cow, ToOwned}; #[cfg(not(no_global_oom_handling))] use crate::boxed::Box; #[cfg(not(no_global_oom_handling))] +use crate::raw_rc::MakeMut; +#[cfg(not(no_global_oom_handling))] use crate::raw_rc::RefCounts; use crate::raw_rc::{self, RawRc, RawWeak}; #[cfg(not(no_global_oom_handling))] @@ -82,7 +82,7 @@ macro_rules! acquire { enum RcOps {} -impl raw_rc::RcOps for RcOps { +unsafe impl raw_rc::RcOps for RcOps { unsafe fn increment_ref_count(count: &UnsafeCell) { let count = unsafe { AtomicUsize::from_ptr(count.get()) }; @@ -236,13 +236,12 @@ impl raw_rc::RcOps for RcOps { } #[cfg(not(no_global_oom_handling))] - unsafe fn make_unique(rc: &mut raw_rc::RawRc, by_clone: F, by_move: G) + unsafe fn make_mut(make_mut: MakeMut<'_, T, A, Self>) where - T: ?Sized, - F: FnOnce(&mut raw_rc::RawRc), - G: FnOnce(&mut raw_rc::RawRc), + T: CloneToUninit + ?Sized, + A: Allocator, { - let ref_counts = rc.ref_counts(); + let ref_counts = make_mut.ref_counts(); let strong_count = unsafe { AtomicUsize::from_ptr(ref_counts.strong.get()) }; let weak_count = unsafe { AtomicUsize::from_ptr(ref_counts.weak.get()) }; @@ -272,10 +271,11 @@ impl raw_rc::RcOps for RcOps { // usize::MAX (i.e., locked), since the weak count can only be // locked by a thread with a strong reference. - by_move(rc); + // SAFETY: We have set strong count to 0. + unsafe { make_mut.by_move() }; } } else { - by_clone(rc); + make_mut.by_clone(); } } } @@ -2832,10 +2832,6 @@ impl Default for Arc { #[cfg(not(no_global_oom_handling))] const MAX_STATIC_INNER_SLICE_ALIGNMENT: usize = 16; -#[cfg(not(no_global_oom_handling))] -const STATIC_INNER_PADDING: usize = - Layout::new::().padding_needed_for(MAX_STATIC_INNER_SLICE_ALIGNMENT); - /// Struct to hold the static `ArcInner` used for empty `Arc` as /// returned by `Default::default`. /// @@ -2846,7 +2842,8 @@ const STATIC_INNER_PADDING: usize = #[cfg(not(no_global_oom_handling))] #[repr(C, align(16))] struct SliceArcInnerForStatic { - padding: MaybeUninit<[RefCounts; STATIC_INNER_PADDING / mem::size_of::()]>, + padding: + MaybeUninit<[u8; MAX_STATIC_INNER_SLICE_ALIGNMENT.saturating_sub(size_of::())]>, ref_counts: RefCounts, value: [u8; 1], }
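
A minimal, self-contained sketch of the layout trick this patch relies on (illustrative only: it reuses the `RefCounts` name from the patch, but it is not the real `alloc::raw_rc` code, and it uses `Cell<usize>` instead of `UnsafeCell<usize>` for brevity). The smart pointer stores a pointer straight to the value, so `deref` is a plain pointer read, and the counts are found one `RefCounts` element before the value — the same `(RefCounts *)value_ptr - 1` computation the updated gdb/LLDB/natvis providers perform:

```rust
use std::cell::Cell;
use std::mem::size_of;

// Stand-in for the patch's `RefCounts` (the real one uses `UnsafeCell<usize>`).
#[repr(C)]
struct RefCounts {
    weak: Cell<usize>,
    strong: Cell<usize>,
}

// Simplified reference-counted allocation: the counts immediately followed by the value.
// For over-aligned `T`, the real allocation prepends padding so that the counts still sit
// directly in front of the value; `u64` needs no such padding here.
#[repr(C)]
struct RcAllocation<T> {
    ref_counts: RefCounts,
    value: T,
}

fn main() {
    let allocation = RcAllocation {
        ref_counts: RefCounts { weak: Cell::new(1), strong: Cell::new(1) },
        value: 42u64,
    };
    let base: *const RcAllocation<u64> = &allocation;

    // An `Rc`/`Arc` in this scheme stores only `value_ptr`, so dereferencing it is a
    // direct read with no field-offset arithmetic.
    let value_ptr: *const u64 = unsafe { &raw const (*base).value };
    assert_eq!(unsafe { *value_ptr }, 42);

    // The counts live at a fixed negative offset: exactly one `RefCounts` before the value.
    let counts_ptr: *const RefCounts = unsafe { value_ptr.cast::<RefCounts>().sub(1) };
    unsafe {
        assert_eq!((*counts_ptr).strong.get(), 1);
        assert_eq!((*counts_ptr).weak.get(), 1);
    }
    assert_eq!(value_ptr as usize - counts_ptr as usize, size_of::<RefCounts>());
}
```

This fixed negative offset is also why the debugger support in the patch no longer reads `strong`/`weak` fields next to the data: the gdb and LLDB providers and the natvis entries all reconstruct the counts by casting the stored value pointer to a `RefCounts` pointer and stepping back one element.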