From ce3dab7b205382658dfa821cee65e66813f3ca64 Mon Sep 17 00:00:00 2001 From: EFanZh Date: Sun, 22 Dec 2024 12:17:43 +0800 Subject: [PATCH] Refactor --- library/alloc/src/raw_rc.rs | 513 +++++++++++++++++++----------------- library/alloc/src/rc.rs | 4 +- library/alloc/src/sync.rs | 4 +- src/etc/gdb_providers.py | 10 +- 4 files changed, 279 insertions(+), 252 deletions(-) diff --git a/library/alloc/src/raw_rc.rs b/library/alloc/src/raw_rc.rs index a95c33d615ef7..c486173c717e3 100644 --- a/library/alloc/src/raw_rc.rs +++ b/library/alloc/src/raw_rc.rs @@ -57,6 +57,8 @@ impl RefCounts { } } +const _: () = assert!(RefCounts::LAYOUT.size().is_power_of_two()); + struct RcLayout { allocation_layout: Layout, allocation_offset_bytes: usize, @@ -73,6 +75,15 @@ impl RcLayout { } } + unsafe fn from_value_ptr(value_ptr: NonNull) -> Self + where + T: ?Sized, + { + unsafe { + Self::from_value_layout(Layout::for_value_raw(value_ptr.as_ptr())).unwrap_unchecked() + } + } + const fn of() -> Result { Self::from_value_layout(T::LAYOUT) } @@ -84,15 +95,6 @@ impl RcLayout { Err(error) => Err(error), } } - - unsafe fn from_value_ptr(value_ptr: NonNull) -> Self - where - T: ?Sized, - { - unsafe { - Self::from_value_layout(Layout::for_value_raw(value_ptr.as_ptr())).unwrap_unchecked() - } - } } trait RcLayoutExt { @@ -139,7 +141,7 @@ unsafe fn init_rc_allocation( value_ptr } -unsafe fn try_handle_rc_allocation( +unsafe fn try_handle_rc_allocation_result( allocation_result: Result, AllocError>, rc_layout: &RcLayout, ) -> Result, AllocError> { @@ -156,7 +158,7 @@ where A: Allocator, { unsafe { - try_handle_rc_allocation::( + try_handle_rc_allocation_result::( alloc.allocate(rc_layout.allocation_layout), rc_layout, ) @@ -171,7 +173,7 @@ where A: Allocator, { unsafe { - try_handle_rc_allocation::( + try_handle_rc_allocation_result::( alloc.allocate_zeroed(rc_layout.allocation_layout), rc_layout, ) @@ -179,7 +181,7 @@ where } #[cfg(not(no_global_oom_handling))] -unsafe fn handle_rc_allocation( +unsafe fn handle_rc_allocation_result( allocation_result: Result, AllocError>, rc_layout: &RcLayout, ) -> NonNull<()> { @@ -200,7 +202,10 @@ where A: Allocator, { unsafe { - handle_rc_allocation::(alloc.allocate(rc_layout.allocation_layout), rc_layout) + handle_rc_allocation_result::( + alloc.allocate(rc_layout.allocation_layout), + rc_layout, + ) } } @@ -213,7 +218,7 @@ where A: Allocator, { unsafe { - handle_rc_allocation::( + handle_rc_allocation_result::( alloc.allocate_zeroed(rc_layout.allocation_layout), rc_layout, ) @@ -296,17 +301,6 @@ where } } -impl AsRef for GuardedRc<'_, T, A, R> -where - T: ?Sized, - A: Allocator, - R: RcOps, -{ - fn as_ref(&self) -> &T { - self.rc.as_ref() - } -} - impl Drop for GuardedRc<'_, T, A, R> where T: ?Sized, @@ -349,23 +343,37 @@ where self.ptr } - pub unsafe fn cast_with(self, f: F) -> RawWeak + unsafe fn as_ref_unchecked(&self) -> &T { + unsafe { self.ptr.as_ref() } + } + + unsafe fn assume_init_drop(&mut self) where - U: ?Sized, - F: FnOnce(NonNull) -> NonNull, + A: Allocator, + R: RcOps, { - unsafe { RawWeak::from_raw_parts(f(self.ptr), self.alloc) } + unsafe { + let guard = GuardedWeak::::new(self); + + guard.weak.as_ptr().drop_in_place(); + }; + } + + #[cfg(not(no_global_oom_handling))] + fn borrow_raw_parts(&mut self) -> (&mut NonNull, &mut A) { + (&mut self.ptr, &mut self.alloc) } pub unsafe fn cast(self) -> RawWeak { unsafe { self.cast_with(NonNull::cast) } } - unsafe fn clone_without_inc_ref(&self) -> Self + pub unsafe fn cast_with(self, f: F) -> RawWeak where - A: Clone, + U: 
?Sized, + F: FnOnce(NonNull) -> NonNull, { - unsafe { Self::from_raw_parts(self.ptr, self.alloc.clone()) } + unsafe { RawWeak::from_raw_parts(f(self.ptr), self.alloc) } } pub unsafe fn clone(&self) -> Self @@ -378,10 +386,17 @@ where R::increment_ref_count(self.weak_count_unchecked()); } - self.clone_without_inc_ref() + self.clone_without_increment_weak_count() } } + unsafe fn clone_without_increment_weak_count(&self) -> Self + where + A: Clone, + { + unsafe { Self::from_raw_parts(self.ptr, self.alloc.clone()) } + } + unsafe fn deallocate(&self) where A: Allocator, @@ -396,18 +411,6 @@ where } } - unsafe fn drop_unchecked(&mut self) - where - A: Allocator, - R: RcOps, - { - unsafe { - if R::decrement_ref_count(self.weak_count_unchecked()) { - self.deallocate(); - } - }; - } - pub unsafe fn drop(&mut self) where A: Allocator, @@ -418,29 +421,28 @@ where } } - unsafe fn assume_init_drop(&mut self) + unsafe fn drop_unchecked(&mut self) where A: Allocator, R: RcOps, { unsafe { - let guard = GuardedWeak::::new(self); - - guard.weak.as_ptr().drop_in_place(); + if R::decrement_ref_count(self.weak_count_unchecked()) { + self.deallocate(); + } }; } - pub fn into_raw_parts(self) -> (NonNull, A) { - (self.ptr, self.alloc) + unsafe fn get_mut_unchecked(&mut self) -> &mut T { + unsafe { self.ptr.as_mut() } } pub fn into_raw(self) -> NonNull { self.ptr } - #[cfg(not(no_global_oom_handling))] - fn borrow_raw_parts(&mut self) -> (&mut NonNull, &mut A) { - (&mut self.ptr, &mut self.alloc) + pub fn into_raw_parts(self) -> (NonNull, A) { + (self.ptr, self.alloc) } pub fn is_dangling(&self) -> bool { @@ -455,30 +457,38 @@ where !ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) } + #[cfg(not(no_sync))] + pub fn ref_counts(&self) -> Option<&RefCounts> { + (!self.is_dangling()).then(|| unsafe { self.ref_counts_unchecked() }) + } + #[cfg(not(no_sync))] unsafe fn ref_counts_unchecked(&self) -> &RefCounts { unsafe { ref_counts_ptr_from_value_ptr(self.ptr.cast()).as_ref() } } - #[cfg(not(no_sync))] - pub fn ref_counts(&self) -> Option<&RefCounts> { - (!self.is_dangling()).then(|| unsafe { self.ref_counts_unchecked() }) + pub fn strong_count(&self) -> Option<&UnsafeCell> { + (!self.is_dangling()).then(|| unsafe { self.strong_count_unchecked() }) } unsafe fn strong_count_unchecked(&self) -> &UnsafeCell { unsafe { strong_count_ptr_from_value_ptr(self.ptr.cast()).as_ref() } } - pub fn strong_count(&self) -> Option<&UnsafeCell> { - (!self.is_dangling()).then(|| unsafe { self.strong_count_unchecked() }) + pub fn weak_count(&self) -> Option<&UnsafeCell> { + (!self.is_dangling()).then(|| unsafe { self.weak_count_unchecked() }) } unsafe fn weak_count_unchecked(&self) -> &UnsafeCell { unsafe { weak_count_ptr_from_value_ptr(self.ptr.cast()).as_ref() } } - pub fn weak_count(&self) -> Option<&UnsafeCell> { - (!self.is_dangling()).then(|| unsafe { self.weak_count_unchecked() }) + pub fn upgrade(&self) -> Option> + where + A: Clone, + R: RcOps, + { + if self.is_dangling() { None } else { unsafe { self.upgrade_unchecked::() } } } unsafe fn upgrade_unchecked(&self) -> Option> @@ -491,17 +501,16 @@ where .then(|| RawRc::from_raw_parts(self.ptr, self.alloc.clone())) } } +} - pub fn upgrade(&self) -> Option> +impl RawWeak { + pub fn new_dangling() -> Self where - A: Clone, - R: RcOps, + A: Default, { - if self.is_dangling() { None } else { unsafe { self.upgrade_unchecked::() } } + Self::new_dangling_in(A::default()) } -} -impl RawWeak { pub const fn new_dangling_in(alloc: A) -> Self { unsafe { Self::from_raw_parts( @@ -511,11 
+520,11 @@ impl RawWeak { } } - pub fn new_dangling() -> Self + pub fn try_new_uninit() -> Result where - A: Default, + A: Allocator + Default, { - Self::new_dangling_in(A::default()) + Self::try_new_uninit_in::(A::default()) } pub fn try_new_uninit_in(alloc: A) -> Result @@ -526,11 +535,11 @@ impl RawWeak { .map(|ptr| unsafe { Self::from_raw_parts(ptr.cast(), alloc) }) } - pub fn try_new_uninit() -> Result + pub fn try_new_zeroed() -> Result where A: Allocator + Default, { - Self::try_new_uninit_in::(A::default()) + Self::try_new_zeroed_in::(A::default()) } pub fn try_new_zeroed_in(alloc: A) -> Result @@ -541,11 +550,12 @@ impl RawWeak { .map(|ptr| unsafe { Self::from_raw_parts(ptr.cast(), alloc) }) } - pub fn try_new_zeroed() -> Result + #[cfg(not(no_global_oom_handling))] + pub fn new_uninit() -> Self where A: Allocator + Default, { - Self::try_new_zeroed_in::(A::default()) + Self::new_uninit_in::(A::default()) } #[cfg(not(no_global_oom_handling))] @@ -562,11 +572,11 @@ impl RawWeak { } #[cfg(not(no_global_oom_handling))] - pub fn new_uninit() -> Self + pub fn new_zeroed() -> Self where A: Allocator + Default, { - Self::new_uninit_in::(A::default()) + Self::new_zeroed_in::(A::default()) } #[cfg(not(no_global_oom_handling))] @@ -582,14 +592,6 @@ impl RawWeak { } } - #[cfg(not(no_global_oom_handling))] - pub fn new_zeroed() -> Self - where - A: Allocator + Default, - { - Self::new_zeroed_in::(A::default()) - } - unsafe fn assume_init_into_inner(mut self) -> T where A: Allocator, @@ -618,14 +620,6 @@ impl RawWeak<[T], A> { unsafe { Self::from_raw_parts(NonNull::slice_from_raw_parts(ptr.cast(), length), alloc) } } - #[cfg(not(no_global_oom_handling))] - pub fn new_uninit_slice_in(length: usize, alloc: A) -> Self - where - A: Allocator, - { - Self::allocate_in(length, alloc, allocate_uninit_for_rc::) - } - #[cfg(not(no_global_oom_handling))] pub fn new_uninit_slice(length: usize) -> Self where @@ -635,11 +629,11 @@ impl RawWeak<[T], A> { } #[cfg(not(no_global_oom_handling))] - pub fn new_zeroed_slice_in(length: usize, alloc: A) -> Self + pub fn new_uninit_slice_in(length: usize, alloc: A) -> Self where A: Allocator, { - Self::allocate_in(length, alloc, allocate_zeroed_for_rc::) + Self::allocate_in(length, alloc, allocate_uninit_for_rc::) } #[cfg(not(no_global_oom_handling))] @@ -649,16 +643,17 @@ impl RawWeak<[T], A> { { Self::new_zeroed_slice_in::(length, A::default()) } -} -impl CoerceUnsized> for RawWeak -where - T: ?Sized + Unsize, - U: ?Sized, -{ + #[cfg(not(no_global_oom_handling))] + pub fn new_zeroed_slice_in(length: usize, alloc: A) -> Self + where + A: Allocator, + { + Self::allocate_in(length, alloc, allocate_zeroed_for_rc::) + } } -impl DispatchFromDyn> for RawWeak +impl CoerceUnsized> for RawWeak where T: ?Sized + Unsize, U: ?Sized, @@ -683,6 +678,13 @@ where } } +impl DispatchFromDyn> for RawWeak +where + T: ?Sized + Unsize, + U: ?Sized, +{ +} + pub struct RawRc where T: ?Sized, @@ -695,19 +697,19 @@ impl RawRc where T: ?Sized, { - unsafe fn from_weak(weak: RawWeak) -> Self { - Self { weak, _phantom_data: PhantomData } + pub unsafe fn from_raw(ptr: NonNull) -> Self + where + A: Default, + { + unsafe { Self::from_raw_parts(ptr, A::default()) } } pub unsafe fn from_raw_parts(ptr: NonNull, alloc: A) -> Self { unsafe { Self::from_weak(RawWeak::from_raw_parts(ptr, alloc)) } } - pub unsafe fn from_raw(ptr: NonNull) -> Self - where - A: Default, - { - unsafe { Self::from_raw_parts(ptr, A::default()) } + unsafe fn from_weak(weak: RawWeak) -> Self { + Self { weak, _phantom_data: 
PhantomData } } pub fn allocator(&self) -> &A { @@ -718,6 +720,15 @@ where self.weak.as_ptr() } + #[cfg(not(no_global_oom_handling))] + fn borrow_raw_parts(&mut self) -> (&mut NonNull, &mut A) { + self.weak.borrow_raw_parts() + } + + pub unsafe fn cast(self) -> RawRc { + unsafe { RawRc::from_weak(self.weak.cast()) } + } + pub unsafe fn cast_with(self, f: F) -> RawRc where U: ?Sized, @@ -726,10 +737,6 @@ where unsafe { RawRc::from_weak(self.weak.cast_with(f)) } } - pub unsafe fn cast(self) -> RawRc { - unsafe { RawRc::from_weak(self.weak.cast()) } - } - pub unsafe fn clone(&self) -> Self where A: Clone, @@ -738,35 +745,38 @@ where unsafe { R::increment_ref_count(self.strong_count()); - Self::from_weak(self.weak.clone_without_inc_ref()) + Self::from_weak(self.weak.clone_without_increment_weak_count()) } } - pub unsafe fn decrement_strong_count_in(ptr: NonNull, alloc: A) + pub unsafe fn decrement_strong_count(ptr: NonNull) where - A: Allocator, + A: Allocator + Default, { - unsafe { RawRc::from_raw_parts(ptr, alloc).drop::() }; + unsafe { Self::decrement_strong_count_in::(ptr, A::default()) }; } - pub unsafe fn decrement_strong_count(ptr: NonNull) + pub unsafe fn decrement_strong_count_in(ptr: NonNull, alloc: A) where - A: Allocator + Default, + A: Allocator, { - unsafe { Self::decrement_strong_count_in::(ptr, A::default()) }; + unsafe { RawRc::from_raw_parts(ptr, alloc).drop::() }; } pub unsafe fn increment_strong_count(ptr: NonNull) { unsafe { R::increment_ref_count(strong_count_ptr_from_value_ptr(ptr.cast()).as_ref()) }; } - #[inline(never)] - unsafe fn drop_slow(&mut self) + pub unsafe fn downgrade(&self) -> RawWeak where - A: Allocator, + A: Clone, R: RcOps, { - unsafe { self.weak.assume_init_drop::() } + unsafe { + R::downgrade(self.weak_count()); + + self.weak.clone_without_increment_weak_count() + } } pub unsafe fn drop(&mut self) @@ -781,20 +791,13 @@ where }; } - pub unsafe fn downgrade(&self) -> RawWeak + #[inline(never)] + unsafe fn drop_slow(&mut self) where - A: Clone, + A: Allocator, R: RcOps, { - unsafe { - R::downgrade(self.weak_count()); - - self.weak.clone_without_inc_ref() - } - } - - pub unsafe fn get_mut_unchecked(&mut self) -> &mut T { - unsafe { self.as_ptr().as_mut() } + unsafe { self.weak.assume_init_drop::() } } pub unsafe fn get_mut(&mut self) -> Option<&mut T> @@ -806,17 +809,16 @@ where } } - pub fn into_raw_parts(self) -> (NonNull, A) { - self.weak.into_raw_parts() + pub unsafe fn get_mut_unchecked(&mut self) -> &mut T { + unsafe { self.weak.get_mut_unchecked() } } pub fn into_raw(self) -> NonNull { self.weak.into_raw() } - #[cfg(not(no_global_oom_handling))] - fn borrow_raw_parts(&mut self) -> (&mut NonNull, &mut A) { - self.weak.borrow_raw_parts() + pub fn into_raw_parts(self) -> (NonNull, A) { + self.weak.into_raw_parts() } #[cfg(not(no_global_oom_handling))] @@ -894,10 +896,18 @@ where |this| this.make_unique_by_move::(), ); - self.as_ptr().as_mut() + self.get_mut_unchecked() } } + pub fn ptr_eq(&self, other: &Self) -> bool { + self.weak.ptr_eq(&other.weak) + } + + pub fn ptr_ne(&self, other: &Self) -> bool { + self.weak.ptr_ne(&other.weak) + } + #[cfg(all(not(no_global_oom_handling), not(no_sync)))] pub fn ref_counts(&self) -> &RefCounts { unsafe { self.weak.ref_counts_unchecked() } @@ -910,14 +920,6 @@ where pub fn weak_count(&self) -> &UnsafeCell { unsafe { self.weak.weak_count_unchecked() } } - - pub fn ptr_eq(&self, other: &Self) -> bool { - self.weak.ptr_eq(&other.weak) - } - - pub fn ptr_ne(&self, other: &Self) -> bool { - 
self.weak.ptr_ne(&other.weak) - } } impl RawRc { @@ -929,6 +931,14 @@ impl RawRc { } } + pub fn try_new(value: T) -> Result + where + A: Allocator + Default, + { + RawWeak::try_new_uninit::<1>() + .map(|weak| unsafe { Self::from_weak_with_value(weak, value) }) + } + pub fn try_new_in(value: T, alloc: A) -> Result where A: Allocator, @@ -937,12 +947,12 @@ impl RawRc { .map(|weak| unsafe { Self::from_weak_with_value(weak, value) }) } - pub fn try_new(value: T) -> Result + #[cfg(not(no_global_oom_handling))] + pub fn new(value: T) -> Self where A: Allocator + Default, { - RawWeak::try_new_uninit::<1>() - .map(|weak| unsafe { Self::from_weak_with_value(weak, value) }) + unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) } } #[cfg(not(no_global_oom_handling))] @@ -954,11 +964,13 @@ impl RawRc { } #[cfg(not(no_global_oom_handling))] - pub fn new(value: T) -> Self + pub unsafe fn new_cyclic(data_fn: F) -> Self where A: Allocator + Default, + F: FnOnce(&RawWeak) -> T, + R: RcOps, { - unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) } + unsafe { Self::new_cyclic_in::(data_fn, A::default()) } } #[cfg(not(no_global_oom_handling))] @@ -977,16 +989,6 @@ impl RawRc { unsafe { RawUniqueRc::from_weak_with_value(weak, data).into_rc::() } } - #[cfg(not(no_global_oom_handling))] - pub unsafe fn new_cyclic(data_fn: F) -> Self - where - A: Allocator + Default, - F: FnOnce(&RawWeak) -> T, - R: RcOps, - { - unsafe { Self::new_cyclic_in::(data_fn, A::default()) } - } - pub unsafe fn into_inner(self) -> Option where A: Allocator, @@ -1018,22 +1020,17 @@ impl RawRc { A: Allocator, R: RcOps, { - unsafe { self.try_unwrap::() }.unwrap_or_else(|mut rc| { - let guard = unsafe { GuardedRc::::new(&mut rc) }; + unsafe { + self.try_unwrap::().unwrap_or_else(|mut rc| { + let guard = GuardedRc::::new(&mut rc); - T::clone(guard.as_ref()) - }) + T::clone(guard.rc.as_ref()) + }) + } } } impl RawRc, A> { - pub fn try_new_uninit_in(alloc: A) -> Result - where - A: Allocator, - { - RawWeak::try_new_uninit_in::<1>(alloc).map(|weak| unsafe { Self::from_weak(weak) }) - } - pub fn try_new_uninit() -> Result where A: Allocator + Default, @@ -1041,11 +1038,11 @@ impl RawRc, A> { RawWeak::try_new_uninit::<1>().map(|weak| unsafe { Self::from_weak(weak) }) } - pub fn try_new_zeroed_in(alloc: A) -> Result + pub fn try_new_uninit_in(alloc: A) -> Result where A: Allocator, { - RawWeak::try_new_zeroed_in::<1>(alloc).map(|weak| unsafe { Self::from_weak(weak) }) + RawWeak::try_new_uninit_in::<1>(alloc).map(|weak| unsafe { Self::from_weak(weak) }) } pub fn try_new_zeroed() -> Result @@ -1055,12 +1052,11 @@ impl RawRc, A> { RawWeak::try_new_zeroed::<1>().map(|weak| unsafe { Self::from_weak(weak) }) } - #[cfg(not(no_global_oom_handling))] - pub fn new_uninit_in(alloc: A) -> Self + pub fn try_new_zeroed_in(alloc: A) -> Result where A: Allocator, { - unsafe { Self::from_weak(RawWeak::new_uninit_in::<1>(alloc)) } + RawWeak::try_new_zeroed_in::<1>(alloc).map(|weak| unsafe { Self::from_weak(weak) }) } #[cfg(not(no_global_oom_handling))] @@ -1072,11 +1068,11 @@ impl RawRc, A> { } #[cfg(not(no_global_oom_handling))] - pub fn new_zeroed_in(alloc: A) -> Self + pub fn new_uninit_in(alloc: A) -> Self where A: Allocator, { - unsafe { Self::from_weak(RawWeak::new_zeroed_in::<1>(alloc)) } + unsafe { Self::from_weak(RawWeak::new_uninit_in::<1>(alloc)) } } #[cfg(not(no_global_oom_handling))] @@ -1087,6 +1083,14 @@ impl RawRc, A> { unsafe { Self::from_weak(RawWeak::new_zeroed::<1>()) } } + #[cfg(not(no_global_oom_handling))] 
+ pub fn new_zeroed_in(alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak(RawWeak::new_zeroed_in::<1>(alloc)) } + } + pub unsafe fn assume_init(self) -> RawRc { unsafe { self.cast() } } @@ -1099,11 +1103,27 @@ impl RawRc<[T], A> { A: Allocator + Default, I: Iterator, { - struct Guard<'a, T, A> + struct DeallocateOnDrop<'a, T, A> where A: Allocator, { uninit_rc: &'a mut RawRc<[MaybeUninit], A>, + } + + impl Drop for DeallocateOnDrop<'_, T, A> + where + A: Allocator, + { + fn drop(&mut self) { + unsafe { self.uninit_rc.weak.deallocate() }; + } + } + + struct Guard<'a, T, A> + where + A: Allocator, + { + uninit_rc: DeallocateOnDrop<'a, T, A>, tail: NonNull, } @@ -1113,24 +1133,24 @@ impl RawRc<[T], A> { { fn drop(&mut self) { unsafe { - let length = self.tail.offset_from(self.uninit_rc.as_ptr().cast()); + let length = self.tail.sub_ptr(self.uninit_rc.uninit_rc.as_ptr().cast()); NonNull::<[T]>::slice_from_raw_parts( - self.uninit_rc.as_ptr().cast(), - length as _, + self.uninit_rc.uninit_rc.as_ptr().cast(), + length, ) .drop_in_place(); - - self.uninit_rc.weak.deallocate(); } } } let mut uninit_rc = RawRc::<[MaybeUninit], A>::new_uninit_slice(length); - let tail = uninit_rc.as_ptr().cast(); unsafe { - let mut guard = Guard:: { uninit_rc: &mut uninit_rc, tail }; + let tail = uninit_rc.as_ptr().cast(); + + let mut guard = + Guard:: { uninit_rc: DeallocateOnDrop { uninit_rc: &mut uninit_rc }, tail }; iter.for_each(|value| { guard.tail.write(value); @@ -1145,14 +1165,6 @@ impl RawRc<[T], A> { } impl RawRc<[MaybeUninit], A> { - #[cfg(not(no_global_oom_handling))] - pub fn new_uninit_slice_in(length: usize, alloc: A) -> Self - where - A: Allocator, - { - unsafe { Self::from_weak(RawWeak::new_uninit_slice_in::<1>(length, alloc)) } - } - #[cfg(not(no_global_oom_handling))] pub fn new_uninit_slice(length: usize) -> Self where @@ -1162,11 +1174,11 @@ impl RawRc<[MaybeUninit], A> { } #[cfg(not(no_global_oom_handling))] - pub fn new_zeroed_slice_in(length: usize, alloc: A) -> Self + pub fn new_uninit_slice_in(length: usize, alloc: A) -> Self where A: Allocator, { - unsafe { Self::from_weak(RawWeak::new_zeroed_slice_in::<1>(length, alloc)) } + unsafe { Self::from_weak(RawWeak::new_uninit_slice_in::<1>(length, alloc)) } } #[cfg(not(no_global_oom_handling))] @@ -1177,41 +1189,49 @@ impl RawRc<[MaybeUninit], A> { unsafe { Self::from_weak(RawWeak::new_zeroed_slice::<1>(length)) } } + #[cfg(not(no_global_oom_handling))] + pub fn new_zeroed_slice_in(length: usize, alloc: A) -> Self + where + A: Allocator, + { + unsafe { Self::from_weak(RawWeak::new_zeroed_slice_in::<1>(length, alloc)) } + } + pub unsafe fn assume_init(self) -> RawRc<[T], A> { unsafe { self.cast_with(|ptr| NonNull::new_unchecked(ptr.as_ptr() as _)) } } } impl RawRc { - pub unsafe fn downcast_unchecked(self) -> RawRc + pub fn downcast(self) -> Result, Self> where T: Any, { - unsafe { self.cast() } + if self.as_ref().is::() { Ok(unsafe { self.downcast_unchecked() }) } else { Err(self) } } - pub fn downcast(self) -> Result, Self> + pub unsafe fn downcast_unchecked(self) -> RawRc where T: Any, { - if self.as_ref().is::() { Ok(unsafe { self.downcast_unchecked() }) } else { Err(self) } + unsafe { self.cast() } } } #[cfg(not(no_sync))] impl RawRc { - pub unsafe fn downcast_unchecked(self) -> RawRc + pub fn downcast(self) -> Result, Self> where T: Any, { - unsafe { self.cast() } + if self.as_ref().is::() { Ok(unsafe { self.downcast_unchecked() }) } else { Err(self) } } - pub fn downcast(self) -> Result, Self> + pub unsafe fn 
downcast_unchecked(self) -> RawRc where T: Any, { - if self.as_ref().is::() { Ok(unsafe { self.downcast_unchecked() }) } else { Err(self) } + unsafe { self.cast() } } } @@ -1220,7 +1240,7 @@ where T: ?Sized, { fn as_ref(&self) -> &T { - unsafe { self.as_ptr().as_ref() } + unsafe { self.weak.as_ref_unchecked() } } } @@ -1231,13 +1251,6 @@ where { } -impl DispatchFromDyn> for RawRc -where - T: ?Sized + Unsize, - U: ?Sized, -{ -} - impl Debug for RawRc where T: Debug + ?Sized, @@ -1256,6 +1269,13 @@ where } } +impl DispatchFromDyn> for RawRc +where + T: ?Sized + Unsize, + U: ?Sized, +{ +} + impl Pointer for RawRc where T: ?Sized, @@ -1286,8 +1306,8 @@ impl SpecRcDefault for RawRc where T: Default, { - default fn spec_default() -> Self { - unsafe fn default_impl() -> RawRc + fn spec_default() -> Self { + unsafe fn spec_default_impl() -> RawRc where T: Default, { @@ -1311,6 +1331,13 @@ where } } + const { + assert!( + RefCounts::LAYOUT.size().checked_mul(N).unwrap() + == RefCounts::LAYOUT.padding_needed_for(T::LAYOUT.align()) + ); + }; + let (allocation_ptr, alloc) = Box::into_non_null_with_allocator(Box::>::default()); @@ -1325,7 +1352,7 @@ where macro_rules! select_impl { ($($value:literal,)*) => { match RefCounts::LAYOUT.padding_needed_for(T::LAYOUT.align()) / RefCounts::LAYOUT.size() { - $($value => default_impl::,)* + $($value => spec_default_impl::,)* _ => panic!("invalid padding"), } }; @@ -1540,7 +1567,7 @@ where A: Allocator + Default, T: Copy, { - default fn spec_from_slice(slice: &[T]) -> Self { + fn spec_from_slice(slice: &[T]) -> Self { unsafe { let alloc = A::default(); let ptr = allocate_for_rc_with_value::<[T], A, 1>(slice, &alloc); @@ -1713,19 +1740,19 @@ trait MarkerEq: PartialEq {} impl MarkerEq for T where T: Eq {} trait SpecPartialEq { - fn eq(&self, other: &Self) -> bool; - fn ne(&self, other: &Self) -> bool; + fn spec_eq(&self, other: &Self) -> bool; + fn spec_ne(&self, other: &Self) -> bool; } impl SpecPartialEq for RawRc where T: PartialEq + ?Sized, { - default fn eq(&self, other: &Self) -> bool { + default fn spec_eq(&self, other: &Self) -> bool { T::eq(self.as_ref(), other.as_ref()) } - default fn ne(&self, other: &Self) -> bool { + default fn spec_ne(&self, other: &Self) -> bool { T::ne(self.as_ref(), other.as_ref()) } } @@ -1734,11 +1761,11 @@ impl SpecPartialEq for RawRc where T: MarkerEq + ?Sized, { - fn eq(&self, other: &Self) -> bool { + fn spec_eq(&self, other: &Self) -> bool { Self::ptr_eq(self, other) || T::eq(self.as_ref(), other.as_ref()) } - fn ne(&self, other: &Self) -> bool { + fn spec_ne(&self, other: &Self) -> bool { Self::ptr_ne(self, other) && T::ne(self.as_ref(), other.as_ref()) } } @@ -1748,11 +1775,11 @@ where T: PartialEq + ?Sized, { fn eq(&self, other: &Self) -> bool { - SpecPartialEq::eq(self, other) + Self::spec_eq(self, other) } fn ne(&self, other: &Self) -> bool { - SpecPartialEq::ne(self, other) + Self::spec_ne(self, other) } } @@ -1848,19 +1875,19 @@ impl RawUniqueRc { } #[cfg(not(no_global_oom_handling))] - pub fn new_in(value: T, alloc: A) -> Self + pub fn new(value: T) -> Self where - A: Allocator, + A: Allocator + Default, { - unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<0>(alloc), value) } + unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<0>(), value) } } #[cfg(not(no_global_oom_handling))] - pub fn new(value: T) -> Self + pub fn new_in(value: T, alloc: A) -> Self where - A: Allocator + Default, + A: Allocator, { - unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<0>(), value) } + unsafe { 
Self::from_weak_with_value(RawWeak::new_uninit_in::<0>(alloc), value) } } } @@ -1869,7 +1896,7 @@ where T: ?Sized, { fn as_ref(&self) -> &T { - unsafe { self.weak.as_ptr().as_ref() } + unsafe { self.weak.as_ref_unchecked() } } } @@ -1878,7 +1905,7 @@ where T: ?Sized, { fn as_mut(&mut self) -> &mut T { - unsafe { self.weak.as_ptr().as_mut() } + unsafe { self.weak.get_mut_unchecked() } } } @@ -1890,13 +1917,6 @@ where { } -impl DispatchFromDyn> for RawUniqueRc -where - T: ?Sized + Unsize, - U: ?Sized, -{ -} - impl Debug for RawUniqueRc where T: Debug + ?Sized, @@ -1905,3 +1925,10 @@ where Debug::fmt(self.as_ref(), f) } } + +impl DispatchFromDyn> for RawUniqueRc +where + T: ?Sized + Unsize, + U: ?Sized, +{ +} diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 01f557726f41d..440b51d660287 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -1649,7 +1649,7 @@ impl Rc { /// assert!(!Rc::ptr_eq(&five, &other_five)); /// ``` pub fn ptr_eq(this: &Self, other: &Self) -> bool { - this.raw_rc.ptr_eq(&other.raw_rc) + RawRc::ptr_eq(&this.raw_rc, &other.raw_rc) } } @@ -2809,7 +2809,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - self.raw_weak.ptr_eq(&other.raw_weak) + RawWeak::ptr_eq(&self.raw_weak, &other.raw_weak) } } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index d0f6677090583..2e25c0e5679aa 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -1808,7 +1808,7 @@ impl Arc { #[must_use] #[stable(feature = "ptr_eq", since = "1.17.0")] pub fn ptr_eq(this: &Self, other: &Self) -> bool { - this.raw_rc.ptr_eq(&other.raw_rc) + RawRc::ptr_eq(&this.raw_rc, &other.raw_rc) } } @@ -2538,7 +2538,7 @@ impl Weak { #[must_use] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - self.raw_weak.ptr_eq(&other.raw_weak) + RawWeak::ptr_eq(&self.raw_weak, &other.raw_weak) } } diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index 9974d7698d616..19ecf51c169f1 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -182,16 +182,16 @@ def display_hint(): return "array" -_REF_COUNTS_TYPE = None +_REF_COUNTS_PTR_TYPE = None def _get_ref_counts_ptr_type(): - global _REF_COUNTS_TYPE + global _REF_COUNTS_PTR_TYPE - if _REF_COUNTS_TYPE is None: - _REF_COUNTS_TYPE = gdb.lookup_type("alloc::raw_rc::RefCounts").pointer() + if _REF_COUNTS_PTR_TYPE is None: + _REF_COUNTS_PTR_TYPE = gdb.lookup_type("alloc::raw_rc::RefCounts").pointer() - return _REF_COUNTS_TYPE + return _REF_COUNTS_PTR_TYPE class StdRcProvider(printer_base):
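
Note for readers following the `raw_rc.rs` hunks above: the module keeps a `RefCounts` header directly in front of the value in a single allocation, which is what the `RcLayout { allocation_layout, allocation_offset_bytes }` pair, the new `const _: () = assert!(RefCounts::LAYOUT.size().is_power_of_two())` assertion, and the gdb provider's cast to `alloc::raw_rc::RefCounts*` all rely on. The following is a minimal stand-alone sketch of that layout computation, not the patch's actual code: `rc_layout_for` is a hypothetical name, and the sketch's `RefCounts` uses plain `usize` fields where the real type wraps its counters in `UnsafeCell`.

```rust
use std::alloc::Layout;
use std::mem::size_of;

// Simplified stand-in for the patch's `RefCounts`: the real type wraps
// both counters in `UnsafeCell`, which changes neither size nor alignment.
#[repr(C)]
struct RefCounts {
    strong: usize,
    weak: usize,
}

// Sketch of the idea behind `RcLayout::from_value_layout` (hypothetical
// name): extend the counter header with the value's layout to get one
// combined allocation layout plus the value's byte offset within it.
fn rc_layout_for(value_layout: Layout) -> (Layout, usize) {
    let header = Layout::new::<RefCounts>();
    // `Layout::extend` inserts any padding needed to align the value
    // after the header and returns the value's offset in the allocation.
    let (allocation_layout, value_offset) =
        header.extend(value_layout).expect("layout overflow");
    (allocation_layout.pad_to_align(), value_offset)
}

fn main() {
    // Mirrors the patch's compile-time `is_power_of_two` assertion: a
    // power-of-two header size keeps the offset arithmetic simple.
    assert!(size_of::<RefCounts>().is_power_of_two());

    // For values whose alignment does not exceed the header's, the value
    // sits exactly `size_of::<RefCounts>()` bytes into the allocation, so
    // a debugger can walk back from the value pointer to the counters --
    // which is what the gdb provider's pointer cast above relies on.
    let (layout, offset) = rc_layout_for(Layout::new::<u64>());
    assert_eq!(offset, size_of::<RefCounts>());
    println!("allocation: {layout:?}, value at byte offset {offset}");
}
```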
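The `Guard`/`DeallocateOnDrop` pair that the `from_iter` hunk splits apart, like the `GuardedRc`/`GuardedWeak` types, is an instance of the drop-guard pattern: cleanup lives in a `Drop` impl so it runs even if the closure or iterator panics midway. Below is a minimal sketch of that pattern under the assumption that the iterator yields exactly `slice.len()` items; `PartialInitGuard` and `fill_from_iter` are hypothetical names, and unlike the patch the sketch fills a borrowed slice rather than a freshly allocated `Rc` buffer (so it omits the deallocation half handled by `DeallocateOnDrop`).

```rust
use std::mem::{forget, MaybeUninit};
use std::ptr;

// Hypothetical guard; the patch's real guard additionally owns the
// allocation (via `DeallocateOnDrop`) so a panic frees the buffer too.
struct PartialInitGuard<'a, T> {
    slice: &'a mut [MaybeUninit<T>],
    initialized: usize,
}

impl<T> Drop for PartialInitGuard<'_, T> {
    fn drop(&mut self) {
        // Runs only if the fill panics (the success path `forget`s the
        // guard): drop exactly the prefix that was written, no more.
        unsafe {
            let head = self.slice.as_mut_ptr().cast::<T>();
            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(head, self.initialized));
        }
    }
}

// Assumes `iter` yields exactly `slice.len()` items; the patch's code
// instead advances a `tail` pointer through the new allocation.
fn fill_from_iter<T>(slice: &mut [MaybeUninit<T>], iter: impl IntoIterator<Item = T>) {
    let mut guard = PartialInitGuard { slice, initialized: 0 };
    for value in iter {
        guard.slice[guard.initialized].write(value);
        guard.initialized += 1;
    }
    forget(guard); // fully initialized: suppress the cleanup
}

fn main() {
    let mut buf = [const { MaybeUninit::<String>::uninit() }; 3];
    fill_from_iter(&mut buf, ["a", "b", "c"].map(String::from));
    // Safety: every slot was initialized by `fill_from_iter` above.
    let values: Vec<String> =
        buf.iter().map(|slot| unsafe { slot.assume_init_read() }).collect();
    assert_eq!(values, ["a", "b", "c"]);
}
```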