 use core::any::Any;
 use core::borrow;
+#[cfg(not(no_global_oom_handling))]
 use core::clone::CloneToUninit;
 use core::cmp::Ordering;
 use core::fmt;
@@ -33,7 +34,6 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release};
 #[cfg(not(no_global_oom_handling))]
 use crate::alloc::handle_alloc_error;
-#[cfg(not(no_global_oom_handling))]
 use crate::alloc::{AllocError, Allocator, Global, Layout};
 use crate::borrow::{Cow, ToOwned};
 use crate::boxed::Box;
@@ -2055,7 +2055,8 @@ impl<T: ?Sized, A: Allocator> Deref for Arc<T, A> {
 #[unstable(feature = "receiver_trait", issue = "none")]
 impl<T: ?Sized> Receiver for Arc<T> {}

-impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
     /// Makes a mutable reference into the given `Arc`.
     ///
     /// If there are other `Arc` pointers to the same allocation, then `make_mut` will
@@ -2106,10 +2107,11 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
     /// assert!(76 == *data);
     /// assert!(weak.upgrade().is_none());
     /// ```
-    #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = mem::size_of_val::<T>(&**this);
+
         // Note that we hold both a strong reference and a weak reference.
         // Thus, releasing our strong reference only will not, by itself, cause
         // the memory to be deallocated.
@@ -2120,13 +2122,19 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
         // deallocated.
         if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                (**this).clone_to_uninit(data.as_mut_ptr());
-                *this = arc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: ArcUninit<T, A> =
+                ArcUninit::new(this_data_ref, this.alloc.clone());
+
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_arc()
+            };
+            *this = initialized_clone;
         } else if this.inner().weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
@@ -2145,11 +2153,21 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
             let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() };

             // Can just steal the data, all that's left is Weaks
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
+            //
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: ArcUninit<T, A> = ArcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
-                ptr::write(this, arc.assume_init());
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
+
+                ptr::write(this, in_progress.into_arc());
             }
         } else {
             // We were the sole reference of either kind; bump back up the
@@ -2161,7 +2179,9 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
         // either unique to begin with, or became one upon cloning the contents.
         unsafe { Self::get_mut_unchecked(this) }
     }
+}

+impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
     /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
     /// clone.
     ///
@@ -3557,6 +3577,68 @@ fn data_offset_align(align: usize) -> usize {
     layout.size() + layout.padding_needed_for(align)
 }

+/// A unique owning pointer to an [`ArcInner`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Arc::make_mut()`] to ensure correct cleanup on panic.
+#[cfg(not(no_global_oom_handling))]
+struct ArcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<ArcInner<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> ArcUninit<T, A> {
+    /// Allocate an ArcInner with layout suitable to contain `for_value` or a clone of it.
+    fn new(for_value: &T, alloc: A) -> ArcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Arc::allocate_for_layout(
+                layout,
+                |layout_for_arcinner| alloc.allocate(layout_for_arcinner),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const ArcInner<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Arc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Arc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_arc(mut self) -> Arc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `ArcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Arc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> Drop for ArcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_arc() was called, which forgets us.
+        unsafe {
+            self.alloc.take().unwrap().deallocate(
+                self.ptr.cast(),
+                arcinner_layout_for_value_layout(self.layout_for_value),
+            );
+        }
+    }
+}
+
 #[stable(feature = "arc_error", since = "1.52.0")]
 impl<T: core::error::Error + ?Sized> core::error::Error for Arc<T> {
     #[allow(deprecated, deprecated_in_future)]
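
Note (not part of the patch): with the impl bound widened to `T: ?Sized + CloneToUninit`, `Arc::make_mut` becomes callable on unsized contents such as `Arc<[T]>` and `Arc<str>`, since slices and `str` implement `CloneToUninit`. A minimal, self-contained sketch of the caller-visible behaviour this enables, assuming the change above:

use std::sync::Arc;

fn main() {
    // Two strong pointers to the same unsized (slice) allocation.
    let mut data: Arc<[i32]> = Arc::from(vec![1, 2, 3]);
    let other = Arc::clone(&data);

    // `other` still refers to the original allocation, so `make_mut` takes the
    // `clone_to_uninit` branch above and clones the slice before handing out `&mut`.
    Arc::make_mut(&mut data)[0] = 10;
    assert_eq!(*data, [10, 2, 3]);
    assert_eq!(*other, [1, 2, 3]);

    // Once `data` is the only strong and weak pointer, `make_mut` gives direct
    // mutable access without cloning.
    drop(other);
    Arc::make_mut(&mut data)[1] = 20;
    assert_eq!(*data, [10, 20, 3]);
}

The `ArcUninit` guard is what keeps the cloning branch panic-safe: if `clone_to_uninit` panics partway through, the guard's `Drop` deallocates the partially initialized `ArcInner` instead of leaking it, while `into_arc` defuses the guard with `mem::forget` once initialization has succeeded.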