@@ -183,17 +183,14 @@ use core::{
        NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize, Wrapping,
    },
    ops::{Deref, DerefMut},
-    ptr, slice,
+    ptr::{self, NonNull},
+    slice,
};

#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "alloc")]
-use {
-    alloc::boxed::Box,
-    alloc::vec::Vec,
-    core::{alloc::Layout, ptr::NonNull},
-};
+use {alloc::boxed::Box, alloc::vec::Vec, core::alloc::Layout};

// This is a hack to allow zerocopy-derive derives to work in this crate. They
// assume that zerocopy is linked as an extern crate, so they access items from
@@ -203,6 +200,203 @@ mod zerocopy {
    pub(crate) use crate::*;
}

+/// When performing a byte-slice-to-type cast, is the type taken from the prefix
+/// of the byte slice or from the suffix of the byte slice?
+#[doc(hidden)]
+#[allow(missing_debug_implementations, missing_copy_implementations)]
+pub enum CastType {
+    Prefix,
+    Suffix,
+}
+
+/// A trait which carries information about a type's layout that is used by the
+/// internals of this crate.
+///
+/// This trait is not meant for consumption by code outside of this crate. While
+/// the normal semver stability guarantees apply with respect to which types
+/// implement this trait and which trait implementations are implied by this
+/// trait, no semver stability guarantees are made regarding its internals; they
+/// may change at any time, and code which makes use of them may break.
+///
+/// # Safety
+///
+/// This trait does not convey any safety guarantees to code outside this crate.
+pub unsafe trait KnownLayout: sealed::KnownLayoutSealed {
+    #[doc(hidden)]
+    const FIXED_PREFIX_SIZE: usize;
+    #[doc(hidden)]
+    const ALIGN: NonZeroUsize;
+    #[doc(hidden)]
+    const TRAILING_SLICE_ELEM_SIZE: Option<usize>;
+
+    /// Validates that the memory region at `addr` of length `bytes_len`
+    /// satisfies `Self`'s size and alignment requirements, returning `(elems,
+    /// split_at, prefix_suffix_bytes)`.
+    ///
+    /// In particular, `validate_size_align` validates that:
+    /// - `bytes_len` is large enough to hold an instance of `Self`
+    /// - If `cast_type` is `Prefix`, `addr` satisfies `Self`'s alignment
+    ///   requirements
+    /// - If `cast_type` is `Suffix`, `addr + split_at` satisfies `Self`'s
+    ///   alignment requirements
+    ///
+    /// For DSTs, `elems` is the maximum number of trailing slice elements such
+    /// that a `Self` with that number of trailing slice elements can fit in the
+    /// provided space. For sized types, `elems` is always 0.
+    ///
+    /// `split_at` indicates the point at which to split the memory region in
+    /// order to separate the `Self` from its prefix or suffix. If `cast_type`
+    /// is `Prefix`, `split_at` is the offset of the first byte of the suffix.
+    /// If `cast_type` is `Suffix`, `split_at` is the offset of the first byte
+    /// of the `Self`.
+    ///
+    /// # Panics
+    ///
+    /// Panics if called on a DST whose trailing slice element type is a
+    /// zero-sized type.
+    #[doc(hidden)]
+    #[inline(always)]
+    fn validate_size_align<A: crate::util::AsAddress>(
+        addr: A,
+        bytes_len: usize,
+        cast_type: CastType,
+    ) -> Option<(usize, usize, usize)> {
+        let trailing_slice_bytes = bytes_len.checked_sub(Self::FIXED_PREFIX_SIZE)?;
+        let (elems, self_bytes) = if let Some(elem_size) = Self::TRAILING_SLICE_ELEM_SIZE {
+            let elem_size = NonZeroUsize::new(elem_size)
+                .expect("attempted to cast to slice type with zero-sized element");
+            #[allow(clippy::arithmetic_side_effects)]
+            let elems = trailing_slice_bytes / elem_size;
+            #[allow(clippy::arithmetic_side_effects)]
+            let self_bytes = Self::FIXED_PREFIX_SIZE + (elems * elem_size.get());
+            (elems, self_bytes)
+        } else {
+            (0, Self::FIXED_PREFIX_SIZE)
+        };
+
+        // `self_addr` indicates where in the given byte range the `Self` will
+        // start. If we're doing a prefix cast, it starts at the beginning. If
+        // we're doing a suffix cast, it starts after whatever bytes are
+        // remaining.
+        #[allow(clippy::arithmetic_side_effects)]
+        let (self_addr, split_at) = match cast_type {
+            CastType::Prefix => (addr.addr(), self_bytes),
+            CastType::Suffix => {
+                let split_at = bytes_len - self_bytes;
+                (addr.addr() + split_at, split_at)
+            }
+        };
+
+        #[allow(clippy::arithmetic_side_effects)]
+        if self_addr % Self::ALIGN != 0 {
+            return None;
+        }
+
+        #[allow(clippy::arithmetic_side_effects)]
+        let ret = Some((elems, split_at, bytes_len - self_bytes));
+        ret
+    }
+
+    /// SAFETY: The returned pointer has the same address and provenance as
+    /// `bytes`. If `Self` is a DST, the returned pointer's referent has `elems`
+    /// elements in its trailing slice.
+    #[doc(hidden)]
+    fn raw_from_ptr_len(bytes: NonNull<u8>, elems: usize) -> NonNull<Self>;
+}
+
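To make the arithmetic in `validate_size_align` concrete, here is a standalone sketch (illustrative only, not part of this change) that mirrors the computation for `Self = [u16]` over a 9-byte region; the starting address and the layout constants are assumed values, not taken from the crate:

```rust
// Mirrors the arithmetic of `validate_size_align` for `Self = [u16]`:
// FIXED_PREFIX_SIZE = 0, ALIGN = 2, TRAILING_SLICE_ELEM_SIZE = Some(2).
fn main() {
    let addr: usize = 0x1000; // assumed 2-aligned starting address
    let bytes_len: usize = 9;

    // How many whole `u16` elements fit after the (empty) fixed prefix?
    let fixed_prefix_size: usize = 0;
    let elem_size: usize = 2;
    let trailing_slice_bytes = bytes_len - fixed_prefix_size; // 9
    let elems = trailing_slice_bytes / elem_size; // 4
    let self_bytes = fixed_prefix_size + elems * elem_size; // 8

    // Prefix cast: the `[u16]` starts at `addr` (already aligned), so the
    // result is `(elems, split_at, suffix_bytes) = (4, 8, 1)`.
    assert_eq!((elems, self_bytes, bytes_len - self_bytes), (4, 8, 1));

    // Suffix cast: the `[u16]` must end at the end of the region, so it
    // starts at `addr + split_at = 0x1001`, which is not 2-aligned; the
    // alignment check fails and the cast returns `None`.
    let split_at = bytes_len - self_bytes; // 1
    assert_ne!((addr + split_at) % 2, 0);
}
```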
+impl<T: KnownLayout> sealed::KnownLayoutSealed for [T] {}
+// SAFETY: See inline comments.
+unsafe impl<T: KnownLayout> KnownLayout for [T] {
+    // `[T]` is a slice type; it has no fields before the trailing slice.
+    const FIXED_PREFIX_SIZE: usize = 0;
+    // Slices have the same layout as the array they slice. [1] Arrays `[T; _]`
+    // have the same alignment as `T`. [2]
+    //
+    // [1] https://doc.rust-lang.org/reference/type-layout.html#slice-layout
+    // [2] https://doc.rust-lang.org/reference/type-layout.html#array-layout
+    const ALIGN: NonZeroUsize = if let Some(align) = NonZeroUsize::new(mem::align_of::<T>()) {
+        align
+    } else {
+        unreachable!()
+    };
+    const TRAILING_SLICE_ELEM_SIZE: Option<usize> = Some(mem::size_of::<T>());
+
+    // SAFETY: `.cast` preserves address and provenance. The returned pointer
+    // refers to an object with `elems` elements by construction.
+    #[inline(always)]
+    fn raw_from_ptr_len(data: NonNull<u8>, elems: usize) -> NonNull<Self> {
+        // TODO(#67): Remove this allow. See `NonNullExt` for more details.
+        #[allow(unstable_name_collisions)]
+        NonNull::slice_from_raw_parts(data.cast::<T>(), elems)
+    }
+}
+
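As a side note (illustrative, not from the diff): `raw_from_ptr_len` for `[T]` only needs a thin `NonNull<u8>` plus `elems` because a slice's element count lives entirely in the fat pointer's metadata, which `slice_from_raw_parts` sets. A minimal sketch on stable Rust:

```rust
use core::ptr;

fn main() {
    // An aligned, live allocation to point into (6 `u16`s).
    let buf = [0u16; 6];

    // Build a fat `*const [u16]` whose metadata claims 3 elements; the count
    // comes from the pointer itself, not from the allocation.
    let raw: *const [u16] = ptr::slice_from_raw_parts(buf.as_ptr(), 3);

    // SAFETY: `raw` is aligned, non-null, and covers 3 initialized `u16`s.
    let slice: &[u16] = unsafe { &*raw };
    assert_eq!(slice.len(), 3);
}
```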
+/// Implements `KnownLayout` for a sized type.
+macro_rules! impl_known_layout {
+    (const $constvar:ident : $constty:ty, $tyvar:ident $(: ?$optbound:ident)? => $ty:ty) => {
+        impl_known_layout!(@inner const $constvar: $constty, $tyvar $(: ?$optbound)? => $ty);
+    };
+    ($tyvar:ident $(: ?$optbound:ident)? => $ty:ty) => {
+        impl_known_layout!(@inner , $tyvar $(: ?$optbound)? => $ty);
+    };
+    ($ty:ty) => {
+        impl_known_layout!(@inner , => $ty);
+    };
+    ($($tyvar:ident $(: ?$optbound:ident)? => $ty:ty),*) => {
+        $(
+            impl_known_layout!(@inner , $tyvar $(: ?$optbound)? => $ty);
+        )*
+    };
+    ($($ty:ty),*) => {
+        $(
+            impl_known_layout!(@inner , => $ty);
+        )*
+    };
+    (@inner $(const $constvar:ident : $constty:ty)? , $($tyvar:ident $(: ?$optbound:ident)?)? => $ty:ty) => {
+        impl<$(const $constvar: $constty,)? $($tyvar $(: ?$optbound)?)?> sealed::KnownLayoutSealed for $ty {}
+        // SAFETY: See inline comments.
+        unsafe impl<$(const $constvar: $constty,)? $($tyvar $(: ?$optbound)?)?> KnownLayout for $ty {
+            const FIXED_PREFIX_SIZE: usize = mem::size_of::<$ty>();
+            const ALIGN: NonZeroUsize = if let Some(align) = NonZeroUsize::new(mem::align_of::<$ty>()) {
+                align
+            } else {
+                unreachable!()
+            };
+            // `T` is sized so it has no trailing slice.
+            const TRAILING_SLICE_ELEM_SIZE: Option<usize> = None;
+
+            // SAFETY: `.cast` preserves address and provenance.
+            #[inline(always)]
+            fn raw_from_ptr_len(bytes: NonNull<u8>, _elems: usize) -> NonNull<Self> {
+                bytes.cast::<Self>()
+            }
+        }
+    };
+}
+
+#[rustfmt::skip]
+impl_known_layout!(
+    (),
+    u8, i8, u16, i16, u32, i32, u64, i64, u128, i128, usize, isize, f32, f64,
+    bool, char,
+    NonZeroU8, NonZeroI8, NonZeroU16, NonZeroI16, NonZeroU32, NonZeroI32,
+    NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize
+);
+impl_known_layout!(T => Option<T>);
+impl_known_layout!(T: ?Sized => PhantomData<T>);
+impl_known_layout!(T => Wrapping<T>);
+impl_known_layout!(T => MaybeUninit<T>);
+impl_known_layout!(const N: usize, T => [T; N]);
+
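For orientation, this is roughly what `impl_known_layout!(u32)` expands to through the `@inner` rule (a hand-expanded sketch; the real expansion carries empty generic parameter lists):

```rust
impl sealed::KnownLayoutSealed for u32 {}
// SAFETY: See inline comments.
unsafe impl KnownLayout for u32 {
    // A sized type is "all fixed prefix": its whole size, no trailing slice.
    const FIXED_PREFIX_SIZE: usize = mem::size_of::<u32>(); // 4
    const ALIGN: NonZeroUsize = if let Some(align) = NonZeroUsize::new(mem::align_of::<u32>()) {
        align // 4
    } else {
        unreachable!()
    };
    const TRAILING_SLICE_ELEM_SIZE: Option<usize> = None;

    // `_elems` is ignored for sized types; only the address is meaningful.
    #[inline(always)]
    fn raw_from_ptr_len(bytes: NonNull<u8>, _elems: usize) -> NonNull<Self> {
        bytes.cast::<Self>()
    }
}
```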
+safety_comment! {
+    /// SAFETY:
+    /// `str` and `ManuallyDrop<[T]>` have the same representations as `[u8]`
+    /// and `[T]` respectively. `str` has different bit validity than `[u8]`,
+    /// but that doesn't affect the soundness of this impl.
+    unsafe_impl_known_layout!(#[repr([u8])] str);
+    unsafe_impl_known_layout!(T: ?Sized + KnownLayout => #[repr(T)] ManuallyDrop<T>);
+}
+
/// Types for which a sequence of bytes all set to zero represents a valid
/// instance of the type.
///
@@ -1157,6 +1351,7 @@ mod simd {
        use core::arch::$arch::{$($typ),*};

        use crate::*;
+        impl_known_layout!($($typ),*);
        safety_comment! {
            /// SAFETY:
            /// See comment on module definition for justification.
@@ -2261,7 +2456,8 @@ where
}

mod sealed {
-    pub trait Sealed {}
+    pub trait ByteSliceSealed {}
+    pub trait KnownLayoutSealed {}
}

// ByteSlice and ByteSliceMut abstract over [u8] references (&[u8], &mut [u8],
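The renamed sealing traits follow the usual sealed-trait pattern. A minimal, self-contained sketch (names reused for illustration; this is not the crate's actual code) of why downstream crates can use `ByteSlice` in bounds but cannot implement it:

```rust
mod sealed {
    // `pub` so it can serve as a supertrait, but the module itself is private,
    // so code outside this crate cannot name (or implement) the trait.
    pub trait ByteSliceSealed {}
}

// Publicly nameable, yet bounded on the private supertrait: downstream code
// can write `B: ByteSlice`, but only this crate can add implementations.
pub trait ByteSlice: sealed::ByteSliceSealed {}

impl<'a> sealed::ByteSliceSealed for &'a [u8] {}
impl<'a> ByteSlice for &'a [u8] {}

fn takes_bytes<B: ByteSlice>(_bytes: B) {}

fn main() {
    takes_bytes(&[1u8, 2, 3][..]);
}
```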
@@ -2287,7 +2483,9 @@ mod sealed {
///
/// [`Vec<u8>`]: alloc::vec::Vec
/// [`split_at`]: crate::ByteSlice::split_at
-pub unsafe trait ByteSlice: Deref<Target = [u8]> + Sized + self::sealed::Sealed {
+pub unsafe trait ByteSlice:
+    Deref<Target = [u8]> + Sized + self::sealed::ByteSliceSealed
+{
    /// Gets a raw pointer to the first byte in the slice.
    #[inline]
    fn as_ptr(&self) -> *const u8 {
@@ -2318,7 +2516,7 @@ pub unsafe trait ByteSliceMut: ByteSlice + DerefMut {
    }
}

-impl<'a> sealed::Sealed for &'a [u8] {}
+impl<'a> sealed::ByteSliceSealed for &'a [u8] {}
// TODO(#61): Add a "SAFETY" comment and remove this `allow`.
#[allow(clippy::undocumented_unsafe_blocks)]
unsafe impl<'a> ByteSlice for &'a [u8] {
@@ -2328,7 +2526,7 @@ unsafe impl<'a> ByteSlice for &'a [u8] {
    }
}

-impl<'a> sealed::Sealed for &'a mut [u8] {}
+impl<'a> sealed::ByteSliceSealed for &'a mut [u8] {}
// TODO(#61): Add a "SAFETY" comment and remove this `allow`.
#[allow(clippy::undocumented_unsafe_blocks)]
unsafe impl<'a> ByteSlice for &'a mut [u8] {
@@ -2338,7 +2536,7 @@ unsafe impl<'a> ByteSlice for &'a mut [u8] {
    }
}

-impl<'a> sealed::Sealed for cell::Ref<'a, [u8]> {}
+impl<'a> sealed::ByteSliceSealed for cell::Ref<'a, [u8]> {}
// TODO(#61): Add a "SAFETY" comment and remove this `allow`.
#[allow(clippy::undocumented_unsafe_blocks)]
unsafe impl<'a> ByteSlice for cell::Ref<'a, [u8]> {
@@ -2348,7 +2546,7 @@ unsafe impl<'a> ByteSlice for cell::Ref<'a, [u8]> {
    }
}

-impl<'a> sealed::Sealed for RefMut<'a, [u8]> {}
+impl<'a> sealed::ByteSliceSealed for RefMut<'a, [u8]> {}
// TODO(#61): Add a "SAFETY" comment and remove this `allow`.
#[allow(clippy::undocumented_unsafe_blocks)]
unsafe impl<'a> ByteSlice for RefMut<'a, [u8]> {
@@ -2366,6 +2564,63 @@ unsafe impl<'a> ByteSliceMut for &'a mut [u8] {}
#[allow(clippy::undocumented_unsafe_blocks)]
unsafe impl<'a> ByteSliceMut for RefMut<'a, [u8]> {}

+// A polyfill for `<*const _>::cast_mut` that we can use before our MSRV is
+// 1.65, when that method was stabilized.
+
+// TODO(#67): Once our MSRV is 1.65, remove this.
+trait RawPtrExt {
+    type Mut;
+    fn cast_mut(self) -> Self::Mut;
+}
+
+impl<T: ?Sized> RawPtrExt for *const T {
+    type Mut = *mut T;
+    #[allow(clippy::as_conversions)]
+    #[inline(always)]
+    fn cast_mut(self) -> *mut T {
+        self as *mut T
+    }
+}
+
+// A polyfill for `<*mut _>::cast_const` that we can use before our MSRV is
+// 1.65, when that method was stabilized.
+//
+// TODO(#67): Once our MSRV is 1.65, remove this.
+trait RawMutPtrExt {
+    type Const;
+    fn cast_const(self) -> Self::Const;
+}
+
+impl<T: ?Sized> RawMutPtrExt for *mut T {
+    type Const = *const T;
+    #[allow(clippy::as_conversions)]
+    #[inline(always)]
+    fn cast_const(self) -> *const T {
+        self as *const T
+    }
+}
+
+// A polyfill for `NonNull::slice_from_raw_parts` that we can use before our
+// MSRV is 1.70, when that function was stabilized.
+//
+// TODO(#67): Once our MSRV is 1.70, remove this.
+trait NonNullExt {
+    type SliceOfSelf;
+
+    fn slice_from_raw_parts(data: Self, len: usize) -> Self::SliceOfSelf;
+}
+
+impl<T> NonNullExt for NonNull<T> {
+    type SliceOfSelf = NonNull<[T]>;
+
+    #[inline(always)]
+    fn slice_from_raw_parts(data: Self, len: usize) -> NonNull<[T]> {
+        let ptr = ptr::slice_from_raw_parts_mut(data.as_ptr(), len);
+        // SAFETY: `ptr` is converted from `data`, which is non-null.
+        unsafe { NonNull::new_unchecked(ptr) }
+    }
+}
+
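A quick usage sketch of the three polyfills (illustrative only; within the crate the traits are private, and they mimic the inherent methods that arrive in Rust 1.65/1.70, which is exactly the overlap the `unstable_name_collisions` allow in `raw_from_ptr_len` above is guarding against):

```rust
use core::ptr::NonNull;

fn demo() {
    let mut value = 0u32;

    // `cast_mut`/`cast_const` are plain `as` casts under the hood, usable on
    // toolchains older than 1.65.
    let const_ptr: *const u32 = &value;
    let mut_ptr: *mut u32 = const_ptr.cast_mut();
    let _round_trip: *const u32 = mut_ptr.cast_const();

    // `NonNullExt::slice_from_raw_parts` builds a fat `NonNull<[u32]>` whose
    // metadata records the element count (here, 1).
    let data = NonNull::from(&mut value);
    let slice_ptr: NonNull<[u32]> = NonNullExt::slice_from_raw_parts(data, 1);
    // SAFETY: `slice_ptr` covers exactly the one `u32` borrowed above.
    assert_eq!(unsafe { slice_ptr.as_ref().len() }, 1);
}
```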
#[cfg(feature = "alloc")]
mod alloc_support {
    use alloc::vec::Vec;