@@ -183,17 +183,14 @@ use core::{
         NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize, Wrapping,
     },
     ops::{Deref, DerefMut},
-    ptr, slice,
+    ptr::{self, NonNull},
+    slice,
 };

 #[cfg(feature = "alloc")]
 extern crate alloc;
 #[cfg(feature = "alloc")]
-use {
-    alloc::boxed::Box,
-    alloc::vec::Vec,
-    core::{alloc::Layout, ptr::NonNull},
-};
+use {alloc::boxed::Box, alloc::vec::Vec, core::alloc::Layout};

 // This is a hack to allow zerocopy-derive derives to work in this crate. They
 // assume that zerocopy is linked as an extern crate, so they access items from
@@ -203,6 +200,135 @@ mod zerocopy {
     pub(crate) use crate::*;
 }

+/// When performing a byte-slice-to-type cast, is the type taken from the prefix
+/// of the byte slice or from the suffix of the byte slice?
+#[doc(hidden)]
+#[allow(missing_debug_implementations, missing_copy_implementations)]
+pub enum CastType {
+    Prefix,
+    Suffix,
+}
+
+/// A trait which carries information about a type's layout that is used by the
+/// internals of this crate.
+///
+/// This trait is not meant for consumption by code outside of this crate. While
+/// the normal semver stability guarantees apply with respect to which types
+/// implement this trait and which trait implementations are implied by this
+/// trait, no semver stability guarantees are made regarding its internals; they
+/// may change at any time, and code which makes use of them may break.
+///
+/// # Safety
+///
+/// This trait does not convey any safety guarantees to code outside this crate.
+pub unsafe trait KnownLayout: sealed::KnownLayoutSealed {
+    #[doc(hidden)]
+    const FIXED_PREFIX_SIZE: usize;
+    #[doc(hidden)]
+    const ALIGN: NonZeroUsize;
+    #[doc(hidden)]
+    const TRAILING_SLICE_ELEM_SIZE: Option<usize>;
+
+    /// SAFETY: The returned pointer has the same address and provenance as
+    /// `bytes`. If `Self` is a DST, the returned pointer's referent has `elems`
+    /// elements in its trailing slice.
+    #[doc(hidden)]
+    fn raw_from_ptr_len(bytes: NonNull<u8>, elems: usize) -> NonNull<Self>;
+}
+
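Not part of the patch: the three constants above feed straightforward arithmetic. The sketch below is illustrative only — the function name and signature are made up, and it ignores the alignment check that real cast logic must also perform — but it shows how `FIXED_PREFIX_SIZE` and `TRAILING_SLICE_ELEM_SIZE` determine where a `CastType::Prefix` or `CastType::Suffix` cast splits a byte slice, and how many trailing-slice elements fit.

// Illustrative sketch only (not zerocopy's code; names are hypothetical):
// given a type's layout constants and a byte-slice length, compute where a
// prefix or suffix cast would split the slice and how many trailing-slice
// elements fit. Alignment of the slice's address is ignored here.
fn split_for_cast(
    bytes_len: usize,
    fixed_prefix_size: usize,
    trailing_slice_elem_size: Option<usize>,
    cast_is_prefix: bool,
) -> Option<(usize, usize)> {
    // The fixed-size portion of the type must fit in the slice.
    let trailing_bytes = bytes_len.checked_sub(fixed_prefix_size)?;
    // A DST's trailing slice greedily consumes whole elements; a sized type
    // (or a DST with zero-sized elements) consumes only the fixed prefix.
    let (elems, consumed) = match trailing_slice_elem_size {
        Some(elem_size) if elem_size > 0 => {
            let elems = trailing_bytes / elem_size;
            (elems, fixed_prefix_size + elems * elem_size)
        }
        _ => (0, fixed_prefix_size),
    };
    // A prefix cast takes the object from the front of the slice, a suffix
    // cast from the back; the split point is measured from the front.
    let split = if cast_is_prefix { consumed } else { bytes_len - consumed };
    Some((split, elems))
}

fn main() {
    // An 8-byte sized type taken from the suffix of a 20-byte slice: the
    // split is at offset 12 and there is no trailing slice.
    assert_eq!(split_for_cast(20, 8, None, false), Some((12, 0)));
    // A DST with a 4-byte fixed prefix and 2-byte trailing elements taken
    // from the prefix of a 13-byte slice: 4 + 4 * 2 = 12 bytes are used.
    assert_eq!(split_for_cast(13, 4, Some(2), true), Some((12, 4)));
}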
+impl<T: KnownLayout> sealed::KnownLayoutSealed for [T] {}
+// SAFETY: See inline comments.
+unsafe impl<T: KnownLayout> KnownLayout for [T] {
+    // `[T]` is a slice type; it has no fields before the trailing slice.
+    const FIXED_PREFIX_SIZE: usize = 0;
+    // Slices have the same layout as the array they slice. [1] Arrays `[T; _]`
+    // have the same alignment as `T`. [2]
+    //
+    // [1] https://doc.rust-lang.org/reference/type-layout.html#slice-layout
+    // [2] https://doc.rust-lang.org/reference/type-layout.html#array-layout
+    const ALIGN: NonZeroUsize = if let Some(align) = NonZeroUsize::new(mem::align_of::<T>()) {
+        align
+    } else {
+        unreachable!()
+    };
+    const TRAILING_SLICE_ELEM_SIZE: Option<usize> = Some(mem::size_of::<T>());
+
+    // SAFETY: `.cast` preserves address and provenance. The returned pointer
+    // refers to an object with `elems` elements by construction.
+    #[inline(always)]
+    fn raw_from_ptr_len(data: NonNull<u8>, elems: usize) -> NonNull<Self> {
+        // TODO(#67): Remove this allow. See NonNullExt for more details.
+        #[allow(unstable_name_collisions)]
+        NonNull::slice_from_raw_parts(data.cast::<T>(), elems)
+    }
+}
+
+/// Implements `KnownLayout` for a sized type.
+macro_rules! impl_known_layout {
+    (const $constvar:ident : $constty:ty, $tyvar:ident $(: ?$optbound:ident)? => $ty:ty) => {
+        impl_known_layout!(@inner const $constvar: $constty, $tyvar $(: ?$optbound)? => $ty);
+    };
+    ($tyvar:ident $(: ?$optbound:ident)? => $ty:ty) => {
+        impl_known_layout!(@inner , $tyvar $(: ?$optbound)? => $ty);
+    };
+    ($ty:ty) => {
+        impl_known_layout!(@inner , => $ty);
+    };
+    ($($tyvar:ident $(: ?$optbound:ident)? => $ty:ty),*) => {
+        $(
+            impl_known_layout!(@inner , $tyvar $(: ?$optbound)? => $ty);
+        )*
+    };
+    ($($ty:ty),*) => {
+        $(
+            impl_known_layout!(@inner , => $ty);
+        )*
+    };
+    (@inner $(const $constvar:ident : $constty:ty)? , $($tyvar:ident $(: ?$optbound:ident)?)? => $ty:ty) => {
+        impl<$(const $constvar : $constty,)? $($tyvar $(: ?$optbound)?)?> sealed::KnownLayoutSealed for $ty {}
+        // SAFETY: See inline comments.
+        unsafe impl<$(const $constvar : $constty,)? $($tyvar $(: ?$optbound)?)?> KnownLayout for $ty {
+            const FIXED_PREFIX_SIZE: usize = mem::size_of::<$ty>();
+            const ALIGN: NonZeroUsize = if let Some(align) = NonZeroUsize::new(mem::align_of::<$ty>()) {
+                align
+            } else {
+                unreachable!()
+            };
+            // `T` is sized so it has no trailing slice.
+            const TRAILING_SLICE_ELEM_SIZE: Option<usize> = None;
+
+            // SAFETY: `.cast` preserves address and provenance.
+            #[inline(always)]
+            fn raw_from_ptr_len(bytes: NonNull<u8>, _elems: usize) -> NonNull<Self> {
+                bytes.cast::<Self>()
+            }
+        }
+    };
+}
+
+#[rustfmt::skip]
+impl_known_layout!(
+    (),
+    u8, i8, u16, i16, u32, i32, u64, i64, u128, i128, usize, isize, f32, f64,
+    bool, char,
+    NonZeroU8, NonZeroI8, NonZeroU16, NonZeroI16, NonZeroU32, NonZeroI32,
+    NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize
+);
+impl_known_layout!(T => Option<T>);
+impl_known_layout!(T: ?Sized => PhantomData<T>);
+impl_known_layout!(T => Wrapping<T>);
+impl_known_layout!(T => MaybeUninit<T>);
+impl_known_layout!(const N: usize, T => [T; N]);
+
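Not part of the patch: for sized types the macro's constants are plain `core::mem` facts. A minimal, runnable check of that reasoning (the assertions below are guarantees of Rust's layout rules, not crate internals):

use core::mem;
use core::num::NonZeroUsize;

fn main() {
    // For a sized type the macro uses `size_of` for FIXED_PREFIX_SIZE and
    // wraps `align_of` in a NonZeroUsize for ALIGN; alignment is never zero,
    // so the `unreachable!()` branch can never be taken.
    assert_eq!(mem::size_of::<u32>(), 4);
    assert!(NonZeroUsize::new(mem::align_of::<u32>()).is_some());

    // `[T; N]` is sized, so the whole array is the fixed prefix and there is
    // no trailing slice; arrays have the same alignment as their element.
    assert_eq!(mem::size_of::<[u16; 8]>(), 8 * mem::size_of::<u16>());
    assert_eq!(mem::align_of::<[u16; 8]>(), mem::align_of::<u16>());
}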
+safety_comment! {
+    /// SAFETY:
+    /// `str` and `ManuallyDrop<[T]>` have the same representations as `[u8]`
+    /// and `[T]` respectively. `str` has different bit validity than `[u8]`,
+    /// but that doesn't affect the soundness of this impl.
+    unsafe_impl_known_layout!(#[repr([u8])] str);
+    unsafe_impl_known_layout!(T: ?Sized + KnownLayout => #[repr(T)] ManuallyDrop<T>);
+}
+
 /// Types for which a sequence of bytes all set to zero represents a valid
 /// instance of the type.
 ///
@@ -1157,6 +1283,7 @@ mod simd {
                 use core::arch::$arch::{$($typ),*};

                 use crate::*;
+                impl_known_layout!($($typ),*);
                 safety_comment! {
                     /// SAFETY:
                     /// See comment on module definition for justification.
@@ -2261,7 +2388,8 @@ where
 }

 mod sealed {
-    pub trait Sealed {}
+    pub trait ByteSliceSealed {}
+    pub trait KnownLayoutSealed {}
 }

 // ByteSlice and ByteSliceMut abstract over [u8] references (&[u8], &mut [u8],
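Not part of the patch: the rename above just splits the usual sealed-trait pattern into one sealing trait per public trait. A standalone sketch of the pattern, with illustrative names and without the real `Deref`/`Sized` bounds:

mod sealed {
    pub trait ByteSliceSealed {}
}

// `ByteSlice` is nameable and usable outside the crate, but because
// `sealed::ByteSliceSealed` is unreachable there, only this crate can
// add implementations.
pub trait ByteSlice: sealed::ByteSliceSealed {}

impl sealed::ByteSliceSealed for &[u8] {}
impl ByteSlice for &[u8] {}

fn only_byte_slices<B: ByteSlice>(_: B) {}

fn main() {
    let bytes: &[u8] = &[1, 2, 3];
    only_byte_slices(bytes);
}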
@@ -2287,7 +2415,9 @@ mod sealed {
 ///
 /// [`Vec<u8>`]: alloc::vec::Vec
 /// [`split_at`]: crate::ByteSlice::split_at
-pub unsafe trait ByteSlice: Deref<Target = [u8]> + Sized + self::sealed::Sealed {
+pub unsafe trait ByteSlice:
+    Deref<Target = [u8]> + Sized + self::sealed::ByteSliceSealed
+{
     /// Gets a raw pointer to the first byte in the slice.
     #[inline]
     fn as_ptr(&self) -> *const u8 {
@@ -2318,7 +2448,7 @@ pub unsafe trait ByteSliceMut: ByteSlice + DerefMut {
     }
 }

-impl<'a> sealed::Sealed for &'a [u8] {}
+impl<'a> sealed::ByteSliceSealed for &'a [u8] {}
 // TODO(#61): Add a "SAFETY" comment and remove this `allow`.
 #[allow(clippy::undocumented_unsafe_blocks)]
 unsafe impl<'a> ByteSlice for &'a [u8] {
@@ -2328,7 +2458,7 @@ unsafe impl<'a> ByteSlice for &'a [u8] {
     }
 }

-impl<'a> sealed::Sealed for &'a mut [u8] {}
+impl<'a> sealed::ByteSliceSealed for &'a mut [u8] {}
 // TODO(#61): Add a "SAFETY" comment and remove this `allow`.
 #[allow(clippy::undocumented_unsafe_blocks)]
 unsafe impl<'a> ByteSlice for &'a mut [u8] {
@@ -2338,7 +2468,7 @@ unsafe impl<'a> ByteSlice for &'a mut [u8] {
     }
 }

-impl<'a> sealed::Sealed for cell::Ref<'a, [u8]> {}
+impl<'a> sealed::ByteSliceSealed for cell::Ref<'a, [u8]> {}
 // TODO(#61): Add a "SAFETY" comment and remove this `allow`.
 #[allow(clippy::undocumented_unsafe_blocks)]
 unsafe impl<'a> ByteSlice for cell::Ref<'a, [u8]> {
@@ -2348,7 +2478,7 @@ unsafe impl<'a> ByteSlice for cell::Ref<'a, [u8]> {
     }
 }

-impl<'a> sealed::Sealed for RefMut<'a, [u8]> {}
+impl<'a> sealed::ByteSliceSealed for RefMut<'a, [u8]> {}
 // TODO(#61): Add a "SAFETY" comment and remove this `allow`.
 #[allow(clippy::undocumented_unsafe_blocks)]
 unsafe impl<'a> ByteSlice for RefMut<'a, [u8]> {
@@ -2366,6 +2496,63 @@ unsafe impl<'a> ByteSliceMut for &'a mut [u8] {}
 #[allow(clippy::undocumented_unsafe_blocks)]
 unsafe impl<'a> ByteSliceMut for RefMut<'a, [u8]> {}

+// A polyfill for `<*const _>::cast_mut` that we can use before our MSRV is
+// 1.65, when that method was stabilized.
+//
+// TODO(#67): Once our MSRV is 1.65, remove this.
+trait RawPtrExt {
+    type Mut;
+    fn cast_mut(self) -> Self::Mut;
+}
+
+impl<T: ?Sized> RawPtrExt for *const T {
+    type Mut = *mut T;
+    #[allow(clippy::as_conversions)]
+    #[inline(always)]
+    fn cast_mut(self) -> *mut T {
+        self as *mut T
+    }
+}
+
+// A polyfill for `<*mut _>::cast_const` that we can use before our MSRV is
+// 1.65, when that method was stabilized.
+//
+// TODO(#67): Once our MSRV is 1.65, remove this.
+trait RawMutPtrExt {
+    type Const;
+    fn cast_const(self) -> Self::Const;
+}
+
+impl<T: ?Sized> RawMutPtrExt for *mut T {
+    type Const = *const T;
+    #[allow(clippy::as_conversions)]
+    #[inline(always)]
+    fn cast_const(self) -> *const T {
+        self as *const T
+    }
+}
+
+// A polyfill for `NonNull::slice_from_raw_parts` that we can use before our
+// MSRV is 1.70, when that function was stabilized.
+//
+// TODO(#67): Once our MSRV is 1.70, remove this.
+trait NonNullExt {
+    type SliceOfSelf;
+
+    fn slice_from_raw_parts(data: Self, len: usize) -> Self::SliceOfSelf;
+}
+
+impl<T> NonNullExt for NonNull<T> {
+    type SliceOfSelf = NonNull<[T]>;
+
+    #[inline(always)]
+    fn slice_from_raw_parts(data: Self, len: usize) -> NonNull<[T]> {
+        let ptr = ptr::slice_from_raw_parts_mut(data.as_ptr(), len);
+        // SAFETY: `ptr` is converted from `data`, which is non-null.
+        unsafe { NonNull::new_unchecked(ptr) }
+    }
+}
+
 #[cfg(feature = "alloc")]
 mod alloc_support {
     use alloc::vec::Vec;
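Not part of the patch: a small, runnable sketch of what the three polyfills above forward to, using only stable standard-library APIs, so their behavior can be checked in isolation:

use core::ptr::{self, NonNull};

fn main() {
    let mut buf = [0u16; 4];

    // `cast_mut`/`cast_const` are just `as` casts between raw-pointer
    // mutabilities; the address and provenance are unchanged.
    let p_const: *const u16 = buf.as_ptr();
    let p_mut: *mut u16 = p_const as *mut u16;
    assert_eq!(p_const as usize, p_mut as usize);

    // `NonNull::slice_from_raw_parts(data, len)` pairs a thin element pointer
    // with a length to form a fat `NonNull<[T]>` without reading memory,
    // exactly as the polyfill does via `ptr::slice_from_raw_parts_mut`.
    let data = NonNull::new(buf.as_mut_ptr()).unwrap();
    let raw: *mut [u16] = ptr::slice_from_raw_parts_mut(data.as_ptr(), buf.len());
    let fat: NonNull<[u16]> = NonNull::new(raw).unwrap();

    // SAFETY: `fat` points at `buf`, which is live, aligned, and exactly
    // `buf.len()` elements long, and no conflicting borrow is active.
    let len = unsafe { fat.as_ref().len() };
    assert_eq!(len, 4);
}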