xref: /aosp_15_r20/external/cronet/third_party/rust/chromium_crates_io/vendor/bytemuck-1.15.0/src/allocation.rs (revision 6777b5387eb2ff775bb5750e3f5d96f37fb7352b)
1 #![cfg(feature = "extern_crate_alloc")]
2 
3 //! Stuff to boost things in the `alloc` crate.
4 //!
5 //! * You must enable the `extern_crate_alloc` feature of `bytemuck` or you will
6 //!   not be able to use this module! This is generally done by adding the
7 //!   feature to the dependency in Cargo.toml like so:
8 //!
9 //!   `bytemuck = { version = "VERSION_YOU_ARE_USING", features =
10 //! ["extern_crate_alloc"]}`
11 
12 use super::*;
13 #[cfg(target_has_atomic = "ptr")]
14 use alloc::sync::Arc;
15 use alloc::{
16   alloc::{alloc_zeroed, Layout},
17   boxed::Box,
18   rc::Rc,
19   vec,
20   vec::Vec,
21 };
22 use core::ops::{Deref, DerefMut};
23 
24 /// As [`try_cast_box`](try_cast_box), but unwraps for you.
25 #[inline]
cast_box<A: NoUninit, B: AnyBitPattern>(input: Box<A>) -> Box<B>26 pub fn cast_box<A: NoUninit, B: AnyBitPattern>(input: Box<A>) -> Box<B> {
27   try_cast_box(input).map_err(|(e, _v)| e).unwrap()
28 }
29 
30 /// Attempts to cast the content type of a [`Box`](alloc::boxed::Box).
31 ///
32 /// On failure you get back an error along with the starting `Box`.
33 ///
34 /// ## Failure
35 ///
36 /// * The start and end content type of the `Box` must have the exact same
37 ///   alignment.
38 /// * The start and end size of the `Box` must have the exact same size.
39 #[inline]
try_cast_box<A: NoUninit, B: AnyBitPattern>( input: Box<A>, ) -> Result<Box<B>, (PodCastError, Box<A>)>40 pub fn try_cast_box<A: NoUninit, B: AnyBitPattern>(
41   input: Box<A>,
42 ) -> Result<Box<B>, (PodCastError, Box<A>)> {
43   if align_of::<A>() != align_of::<B>() {
44     Err((PodCastError::AlignmentMismatch, input))
45   } else if size_of::<A>() != size_of::<B>() {
46     Err((PodCastError::SizeMismatch, input))
47   } else {
48     // Note(Lokathor): This is much simpler than with the Vec casting!
49     let ptr: *mut B = Box::into_raw(input) as *mut B;
50     Ok(unsafe { Box::from_raw(ptr) })
51   }
52 }
53 
/// Allocates a `Box<T>` with all of the contents being zeroed out.
///
/// This uses the global allocator to create a zeroed allocation and _then_
/// turns it into a Box. In other words, it's 100% assured that the zeroed data
/// won't be put temporarily on the stack. You can make a box of any size
/// without fear of a stack overflow.
///
/// ## Failure
///
/// This fails if the allocation fails.
#[inline]
pub fn try_zeroed_box<T: Zeroable>() -> Result<Box<T>, ()> {
  if size_of::<T>() == 0 {
    // Zero-sized types: `Box` owns no memory for a ZST, so an aligned,
    // non-null dangling pointer is the correct representation.
    // This will not allocate but simply create a dangling pointer.
    let dangling = core::ptr::NonNull::dangling().as_ptr();
    return Ok(unsafe { Box::from_raw(dangling) });
  }
  let layout = Layout::new::<T>();
  // SAFETY: `layout` has non-zero size (ZST case handled above), as
  // `alloc_zeroed` requires.
  let ptr = unsafe { alloc_zeroed(layout) };
  if ptr.is_null() {
    // we don't know what the error is because `alloc_zeroed` is a dumb API
    Err(())
  } else {
    // SAFETY: `ptr` is non-null, was allocated with `T`'s layout, and the
    // all-zero bit pattern is valid for `T` per the `Zeroable` bound.
    Ok(unsafe { Box::<T>::from_raw(ptr as *mut T) })
  }
}
80 
81 /// As [`try_zeroed_box`], but unwraps for you.
82 #[inline]
zeroed_box<T: Zeroable>() -> Box<T>83 pub fn zeroed_box<T: Zeroable>() -> Box<T> {
84   try_zeroed_box().unwrap()
85 }
86 
87 /// Allocates a `Vec<T>` of length and capacity exactly equal to `length` and
88 /// all elements zeroed.
89 ///
90 /// ## Failure
91 ///
92 /// This fails if the allocation fails, or if a layout cannot be calculated for
93 /// the allocation.
try_zeroed_vec<T: Zeroable>(length: usize) -> Result<Vec<T>, ()>94 pub fn try_zeroed_vec<T: Zeroable>(length: usize) -> Result<Vec<T>, ()> {
95   if length == 0 {
96     Ok(Vec::new())
97   } else {
98     let boxed_slice = try_zeroed_slice_box(length)?;
99     Ok(boxed_slice.into_vec())
100   }
101 }
102 
103 /// As [`try_zeroed_vec`] but unwraps for you
zeroed_vec<T: Zeroable>(length: usize) -> Vec<T>104 pub fn zeroed_vec<T: Zeroable>(length: usize) -> Vec<T> {
105   try_zeroed_vec(length).unwrap()
106 }
107 
/// Allocates a `Box<[T]>` with all contents being zeroed out.
///
/// This uses the global allocator to create a zeroed allocation and _then_
/// turns it into a Box. In other words, it's 100% assured that the zeroed data
/// won't be put temporarily on the stack. You can make a box of any size
/// without fear of a stack overflow.
///
/// ## Failure
///
/// This fails if the allocation fails, or if a layout cannot be calculated for
/// the allocation.
#[inline]
pub fn try_zeroed_slice_box<T: Zeroable>(
  length: usize,
) -> Result<Box<[T]>, ()> {
  if size_of::<T>() == 0 || length == 0 {
    // This will not allocate but simply create a dangling slice pointer.
    // (Boxed slices of ZSTs, and empty boxed slices, own no memory; the
    // length metadata is carried by the fat pointer itself.)
    let dangling = core::ptr::NonNull::dangling().as_ptr();
    let dangling_slice = core::ptr::slice_from_raw_parts_mut(dangling, length);
    return Ok(unsafe { Box::from_raw(dangling_slice) });
  }
  // `Layout::array` errors when the total byte size would overflow.
  let layout = core::alloc::Layout::array::<T>(length).map_err(|_| ())?;
  // SAFETY: `layout` has non-zero size — both `size_of::<T>()` and `length`
  // were checked above.
  let ptr = unsafe { alloc_zeroed(layout) };
  if ptr.is_null() {
    // we don't know what the error is because `alloc_zeroed` is a dumb API
    Err(())
  } else {
    // SAFETY: `ptr` is non-null and points to `length` zeroed `T`s; the
    // all-zero bit pattern is valid for `T` per the `Zeroable` bound.
    let slice =
      unsafe { core::slice::from_raw_parts_mut(ptr as *mut T, length) };
    Ok(unsafe { Box::<[T]>::from_raw(slice) })
  }
}
140 
141 /// As [`try_zeroed_slice_box`](try_zeroed_slice_box), but unwraps for you.
zeroed_slice_box<T: Zeroable>(length: usize) -> Box<[T]>142 pub fn zeroed_slice_box<T: Zeroable>(length: usize) -> Box<[T]> {
143   try_zeroed_slice_box(length).unwrap()
144 }
145 
146 /// As [`try_cast_slice_box`](try_cast_slice_box), but unwraps for you.
147 #[inline]
cast_slice_box<A: NoUninit, B: AnyBitPattern>( input: Box<[A]>, ) -> Box<[B]>148 pub fn cast_slice_box<A: NoUninit, B: AnyBitPattern>(
149   input: Box<[A]>,
150 ) -> Box<[B]> {
151   try_cast_slice_box(input).map_err(|(e, _v)| e).unwrap()
152 }
153 
/// Attempts to cast the content type of a `Box<[T]>`.
///
/// On failure you get back an error along with the starting `Box<[T]>`.
///
/// ## Failure
///
/// * The start and end content type of the `Box<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Box<[T]>` must be the
///   exact same.
#[inline]
pub fn try_cast_slice_box<A: NoUninit, B: AnyBitPattern>(
  input: Box<[A]>,
) -> Result<Box<[B]>, (PodCastError, Box<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    if size_of::<A>() * input.len() % size_of::<B>() != 0 {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::SizeMismatch, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Box
      // NOTE: This is a valid operation because according to the docs of
      // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc
      // the block must be the same Layout that is used to dealloc the block.
      // Luckily, Layout only stores two things, the alignment, and the size in
      // bytes. So as long as both of those stay the same, the Layout will
      // remain a valid input to dealloc.
      let length = size_of::<A>() * input.len() / size_of::<B>();
      let box_ptr: *mut A = Box::into_raw(input) as *mut A;
      // Re-assemble a fat pointer carrying the recomputed element count.
      let ptr: *mut [B] =
        unsafe { core::slice::from_raw_parts_mut(box_ptr as *mut B, length) };
      // SAFETY: same allocation, same total byte size, same alignment.
      Ok(unsafe { Box::<[B]>::from_raw(ptr) })
    }
  } else {
    // Element sizes match, so the element count is unchanged; only the
    // pointee type of the fat pointer changes.
    let box_ptr: *mut [A] = Box::into_raw(input);
    let ptr: *mut [B] = box_ptr as *mut [B];
    Ok(unsafe { Box::<[B]>::from_raw(ptr) })
  }
}
196 
197 /// As [`try_cast_vec`](try_cast_vec), but unwraps for you.
198 #[inline]
cast_vec<A: NoUninit, B: AnyBitPattern>(input: Vec<A>) -> Vec<B>199 pub fn cast_vec<A: NoUninit, B: AnyBitPattern>(input: Vec<A>) -> Vec<B> {
200   try_cast_vec(input).map_err(|(e, _v)| e).unwrap()
201 }
202 
/// Attempts to cast the content type of a [`Vec`](alloc::vec::Vec).
///
/// On failure you get back an error along with the starting `Vec`.
///
/// ## Failure
///
/// * The start and end content type of the `Vec` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Vec` must be the exact
///   same.
/// * The start and end capacity in bytes of the `Vec` must be the exact same.
#[inline]
pub fn try_cast_vec<A: NoUninit, B: AnyBitPattern>(
  input: Vec<A>,
) -> Result<Vec<B>, (PodCastError, Vec<A>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    if size_of::<A>() * input.len() % size_of::<B>() != 0
      || size_of::<A>() * input.capacity() % size_of::<B>() != 0
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      // Note that we have to pay special attention to make sure that both
      // length and capacity are valid under B, as we do not want to
      // change which bytes are considered part of the initialized slice
      // of the Vec
      Err((PodCastError::SizeMismatch, input))
    } else {
      // Because the size is an exact multiple, we can now change the length and
      // capacity and recreate the Vec
      // NOTE: This is a valid operation because according to the docs of
      // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc
      // the block must be the same Layout that is used to dealloc the block.
      // Luckily, Layout only stores two things, the alignment, and the size in
      // bytes. So as long as both of those stay the same, the Layout will
      // remain a valid input to dealloc.

      // Note(Lokathor): First we record the length and capacity, which don't
      // have any secret provenance metadata.
      let length: usize = size_of::<A>() * input.len() / size_of::<B>();
      let capacity: usize = size_of::<A>() * input.capacity() / size_of::<B>();
      // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with
      // ManuallyDrop, because if we used `core::mem::forget` after taking the
      // pointer then that would invalidate our pointer. In nightly there's a
      // "into raw parts" method, which we can switch to eventually.
      let mut manual_drop_vec = ManuallyDrop::new(input);
      let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr();
      let ptr: *mut B = vec_ptr as *mut B;
      Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) })
    }
  } else {
    // Note(Lokathor): First we record the length and capacity, which don't have
    // any secret provenance metadata.
    let length: usize = input.len();
    let capacity: usize = input.capacity();
    // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with
    // ManuallyDrop, because if we used `core::mem::forget` after taking the
    // pointer then that would invalidate our pointer. In nightly there's a
    // "into raw parts" method, which we can switch to eventually.
    let mut manual_drop_vec = ManuallyDrop::new(input);
    let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr();
    let ptr: *mut B = vec_ptr as *mut B;
    Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) })
  }
}
269 
270 /// This "collects" a slice of pod data into a vec of a different pod type.
271 ///
272 /// Unlike with [`cast_slice`] and [`cast_slice_mut`], this will always work.
273 ///
274 /// The output vec will be of a minimal size/capacity to hold the slice given.
275 ///
276 /// ```rust
277 /// # use bytemuck::*;
278 /// let halfwords: [u16; 4] = [5, 6, 7, 8];
279 /// let vec_of_words: Vec<u32> = pod_collect_to_vec(&halfwords);
280 /// if cfg!(target_endian = "little") {
281 ///   assert_eq!(&vec_of_words[..], &[0x0006_0005, 0x0008_0007][..])
282 /// } else {
283 ///   assert_eq!(&vec_of_words[..], &[0x0005_0006, 0x0007_0008][..])
284 /// }
285 /// ```
pod_collect_to_vec<A: NoUninit, B: NoUninit + AnyBitPattern>( src: &[A], ) -> Vec<B>286 pub fn pod_collect_to_vec<A: NoUninit, B: NoUninit + AnyBitPattern>(
287   src: &[A],
288 ) -> Vec<B> {
289   let src_size = size_of_val(src);
290   // Note(Lokathor): dst_count is rounded up so that the dest will always be at
291   // least as many bytes as the src.
292   let dst_count = src_size / size_of::<B>()
293     + if src_size % size_of::<B>() != 0 { 1 } else { 0 };
294   let mut dst = vec![B::zeroed(); dst_count];
295 
296   let src_bytes: &[u8] = cast_slice(src);
297   let dst_bytes: &mut [u8] = cast_slice_mut(&mut dst[..]);
298   dst_bytes[..src_size].copy_from_slice(src_bytes);
299   dst
300 }
301 
302 /// As [`try_cast_rc`](try_cast_rc), but unwraps for you.
303 #[inline]
cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>( input: Rc<A>, ) -> Rc<B>304 pub fn cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
305   input: Rc<A>,
306 ) -> Rc<B> {
307   try_cast_rc(input).map_err(|(e, _v)| e).unwrap()
308 }
309 
310 /// Attempts to cast the content type of a [`Rc`](alloc::rc::Rc).
311 ///
312 /// On failure you get back an error along with the starting `Rc`.
313 ///
314 /// The bounds on this function are the same as [`cast_mut`], because a user
315 /// could call `Rc::get_unchecked_mut` on the output, which could be observable
316 /// in the input.
317 ///
318 /// ## Failure
319 ///
320 /// * The start and end content type of the `Rc` must have the exact same
321 ///   alignment.
322 /// * The start and end size of the `Rc` must have the exact same size.
323 #[inline]
try_cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>( input: Rc<A>, ) -> Result<Rc<B>, (PodCastError, Rc<A>)>324 pub fn try_cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
325   input: Rc<A>,
326 ) -> Result<Rc<B>, (PodCastError, Rc<A>)> {
327   if align_of::<A>() != align_of::<B>() {
328     Err((PodCastError::AlignmentMismatch, input))
329   } else if size_of::<A>() != size_of::<B>() {
330     Err((PodCastError::SizeMismatch, input))
331   } else {
332     // Safety: Rc::from_raw requires size and alignment match, which is met.
333     let ptr: *const B = Rc::into_raw(input) as *const B;
334     Ok(unsafe { Rc::from_raw(ptr) })
335   }
336 }
337 
338 /// As [`try_cast_arc`](try_cast_arc), but unwraps for you.
339 #[inline]
340 #[cfg(target_has_atomic = "ptr")]
cast_arc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>( input: Arc<A>, ) -> Arc<B>341 pub fn cast_arc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
342   input: Arc<A>,
343 ) -> Arc<B> {
344   try_cast_arc(input).map_err(|(e, _v)| e).unwrap()
345 }
346 
347 /// Attempts to cast the content type of a [`Arc`](alloc::sync::Arc).
348 ///
349 /// On failure you get back an error along with the starting `Arc`.
350 ///
351 /// The bounds on this function are the same as [`cast_mut`], because a user
352 /// could call `Rc::get_unchecked_mut` on the output, which could be observable
353 /// in the input.
354 ///
355 /// ## Failure
356 ///
357 /// * The start and end content type of the `Arc` must have the exact same
358 ///   alignment.
359 /// * The start and end size of the `Arc` must have the exact same size.
360 #[inline]
361 #[cfg(target_has_atomic = "ptr")]
try_cast_arc< A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern, >( input: Arc<A>, ) -> Result<Arc<B>, (PodCastError, Arc<A>)>362 pub fn try_cast_arc<
363   A: NoUninit + AnyBitPattern,
364   B: NoUninit + AnyBitPattern,
365 >(
366   input: Arc<A>,
367 ) -> Result<Arc<B>, (PodCastError, Arc<A>)> {
368   if align_of::<A>() != align_of::<B>() {
369     Err((PodCastError::AlignmentMismatch, input))
370   } else if size_of::<A>() != size_of::<B>() {
371     Err((PodCastError::SizeMismatch, input))
372   } else {
373     // Safety: Arc::from_raw requires size and alignment match, which is met.
374     let ptr: *const B = Arc::into_raw(input) as *const B;
375     Ok(unsafe { Arc::from_raw(ptr) })
376   }
377 }
378 
379 /// As [`try_cast_slice_rc`](try_cast_slice_rc), but unwraps for you.
380 #[inline]
cast_slice_rc< A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern, >( input: Rc<[A]>, ) -> Rc<[B]>381 pub fn cast_slice_rc<
382   A: NoUninit + AnyBitPattern,
383   B: NoUninit + AnyBitPattern,
384 >(
385   input: Rc<[A]>,
386 ) -> Rc<[B]> {
387   try_cast_slice_rc(input).map_err(|(e, _v)| e).unwrap()
388 }
389 
/// Attempts to cast the content type of a `Rc<[T]>`.
///
/// On failure you get back an error along with the starting `Rc<[T]>`.
///
/// The bounds on this function are the same as [`cast_mut`], because a user
/// could call `Rc::get_unchecked_mut` on the output, which could be observable
/// in the input.
///
/// ## Failure
///
/// * The start and end content type of the `Rc<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Rc<[T]>` must be the exact
///   same.
#[inline]
pub fn try_cast_slice_rc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Rc<[A]>,
) -> Result<Rc<[B]>, (PodCastError, Rc<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    if size_of::<A>() * input.len() % size_of::<B>() != 0 {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::SizeMismatch, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Rc
      // NOTE: This is a valid operation because according to the docs of
      // std::rc::Rc::from_raw(), the type U that was in the original Rc<U>
      // acquired from Rc::into_raw() must have the same size alignment and
      // size of the type T in the new Rc<T>. So as long as both the size
      // and alignment stay the same, the Rc will remain a valid Rc.
      let length = size_of::<A>() * input.len() / size_of::<B>();
      let rc_ptr: *const A = Rc::into_raw(input) as *const A;
      // Must use ptr::slice_from_raw_parts, because we cannot make an
      // intermediate const reference, because it has mutable provenance,
      // nor an intermediate mutable reference, because it could be aliased.
      let ptr = core::ptr::slice_from_raw_parts(rc_ptr as *const B, length);
      Ok(unsafe { Rc::<[B]>::from_raw(ptr) })
    }
  } else {
    // Element sizes match, so the element count is unchanged; only the
    // pointee type of the fat pointer changes.
    let rc_ptr: *const [A] = Rc::into_raw(input);
    let ptr: *const [B] = rc_ptr as *const [B];
    Ok(unsafe { Rc::<[B]>::from_raw(ptr) })
  }
}
440 
441 /// As [`try_cast_slice_arc`](try_cast_slice_arc), but unwraps for you.
442 #[inline]
443 #[cfg(target_has_atomic = "ptr")]
cast_slice_arc< A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern, >( input: Arc<[A]>, ) -> Arc<[B]>444 pub fn cast_slice_arc<
445   A: NoUninit + AnyBitPattern,
446   B: NoUninit + AnyBitPattern,
447 >(
448   input: Arc<[A]>,
449 ) -> Arc<[B]> {
450   try_cast_slice_arc(input).map_err(|(e, _v)| e).unwrap()
451 }
452 
/// Attempts to cast the content type of a `Arc<[T]>`.
///
/// On failure you get back an error along with the starting `Arc<[T]>`.
///
/// The bounds on this function are the same as [`cast_mut`], because a user
/// could call `Rc::get_unchecked_mut` on the output, which could be observable
/// in the input.
///
/// ## Failure
///
/// * The start and end content type of the `Arc<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Arc<[T]>` must be the
///   exact same.
#[inline]
#[cfg(target_has_atomic = "ptr")]
pub fn try_cast_slice_arc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Arc<[A]>,
) -> Result<Arc<[B]>, (PodCastError, Arc<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    if size_of::<A>() * input.len() % size_of::<B>() != 0 {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::SizeMismatch, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Arc
      // NOTE: This is a valid operation because according to the docs of
      // std::sync::Arc::from_raw(), the type U that was in the original Arc<U>
      // acquired from Arc::into_raw() must have the same size alignment and
      // size of the type T in the new Arc<T>. So as long as both the size
      // and alignment stay the same, the Arc will remain a valid Arc.
      let length = size_of::<A>() * input.len() / size_of::<B>();
      let arc_ptr: *const A = Arc::into_raw(input) as *const A;
      // Must use ptr::slice_from_raw_parts, because we cannot make an
      // intermediate const reference, because it has mutable provenance,
      // nor an intermediate mutable reference, because it could be aliased.
      let ptr = core::ptr::slice_from_raw_parts(arc_ptr as *const B, length);
      Ok(unsafe { Arc::<[B]>::from_raw(ptr) })
    }
  } else {
    // Element sizes match, so the element count is unchanged; only the
    // pointee type of the fat pointer changes.
    let arc_ptr: *const [A] = Arc::into_raw(input);
    let ptr: *const [B] = arc_ptr as *const [B];
    Ok(unsafe { Arc::<[B]>::from_raw(ptr) })
  }
}
504 
/// An extension trait for `TransparentWrapper` and alloc types.
///
/// Blanket-implemented for every `TransparentWrapper<Inner>`, so these
/// methods are available on any wrapper type without extra impls.
pub trait TransparentWrapperAlloc<Inner: ?Sized>:
  TransparentWrapper<Inner>
{
  /// Convert a vec of the inner type into a vec of the wrapper type.
  fn wrap_vec(s: Vec<Inner>) -> Vec<Self>
  where
    Self: Sized,
    Inner: Sized,
  {
    // Pre-forget the original Vec so its buffer is not freed; ownership is
    // transferred to the Vec rebuilt below.
    let mut s = core::mem::ManuallyDrop::new(s);

    let length = s.len();
    let capacity = s.capacity();
    let ptr = s.as_mut_ptr();

    unsafe {
      // SAFETY:
      // * ptr comes from Vec (and will not be double-dropped)
      // * the two types have the identical representation
      // * the len and capacity fields are valid
      Vec::from_raw_parts(ptr as *mut Self, length, capacity)
    }
  }

  /// Convert a box to the inner type into a box to the wrapper
  /// type.
  #[inline]
  fn wrap_box(s: Box<Inner>) -> Box<Self> {
    // Guards against a thin/fat pointer mismatch between Inner and Self
    // before the pointer-sized transmute below.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the sizes are unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations
      // * Box is guaranteed to have representation identical to a (non-null)
      //   pointer
      // * The pointer comes from a box (and thus satisfies all safety
      //   requirements of Box)
      let inner_ptr: *mut Inner = Box::into_raw(s);
      let wrapper_ptr: *mut Self = transmute!(inner_ptr);
      Box::from_raw(wrapper_ptr)
    }
  }

  /// Convert an [`Rc`](alloc::rc::Rc) to the inner type into an `Rc` to the
  /// wrapper type.
  #[inline]
  fn wrap_rc(s: Rc<Inner>) -> Rc<Self> {
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Rc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Rc::from_raw
      let inner_ptr: *const Inner = Rc::into_raw(s);
      let wrapper_ptr: *const Self = transmute!(inner_ptr);
      Rc::from_raw(wrapper_ptr)
    }
  }

  /// Convert an [`Arc`](alloc::sync::Arc) to the inner type into an `Arc` to
  /// the wrapper type.
  #[inline]
  #[cfg(target_has_atomic = "ptr")]
  fn wrap_arc(s: Arc<Inner>) -> Arc<Self> {
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Arc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Arc::from_raw
      let inner_ptr: *const Inner = Arc::into_raw(s);
      let wrapper_ptr: *const Self = transmute!(inner_ptr);
      Arc::from_raw(wrapper_ptr)
    }
  }

  /// Convert a vec of the wrapper type into a vec of the inner type.
  fn peel_vec(s: Vec<Self>) -> Vec<Inner>
  where
    Self: Sized,
    Inner: Sized,
  {
    // Mirror image of `wrap_vec`: pre-forget, then rebuild with the inner
    // element type.
    let mut s = core::mem::ManuallyDrop::new(s);

    let length = s.len();
    let capacity = s.capacity();
    let ptr = s.as_mut_ptr();

    unsafe {
      // SAFETY:
      // * ptr comes from Vec (and will not be double-dropped)
      // * the two types have the identical representation
      // * the len and capacity fields are valid
      Vec::from_raw_parts(ptr as *mut Inner, length, capacity)
    }
  }

  /// Convert a box to the wrapper type into a box to the inner
  /// type.
  #[inline]
  fn peel_box(s: Box<Self>) -> Box<Inner> {
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the sizes are unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations
      // * Box is guaranteed to have representation identical to a (non-null)
      //   pointer
      // * The pointer comes from a box (and thus satisfies all safety
      //   requirements of Box)
      let wrapper_ptr: *mut Self = Box::into_raw(s);
      let inner_ptr: *mut Inner = transmute!(wrapper_ptr);
      Box::from_raw(inner_ptr)
    }
  }

  /// Convert an [`Rc`](alloc::rc::Rc) to the wrapper type into an `Rc` to the
  /// inner type.
  #[inline]
  fn peel_rc(s: Rc<Self>) -> Rc<Inner> {
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Rc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Rc::from_raw
      let wrapper_ptr: *const Self = Rc::into_raw(s);
      let inner_ptr: *const Inner = transmute!(wrapper_ptr);
      Rc::from_raw(inner_ptr)
    }
  }

  /// Convert an [`Arc`](alloc::sync::Arc) to the wrapper type into an `Arc` to
  /// the inner type.
  #[inline]
  #[cfg(target_has_atomic = "ptr")]
  fn peel_arc(s: Arc<Self>) -> Arc<Inner> {
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Arc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Arc::from_raw
      let wrapper_ptr: *const Self = Arc::into_raw(s);
      let inner_ptr: *const Inner = transmute!(wrapper_ptr);
      Arc::from_raw(inner_ptr)
    }
  }
}
689 
// Blanket impl: every `TransparentWrapper<I>` automatically gets the alloc
// extension methods. The trait has only default methods, so the body is empty.
impl<I: ?Sized, T: ?Sized + TransparentWrapper<I>> TransparentWrapperAlloc<I>
  for T
{
}
694 
/// As `Box<[u8]>`, but remembers the original alignment.
///
/// Dropping a `BoxBytes` deallocates using the stored `layout`, so the
/// original allocation's alignment is preserved through the round trip.
pub struct BoxBytes {
  // SAFETY: `ptr` is owned, was allocated with `layout`, and points to
  // `layout.size()` initialized bytes.
  ptr: NonNull<u8>,
  // The exact layout of the original allocation (size + alignment).
  layout: Layout,
}
702 
impl Deref for BoxBytes {
  type Target = [u8];

  /// Views the owned allocation as a slice of `layout.size()` bytes.
  fn deref(&self) -> &Self::Target {
    // SAFETY: See type invariant.
    unsafe {
      core::slice::from_raw_parts(self.ptr.as_ptr(), self.layout.size())
    }
  }
}
713 
impl DerefMut for BoxBytes {
  /// Mutably views the owned allocation as a slice of `layout.size()` bytes.
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: See type invariant.
    unsafe {
      core::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.layout.size())
    }
  }
}
722 
impl Drop for BoxBytes {
  /// Frees the allocation with the remembered `layout` — this is why
  /// `BoxBytes` stores the `Layout` rather than just a length.
  fn drop(&mut self) {
    // SAFETY: See type invariant.
    unsafe { alloc::alloc::dealloc(self.ptr.as_ptr(), self.layout) };
  }
}
729 
730 impl<T: ?Sized + sealed::BoxBytesOf> From<Box<T>> for BoxBytes {
from(value: Box<T>) -> Self731   fn from(value: Box<T>) -> Self {
732     value.box_bytes_of()
733   }
734 }
735 
// Private module so the traits below can bound the public free functions
// without being implementable (or even nameable) outside this crate.
mod sealed {
  use crate::{BoxBytes, PodCastError};
  use alloc::boxed::Box;

  /// Types whose boxed form can be converted into a type-erased [`BoxBytes`].
  pub trait BoxBytesOf {
    fn box_bytes_of(self: Box<Self>) -> BoxBytes;
  }

  /// Types that can be fallibly reconstructed from a [`BoxBytes`]; on
  /// failure the original `BoxBytes` is returned alongside the error.
  pub trait FromBoxBytes {
    fn try_from_box_bytes(
      bytes: BoxBytes,
    ) -> Result<Box<Self>, (PodCastError, BoxBytes)>;
  }
}
750 
751 impl<T: NoUninit> sealed::BoxBytesOf for T {
box_bytes_of(self: Box<Self>) -> BoxBytes752   fn box_bytes_of(self: Box<Self>) -> BoxBytes {
753     let layout = Layout::new::<T>();
754     let ptr = Box::into_raw(self) as *mut u8;
755     // SAFETY: Box::into_raw() returns a non-null pointer.
756     let ptr = unsafe { NonNull::new_unchecked(ptr) };
757     BoxBytes { ptr, layout }
758   }
759 }
760 
761 impl<T: NoUninit> sealed::BoxBytesOf for [T] {
box_bytes_of(self: Box<Self>) -> BoxBytes762   fn box_bytes_of(self: Box<Self>) -> BoxBytes {
763     let layout = Layout::for_value::<[T]>(&self);
764     let ptr = Box::into_raw(self) as *mut u8;
765     // SAFETY: Box::into_raw() returns a non-null pointer.
766     let ptr = unsafe { NonNull::new_unchecked(ptr) };
767     BoxBytes { ptr, layout }
768   }
769 }
770 
771 impl sealed::BoxBytesOf for str {
box_bytes_of(self: Box<Self>) -> BoxBytes772   fn box_bytes_of(self: Box<Self>) -> BoxBytes {
773     self.into_boxed_bytes().box_bytes_of()
774   }
775 }
776 
777 impl<T: AnyBitPattern> sealed::FromBoxBytes for T {
try_from_box_bytes( bytes: BoxBytes, ) -> Result<Box<Self>, (PodCastError, BoxBytes)>778   fn try_from_box_bytes(
779     bytes: BoxBytes,
780   ) -> Result<Box<Self>, (PodCastError, BoxBytes)> {
781     let layout = Layout::new::<T>();
782     if bytes.layout.align() != layout.align() {
783       Err((PodCastError::AlignmentMismatch, bytes))
784     } else if bytes.layout.size() != layout.size() {
785       Err((PodCastError::SizeMismatch, bytes))
786     } else {
787       let (ptr, _) = bytes.into_raw_parts();
788       // SAFETY: See BoxBytes type invariant.
789       Ok(unsafe { Box::from_raw(ptr.as_ptr() as *mut T) })
790     }
791   }
792 }
793 
794 impl<T: AnyBitPattern> sealed::FromBoxBytes for [T] {
try_from_box_bytes( bytes: BoxBytes, ) -> Result<Box<Self>, (PodCastError, BoxBytes)>795   fn try_from_box_bytes(
796     bytes: BoxBytes,
797   ) -> Result<Box<Self>, (PodCastError, BoxBytes)> {
798     let single_layout = Layout::new::<T>();
799     if bytes.layout.align() != single_layout.align() {
800       Err((PodCastError::AlignmentMismatch, bytes))
801     } else if single_layout.size() == 0 {
802       Err((PodCastError::SizeMismatch, bytes))
803     } else if bytes.layout.size() % single_layout.size() != 0 {
804       Err((PodCastError::OutputSliceWouldHaveSlop, bytes))
805     } else {
806       let (ptr, layout) = bytes.into_raw_parts();
807       let length = layout.size() / single_layout.size();
808       let ptr =
809         core::ptr::slice_from_raw_parts_mut(ptr.as_ptr() as *mut T, length);
810       // SAFETY: See BoxBytes type invariant.
811       Ok(unsafe { Box::from_raw(ptr) })
812     }
813   }
814 }
815 
816 /// Re-interprets `Box<T>` as `BoxBytes`.
817 ///
818 /// `T` must be either [`Sized`] and [`NoUninit`],
819 /// [`[U]`](slice) where `U: NoUninit`, or [`str`].
820 #[inline]
box_bytes_of<T: sealed::BoxBytesOf + ?Sized>(input: Box<T>) -> BoxBytes821 pub fn box_bytes_of<T: sealed::BoxBytesOf + ?Sized>(input: Box<T>) -> BoxBytes {
822   input.box_bytes_of()
823 }
824 
825 /// Re-interprets `BoxBytes` as `Box<T>`.
826 ///
827 /// `T` must be either [`Sized`] + [`AnyBitPattern`], or
828 /// [`[U]`](slice) where `U: AnyBitPattern`.
829 ///
830 /// ## Panics
831 ///
832 /// This is [`try_from_box_bytes`] but will panic on error and the input will be
833 /// dropped.
834 #[inline]
from_box_bytes<T: sealed::FromBoxBytes + ?Sized>( input: BoxBytes, ) -> Box<T>835 pub fn from_box_bytes<T: sealed::FromBoxBytes + ?Sized>(
836   input: BoxBytes,
837 ) -> Box<T> {
838   try_from_box_bytes(input).map_err(|(error, _)| error).unwrap()
839 }
840 
841 /// Re-interprets `BoxBytes` as `Box<T>`.
842 ///
843 /// `T` must be either [`Sized`] + [`AnyBitPattern`], or
844 /// [`[U]`](slice) where `U: AnyBitPattern`.
845 ///
846 /// Returns `Err`:
847 /// * If the input isn't aligned for `T`.
848 /// * If `T: Sized` and the input's length isn't exactly the size of `T`.
849 /// * If `T = [U]` and the input's length isn't exactly a multiple of the size
850 ///   of `U`.
851 #[inline]
try_from_box_bytes<T: sealed::FromBoxBytes + ?Sized>( input: BoxBytes, ) -> Result<Box<T>, (PodCastError, BoxBytes)>852 pub fn try_from_box_bytes<T: sealed::FromBoxBytes + ?Sized>(
853   input: BoxBytes,
854 ) -> Result<Box<T>, (PodCastError, BoxBytes)> {
855   T::try_from_box_bytes(input)
856 }
857 
858 impl BoxBytes {
859   /// Constructs a `BoxBytes` from its raw parts.
860   ///
861   /// # Safety
862   ///
863   /// The pointer is owned, has been allocated with the provided layout, and
864   /// points to `layout.size()` initialized bytes.
from_raw_parts(ptr: NonNull<u8>, layout: Layout) -> Self865   pub unsafe fn from_raw_parts(ptr: NonNull<u8>, layout: Layout) -> Self {
866     BoxBytes { ptr, layout }
867   }
868 
869   /// Deconstructs a `BoxBytes` into its raw parts.
870   ///
871   /// The pointer is owned, has been allocated with the provided layout, and
872   /// points to `layout.size()` initialized bytes.
into_raw_parts(self) -> (NonNull<u8>, Layout)873   pub fn into_raw_parts(self) -> (NonNull<u8>, Layout) {
874     let me = ManuallyDrop::new(self);
875     (me.ptr, me.layout)
876   }
877 
878   /// Returns the original layout.
layout(&self) -> Layout879   pub fn layout(&self) -> Layout {
880     self.layout
881   }
882 }
883