// Copyright 2023 The Fuchsia Authors
//
// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0
// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
// This file may not be copied, modified, or distributed except according to
// those terms.

#[path = "third_party/rust/layout.rs"]
pub(crate) mod core_layout;

use core::{mem, num::NonZeroUsize};

pub(crate) mod ptr {
    use core::{
        fmt::{Debug, Formatter},
        marker::PhantomData,
        ptr::NonNull,
    };

    use crate::{util::AsAddress, KnownLayout, _CastType};

    /// A raw pointer with more restrictions.
    ///
    /// `Ptr<T>` is similar to `NonNull<T>`, but it is more restrictive in the
    /// following ways:
    /// - It must derive from a valid allocation
    /// - It must reference a byte range which is contained inside the
    ///   allocation from which it derives
    ///   - As a consequence, the byte range it references must have a size
    ///     which does not overflow `isize`
    /// - It must satisfy `T`'s alignment requirement
    ///
    /// Thanks to these restrictions, it is easier to prove the soundness of
    /// some operations using `Ptr`s.
    ///
    /// `Ptr<'a, T>` is [covariant] in `'a` and `T`.
    ///
    /// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
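    ///
    /// # Example
    ///
    /// A minimal sketch (not a doc test, since `Ptr` is crate-private) of the
    /// one safe way to obtain a `Ptr`, via `From<&T>`:
    ///
    /// ```ignore
    /// let x = 0u64;
    /// let ptr = Ptr::from(&x);
    /// // SAFETY: `x` is a validly-initialized `u64`, and it is not mutated or
    /// // aliased by a mutable reference for the duration of the borrow.
    /// let r: &u64 = unsafe { ptr.as_ref() };
    /// assert_eq!(*r, 0);
    /// ```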
    pub struct Ptr<'a, T: 'a + ?Sized> {
        // INVARIANTS:
        // 1. `ptr` is derived from some valid Rust allocation, `A`
        // 2. `ptr` has the same provenance as `A`
        // 3. `ptr` addresses a byte range which is entirely contained in `A`
        // 4. `ptr` addresses a byte range whose length fits in an `isize`
        // 5. `ptr` addresses a byte range which does not wrap around the
        //    address space
        // 6. `ptr` is validly-aligned for `T`
        // 7. `A` is guaranteed to live for at least `'a`
        // 8. `T: 'a`
        ptr: NonNull<T>,
        _lifetime: PhantomData<&'a ()>,
    }

    impl<'a, T: ?Sized> Copy for Ptr<'a, T> {}
    impl<'a, T: ?Sized> Clone for Ptr<'a, T> {
        #[inline]
        fn clone(&self) -> Self {
            *self
        }
    }

    impl<'a, T: ?Sized> Ptr<'a, T> {
        /// Returns a shared reference to the value.
        ///
        /// # Safety
        ///
        /// For the duration of `'a`:
        /// - The referenced memory must contain a validly-initialized `T`.
        /// - The referenced memory must not also be referenced by any mutable
        ///   references.
        /// - The referenced memory must not be mutated, even via an
        ///   [`UnsafeCell`].
        /// - There must not exist any references to the same memory region
        ///   which contain `UnsafeCell`s at byte ranges which are not identical
        ///   to the byte ranges at which `T` contains `UnsafeCell`s.
        ///
        /// [`UnsafeCell`]: core::cell::UnsafeCell
        // TODO(#429): The safety requirements are likely overly-restrictive.
        // Notably, mutation via `UnsafeCell`s is probably fine. Once the rules
        // are more clearly defined, we should relax the safety requirements.
        // For an example of why this is subtle, see:
        // https://github.com/rust-lang/unsafe-code-guidelines/issues/463#issuecomment-1736771593
        #[allow(unused)]
        pub(crate) unsafe fn as_ref(&self) -> &'a T {
            // SAFETY:
            // - By invariant, `self.ptr` is properly-aligned for `T`.
            // - By invariant, `self.ptr` is "dereferenceable" in that it points
            //   to a single allocation.
            // - By invariant, the allocation is live for `'a`.
            // - The caller promises that no mutable references exist to this
            //   region during `'a`.
            // - The caller promises that `UnsafeCell`s match exactly.
            // - The caller promises that no mutation will happen during `'a`,
            //   even via `UnsafeCell`s.
            // - The caller promises that the memory region contains a
            //   validly-initialized `T`.
            unsafe { self.ptr.as_ref() }
        }

        /// Casts to a different (unsized) target type.
        ///
        /// # Safety
        ///
        /// The caller promises that
        /// - `cast(p)` is implemented exactly as follows: `|p: *mut T| p as
        ///   *mut U`.
        /// - The size of the object referenced by the resulting pointer is less
        ///   than or equal to the size of the object referenced by `self`.
        /// - The alignment of `U` is less than or equal to the alignment of
        ///   `T`.
        pub(crate) unsafe fn cast_unsized<U: 'a + ?Sized, F: FnOnce(*mut T) -> *mut U>(
            self,
            cast: F,
        ) -> Ptr<'a, U> {
            let ptr = cast(self.ptr.as_ptr());
            // SAFETY: Caller promises that `cast` is just an `as` cast. We call
            // `cast` on `self.ptr.as_ptr()`, which is non-null by construction.
            let ptr = unsafe { NonNull::new_unchecked(ptr) };
            // SAFETY:
            // - By invariant, `self.ptr` is derived from some valid Rust
            //   allocation, and since `ptr` is just `self.ptr as *mut U`, so is
            //   `ptr`.
            // - By invariant, `self.ptr` has the same provenance as `A`, and so
            //   the same is true of `ptr`.
            // - By invariant, `self.ptr` addresses a byte range which is
            //   entirely contained in `A`, and so the same is true of `ptr`.
            // - By invariant, `self.ptr` addresses a byte range whose length
            //   fits in an `isize`, and so the same is true of `ptr`.
            // - By invariant, `self.ptr` addresses a byte range which does not
            //   wrap around the address space, and so the same is true of
            //   `ptr`.
            // - By invariant, `self.ptr` is validly-aligned for `T`. Since
            //   `ptr` has the same address, and since the caller promises that
            //   the alignment of `U` is less than or equal to the alignment of
            //   `T`, `ptr` is validly-aligned for `U`.
            // - By invariant, `A` is guaranteed to live for at least `'a`.
            // - `U: 'a`
            Ptr { ptr, _lifetime: PhantomData }
        }
    }

    impl<'a> Ptr<'a, [u8]> {
        /// Attempts to cast `self` to a `U` using the given cast type.
        ///
        /// Returns `None` if the resulting `U` would be invalidly-aligned or if
        /// no `U` can fit in `self`. On success, returns a pointer to the
        /// largest-possible `U` which fits in `self`.
        ///
        /// # Safety
        ///
        /// The caller may assume that this implementation is correct, and may
        /// rely on that assumption for the soundness of their code. In
        /// particular, the caller may assume that, if `try_cast_into` returns
        /// `Some((ptr, split_at))`, then:
        /// - If this is a prefix cast, `ptr` refers to the byte range `[0,
        ///   split_at)` in `self`.
        /// - If this is a suffix cast, `ptr` refers to the byte range
        ///   `[split_at, self.len())` in `self`.
        ///
        /// # Panics
        ///
        /// Panics if `U` is a DST whose trailing slice element is zero-sized.
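        ///
        /// # Example
        ///
        /// An illustrative sketch, not a doc test (`Ptr` is crate-private).
        /// Suppose `bytes: Ptr<'_, [u8]>` addresses 5 bytes at an address
        /// which happens to be 2-aligned:
        ///
        /// ```ignore
        /// // The largest `[u16]` which fits in 5 bytes has 2 elements (4
        /// // bytes), so a prefix cast consumes bytes `[0, 4)` and reports
        /// // `split_at == 4`, leaving byte 4 as leftover.
        /// if let Some((ptr, split_at)) = bytes.try_cast_into::<[u16]>(_CastType::_Prefix) {
        ///     assert_eq!(split_at, 4);
        /// }
        /// ```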
        pub(crate) fn try_cast_into<U: 'a + ?Sized + KnownLayout>(
            &self,
            cast_type: _CastType,
        ) -> Option<(Ptr<'a, U>, usize)> {
            // PANICS: By invariant, the byte range addressed by `self.ptr` does
            // not wrap around the address space. This implies that the sum of
            // the address (represented as a `usize`) and length does not
            // overflow `usize`, as required by
            // `validate_cast_and_convert_metadata`. Thus, this call to
            // `validate_cast_and_convert_metadata` won't panic.
            let (elems, split_at) = U::LAYOUT.validate_cast_and_convert_metadata(
                AsAddress::addr(self.ptr.as_ptr()),
                self.len(),
                cast_type,
            )?;
            let offset = match cast_type {
                _CastType::_Prefix => 0,
                _CastType::_Suffix => split_at,
            };

            let ptr = self.ptr.cast::<u8>().as_ptr();
            // SAFETY: `offset` is either `0` or `split_at`.
            // `validate_cast_and_convert_metadata` promises that `split_at` is
            // in the range `[0, self.len()]`. Thus, in both cases, `offset` is
            // in `[0, self.len()]`. Thus:
            // - The resulting pointer is in or one byte past the end of the
            //   same byte range as `self.ptr`. Since, by invariant, `self.ptr`
            //   addresses a byte range entirely contained within a single
            //   allocation, the pointer resulting from this operation is within
            //   or one byte past the end of that same allocation.
            // - By invariant, `self.len() <= isize::MAX`. Since `offset <=
            //   self.len()`, `offset <= isize::MAX`.
            // - By invariant, `self.ptr` addresses a byte range which does not
            //   wrap around the address space. This means that the base pointer
            //   plus the `self.len()` does not overflow `usize`. Since `offset
            //   <= self.len()`, this addition does not overflow `usize`.
            let base = unsafe { ptr.add(offset) };
            // SAFETY: Since `add` is not allowed to wrap around, the preceding
            // line produces a pointer whose address is greater than or equal to
            // that of `ptr`. Since `ptr` is a `NonNull`, `base` is also
            // non-null.
            let base = unsafe { NonNull::new_unchecked(base) };
            let ptr = U::raw_from_ptr_len(base, elems);
            // SAFETY:
            // - By invariant, `self.ptr` is derived from some valid Rust
            //   allocation, `A`, and has the same provenance as `A`. All
            //   operations performed on `self.ptr` and values derived from it
            //   in this method preserve provenance, so:
            //   - `ptr` is derived from a valid Rust allocation, `A`.
            //   - `ptr` has the same provenance as `A`.
            // - `validate_cast_and_convert_metadata` promises that the object
            //   described by `elems` and `split_at` lives at a byte range which
            //   is a subset of the input byte range. Thus:
            //   - Since, by invariant, `self.ptr` addresses a byte range
            //     entirely contained in `A`, so does `ptr`.
            //   - Since, by invariant, `self.ptr` addresses a range whose
            //     length is not longer than `isize::MAX` bytes, so does `ptr`.
            //   - Since, by invariant, `self.ptr` addresses a range which does
            //     not wrap around the address space, so does `ptr`.
            // - `validate_cast_and_convert_metadata` promises that the object
            //   described by `split_at` is validly-aligned for `U`.
            // - By invariant on `self`, `A` is guaranteed to live for at least
            //   `'a`.
            // - `U: 'a` by trait bound.
            Some((Ptr { ptr, _lifetime: PhantomData }, split_at))
        }

        /// Attempts to cast `self` into a `U`, failing if all of the bytes of
        /// `self` cannot be treated as a `U`.
        ///
        /// In particular, this method fails if `self` is not validly-aligned
        /// for `U` or if `self`'s size is not a valid size for `U`.
        ///
        /// # Safety
        ///
        /// On success, the caller may assume that the returned pointer
        /// references the same byte range as `self`.
        #[allow(unused)]
        #[inline(always)]
        pub(crate) fn try_cast_into_no_leftover<U: 'a + ?Sized + KnownLayout>(
            &self,
        ) -> Option<Ptr<'a, U>> {
            // TODO(#67): Remove this allow. See `NonNullExt` for more details.
            #[allow(unstable_name_collisions)]
            match self.try_cast_into(_CastType::_Prefix) {
                Some((slf, split_at)) if split_at == self.len() => Some(slf),
                Some(_) | None => None,
            }
        }
    }

    impl<'a, T> Ptr<'a, [T]> {
        /// The number of slice elements referenced by `self`.
        ///
        /// # Safety
        ///
        /// Unsafe code may rely on `len` satisfying the above contract.
        fn len(&self) -> usize {
            #[allow(clippy::as_conversions)]
            let slc = self.ptr.as_ptr() as *const [()];
            // SAFETY:
            // - `()` has alignment 1, so `slc` is trivially aligned.
            // - `slc` was derived from a non-null pointer.
            // - The size is 0 regardless of the length, so it is sound to
            //   materialize a reference regardless of location.
            // - By invariant, `self.ptr` has valid provenance.
            let slc = unsafe { &*slc };
            // This is correct because the preceding `as` cast preserves the
            // number of slice elements. Per
            // https://doc.rust-lang.org/nightly/reference/expressions/operator-expr.html#slice-dst-pointer-to-pointer-cast:
            //
            //   For slice types like `[T]` and `[U]`, the raw pointer types
            //   `*const [T]`, `*mut [T]`, `*const [U]`, and `*mut [U]` encode
            //   the number of elements in this slice. Casts between these raw
            //   pointer types preserve the number of elements. Note that, as a
            //   consequence, such casts do *not* necessarily preserve the size
            //   of the pointer's referent (e.g., casting `*const [u16]` to
            //   `*const [u8]` will result in a raw pointer which refers to an
            //   object of half the size of the original). The same holds for
            //   `str` and any compound type whose unsized tail is a slice type,
            //   such as struct `Foo(i32, [u8])` or `(u64, Foo)`.
            //
            // TODO(#429),
            // TODO(https://github.com/rust-lang/reference/pull/1417): Once this
            // text is available on the Stable docs, cite those instead of the
            // Nightly docs.
            slc.len()
        }

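        /// Returns an iterator of `Ptr`s to each element of the slice
        /// addressed by `self`.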
        pub(crate) fn iter(&self) -> impl Iterator<Item = Ptr<'a, T>> {
            // TODO(#429): Once `NonNull::cast` documents that it preserves
            // provenance, cite those docs.
            let base = self.ptr.cast::<T>().as_ptr();
            (0..self.len()).map(move |i| {
                // TODO(https://github.com/rust-lang/rust/issues/74265): Use
                // `NonNull::get_unchecked_mut`.

                // SAFETY: If the following conditions are not satisfied,
                // `pointer::add` may induce Undefined Behavior [1]:
                // > 1. Both the starting and resulting pointer must be either
                // >    in bounds or one byte past the end of the same allocated
                // >    object.
                // > 2. The computed offset, in bytes, cannot overflow an
                // >    `isize`.
                // > 3. The offset being in bounds cannot rely on “wrapping
                // >    around” the address space. That is, the
                // >    infinite-precision sum must fit in a `usize`.
                //
                // [1] https://doc.rust-lang.org/std/primitive.pointer.html#method.add
                //
                // We satisfy all three of these conditions here:
                // 1. `base` (by invariant on `self`) points to an allocated
                //    object. By contract, `self.len()` accurately reflects the
                //    number of elements in the slice. `i` is in bounds of
                //    `self.len()` by construction, and so the result of this
                //    addition cannot overflow past the end of the allocation
                //    referred to by `base`.
                // 2. By invariant on `Ptr`, `self` addresses a byte range whose
                //    length fits in an `isize`. Since `elem` is contained in
                //    `self`, the computed offset of `elem` must fit within
                //    `isize`.
                // 3. By invariant on `Ptr`, `self` addresses a byte range which
                //    does not wrap around the address space. Since `elem` is
                //    contained in `self`, the computed offset of `elem` must
                //    not wrap around the address space.
                //
                // TODO(#429): Once `pointer::add` documents that it preserves
                // provenance, cite those docs.
                let elem = unsafe { base.add(i) };

                // SAFETY:
                //  - `elem` must not be null. `base` is constructed from a
                //    `NonNull` pointer, and the addition that produces `elem`
                //    must not overflow or wrap around, so `elem >= base > 0`.
                //
                // TODO(#429): Once `NonNull::new_unchecked` documents that it
                // preserves provenance, cite those docs.
                let elem = unsafe { NonNull::new_unchecked(elem) };

                // SAFETY: The safety invariants of `Ptr` (see definition) are
                // satisfied:
                // 1. `elem` is derived from a valid Rust allocation, because
                //    `self` is derived from a valid Rust allocation, by
                //    invariant on `Ptr`.
                // 2. `elem` has the same provenance as `self`, because it is
                //    derived from `self` using a series of
                //    provenance-preserving operations.
                // 3. `elem` is entirely contained in the allocation of `self`
                //    (see above).
                // 4. `elem` addresses a byte range whose length fits in an
                //    `isize` (see above).
                // 5. `elem` addresses a byte range which does not wrap around
                //    the address space (see above).
                // 6. `elem` is validly-aligned for `T`. `self`, which
                //    represents a `[T]`, is validly-aligned for `T`, and `elem`
                //    is an element within that `[T]`.
                // 7. The allocation of `elem` is guaranteed to live for at
                //    least `'a`, because `elem` is entirely contained in
                //    `self`, which lives for at least `'a` by invariant on
                //    `Ptr`.
                // 8. `T: 'a`, because `elem` is an element within `[T]`, and
                //    `[T]: 'a` by invariant on `Ptr`.
                Ptr { ptr: elem, _lifetime: PhantomData }
            })
        }
    }

    impl<'a, T: 'a + ?Sized> From<&'a T> for Ptr<'a, T> {
        #[inline(always)]
        fn from(t: &'a T) -> Ptr<'a, T> {
            // SAFETY: `t` points to a valid Rust allocation, `A`, by
            // construction. Thus:
            // - `ptr` is derived from `A`
            // - Since we use `NonNull::from`, which preserves provenance, `ptr`
            //   has the same provenance as `A`
            // - Since `NonNull::from` creates a pointer which addresses the
            //   same bytes as `t`, `ptr` addresses a byte range entirely
            //   contained in (in this case, identical to) `A`
            // - Since `t: &T`, it addresses no more than `isize::MAX` bytes [1]
            // - Since `t: &T`, it addresses a byte range which does not wrap
            //   around the address space [2]
            // - Since it is constructed from a valid `&T`, `ptr` is
            //   validly-aligned for `T`
            // - Since `t: &'a T`, the allocation `A` is guaranteed to live for
            //   at least `'a`
            // - `T: 'a` by trait bound
            //
            // TODO(#429),
            // TODO(https://github.com/rust-lang/rust/issues/116181): Once it's
            // documented, reference the guarantee that `NonNull::from`
            // preserves provenance.
            //
            // TODO(#429),
            // TODO(https://github.com/rust-lang/unsafe-code-guidelines/issues/465):
            // - [1] Where does the reference document that allocations fit in
            //   `isize`?
            // - [2] Where does the reference document that allocations don't
            //   wrap around the address space?
            Ptr { ptr: NonNull::from(t), _lifetime: PhantomData }
        }
    }

    impl<'a, T: 'a + ?Sized> Debug for Ptr<'a, T> {
        #[inline]
        fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
            self.ptr.fmt(f)
        }
    }

    #[cfg(test)]
    mod tests {
        use core::mem::{self, MaybeUninit};

        use super::*;
        use crate::{util::testutil::AU64, FromBytes};

        #[test]
        fn test_ptr_try_cast_into_soundness() {
            // This test is designed so that if `Ptr::try_cast_into_xxx` are
            // buggy, it will manifest as unsoundness that Miri can detect.

            // - If `size_of::<T>() == 0`, `N == 4`
            // - Else, `N == 4 * size_of::<T>()`
            fn test<const N: usize, T: ?Sized + KnownLayout + FromBytes>() {
                let mut bytes = [MaybeUninit::<u8>::uninit(); N];
                let initialized = [MaybeUninit::new(0u8); N];
                for start in 0..=bytes.len() {
                    for end in start..=bytes.len() {
                        // Set all bytes to uninitialized other than those in
                        // the range we're going to pass to `try_cast_into`.
                        // This allows Miri to detect out-of-bounds reads
                        // because they read uninitialized memory. Without this,
                        // some out-of-bounds reads would still be in-bounds of
                        // `bytes`, and so might spuriously be accepted.
                        bytes = [MaybeUninit::<u8>::uninit(); N];
                        let bytes = &mut bytes[start..end];
                        // Initialize only the byte range we're going to pass to
                        // `try_cast_into`.
                        bytes.copy_from_slice(&initialized[start..end]);

                        let bytes = {
                            let bytes: *const [MaybeUninit<u8>] = bytes;
                            #[allow(clippy::as_conversions)]
                            let bytes = bytes as *const [u8];
                            // SAFETY: We just initialized these bytes to valid
                            // `u8`s.
                            unsafe { &*bytes }
                        };

                        /// # Safety
                        ///
                        /// - `slf` must reference a byte range which is
                        ///   entirely initialized.
                        /// - `slf` must reference a byte range which is only
                        ///   referenced by shared references which do not
                        ///   contain `UnsafeCell`s during its lifetime.
                        unsafe fn validate_and_get_len<T: ?Sized + KnownLayout + FromBytes>(
                            slf: Ptr<'_, T>,
                        ) -> usize {
                            // SAFETY:
                            // - Since all bytes in `slf` are initialized and
                            //   `T: FromBytes`, `slf` contains a valid `T`.
                            // - The caller promises that the referenced memory
                            //   is not also referenced by any mutable
                            //   references.
                            // - The caller promises that the referenced memory
                            //   is not also referenced as a type which contains
                            //   `UnsafeCell`s.
                            let t = unsafe { slf.as_ref() };

                            let bytes = {
                                let len = mem::size_of_val(t);
                                let t: *const T = t;
                                // SAFETY:
                                // - We know `t`'s bytes are all initialized
                                //   because we just read it from `slf`, which
                                //   points to an initialized range of bytes. If
                                //   there's a bug and this doesn't hold, then
                                //   that's exactly what we're hoping Miri will
                                //   catch!
                                // - Since `T: FromBytes`, `T` doesn't contain
                                //   any `UnsafeCell`s, so it's okay for `t: T`
                                //   and a `&[u8]` to the same memory to be
                                //   alive concurrently.
                                unsafe { core::slice::from_raw_parts(t.cast::<u8>(), len) }
                            };

                            // This assertion ensures that `t`'s bytes are read
                            // and compared to another value, which in turn
                            // ensures that Miri gets a chance to notice if any
                            // of `t`'s bytes are uninitialized, which they
                            // shouldn't be (see the comment above).
                            assert_eq!(bytes, vec![0u8; bytes.len()]);

                            mem::size_of_val(t)
                        }

                        for cast_type in [_CastType::_Prefix, _CastType::_Suffix] {
                            if let Some((slf, split_at)) =
                                Ptr::from(bytes).try_cast_into::<T>(cast_type)
                            {
                                // SAFETY: All bytes in `bytes` have been
                                // initialized.
                                let len = unsafe { validate_and_get_len(slf) };
                                match cast_type {
                                    _CastType::_Prefix => assert_eq!(split_at, len),
                                    _CastType::_Suffix => assert_eq!(split_at, bytes.len() - len),
                                }
                            }
                        }

                        if let Some(slf) = Ptr::from(bytes).try_cast_into_no_leftover::<T>() {
                            // SAFETY: All bytes in `bytes` have been
                            // initialized.
                            let len = unsafe { validate_and_get_len(slf) };
                            assert_eq!(len, bytes.len());
                        }
                    }
                }
            }

            macro_rules! test {
                ($($ty:ty),*) => {
                    $({
                        const S: usize = core::mem::size_of::<$ty>();
                        const N: usize = if S == 0 { 4 } else { S * 4 };
                        test::<N, $ty>();
                        // We don't support casting into DSTs whose trailing
                        // slice element is a ZST.
                        if S > 0 {
                            test::<N, [$ty]>();
                        }
                        // TODO: Test with a slice DST once we have any that
                        // implement `KnownLayout + FromBytes`.
                    })*
                };
            }

            test!(());
            test!(u8, u16, u32, u64, u128, usize, AU64);
            test!(i8, i16, i32, i64, i128, isize);
            test!(f32, f64);
        }
    }
}

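/// A type with an address which can be extracted as a `usize`.
///
/// For references and wide pointers, only the address portion is used; any
/// metadata (such as a slice length) is discarded.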
pub(crate) trait AsAddress {
    fn addr(self) -> usize;
}

impl<'a, T: ?Sized> AsAddress for &'a T {
    #[inline(always)]
    fn addr(self) -> usize {
        let ptr: *const T = self;
        AsAddress::addr(ptr)
    }
}

impl<'a, T: ?Sized> AsAddress for &'a mut T {
    #[inline(always)]
    fn addr(self) -> usize {
        let ptr: *const T = self;
        AsAddress::addr(ptr)
    }
}

impl<T: ?Sized> AsAddress for *const T {
    #[inline(always)]
    fn addr(self) -> usize {
        // TODO(#181), TODO(https://github.com/rust-lang/rust/issues/95228): Use
        // `.addr()` instead of `as usize` once it's stable, and get rid of this
        // `allow`. Currently, `as usize` is the only way to accomplish this.
        #[allow(clippy::as_conversions)]
        #[cfg_attr(__INTERNAL_USE_ONLY_NIGHLTY_FEATURES_IN_TESTS, allow(lossy_provenance_casts))]
        return self.cast::<()>() as usize;
    }
}

impl<T: ?Sized> AsAddress for *mut T {
    #[inline(always)]
    fn addr(self) -> usize {
        let ptr: *const T = self;
        AsAddress::addr(ptr)
    }
}

/// Is `t` aligned to `mem::align_of::<U>()`?
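///
/// # Example
///
/// A quick sketch (not a doc test):
///
/// ```ignore
/// // Every address is trivially aligned for `u8` (alignment 1).
/// assert!(aligned_to::<_, u8>(&0u32));
/// // A `&u64` is always aligned for `u64` by construction.
/// let x = 0u64;
/// assert!(aligned_to::<_, u64>(&x));
/// ```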
#[inline(always)]
pub(crate) fn aligned_to<T: AsAddress, U>(t: T) -> bool {
    // `mem::align_of::<U>()` is guaranteed to return a non-zero value, which in
    // turn guarantees that this mod operation will not panic.
    #[allow(clippy::arithmetic_side_effects)]
    let remainder = t.addr() % mem::align_of::<U>();
    remainder == 0
}

/// Round `n` down to the largest value `m` such that `m <= n` and `m % align ==
/// 0`.
///
/// # Panics
///
/// May panic if `align` is not a power of two. Even if it doesn't panic in this
/// case, it will produce nonsense results.
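///
/// # Example
///
/// A quick sketch (not a doc test):
///
/// ```ignore
/// let eight = NonZeroUsize::new(8).unwrap();
/// assert_eq!(round_down_to_next_multiple_of_alignment(17, eight), 16);
/// assert_eq!(round_down_to_next_multiple_of_alignment(16, eight), 16);
/// assert_eq!(round_down_to_next_multiple_of_alignment(7, eight), 0);
/// ```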
#[inline(always)]
pub(crate) const fn round_down_to_next_multiple_of_alignment(
    n: usize,
    align: NonZeroUsize,
) -> usize {
    let align = align.get();
    debug_assert!(align.is_power_of_two());

    // Subtraction can't underflow because `align.get() >= 1`.
    #[allow(clippy::arithmetic_side_effects)]
    let mask = !(align - 1);
    n & mask
}

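/// The maximum of `a` and `b`, usable in `const` contexts (unlike `Ord::max`).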
pub(crate) const fn max(a: NonZeroUsize, b: NonZeroUsize) -> NonZeroUsize {
    if a.get() < b.get() {
        b
    } else {
        a
    }
}

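/// The minimum of `a` and `b`, usable in `const` contexts (unlike `Ord::min`).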
pub(crate) const fn min(a: NonZeroUsize, b: NonZeroUsize) -> NonZeroUsize {
    if a.get() > b.get() {
        b
    } else {
        a
    }
}

/// Since we support multiple versions of Rust, there are often features which
/// have been stabilized in the most recent stable release which do not yet
/// exist (stably) on our MSRV. This module provides polyfills for those
/// features so that we can write more "modern" code, and just remove the
/// polyfill once our MSRV supports the corresponding feature. Without this,
/// we'd have to write worse/more verbose code and leave TODO comments sprinkled
/// throughout the codebase to update to the new pattern once it's stabilized.
///
/// Each trait is imported as `_` at the crate root; each polyfill should "just
/// work" at usage sites.
pub(crate) mod polyfills {
    use core::ptr::{self, NonNull};

    // A polyfill for `NonNull::slice_from_raw_parts` that we can use before our
    // MSRV is 1.70, when that function was stabilized.
    //
    // TODO(#67): Once our MSRV is 1.70, remove this.
    #[allow(unused)]
    pub(crate) trait NonNullExt<T> {
        fn slice_from_raw_parts(data: Self, len: usize) -> NonNull<[T]>;
    }
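
    // Usage sketch (assuming the `as _` import at the crate root described in
    // the module docs above): on toolchains before 1.70, a call like
    // `NonNull::slice_from_raw_parts(data, len)` resolves to this trait
    // method; on 1.70 and later it collides with the stabilized inherent
    // method, which is why call sites carry
    // `#[allow(unstable_name_collisions)]`.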

    #[allow(unused)]
    impl<T> NonNullExt<T> for NonNull<T> {
        #[inline(always)]
        fn slice_from_raw_parts(data: Self, len: usize) -> NonNull<[T]> {
            let ptr = ptr::slice_from_raw_parts_mut(data.as_ptr(), len);
            // SAFETY: `ptr` is converted from `data`, which is non-null.
            unsafe { NonNull::new_unchecked(ptr) }
        }
    }
}

#[cfg(test)]
pub(crate) mod testutil {
    use core::fmt::{self, Display, Formatter};

    use crate::*;

    /// A `T` which is aligned to at least `align_of::<A>()`.
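    ///
    /// # Example
    ///
    /// A quick sketch (not a doc test), using the `AU64` type defined below:
    ///
    /// ```ignore
    /// // `AU64` is `#[repr(C, align(8))]`, so `buf.t` is 8-aligned.
    /// let buf = Align::<[u8; 8], AU64>::new([0u8; 8]);
    /// assert_eq!(buf.t.as_ptr() as usize % 8, 0);
    /// ```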
    #[derive(Default)]
    pub(crate) struct Align<T, A> {
        pub(crate) t: T,
        _a: [A; 0],
    }

    impl<T: Default, A> Align<T, A> {
        pub(crate) fn set_default(&mut self) {
            self.t = T::default();
        }
    }

    impl<T, A> Align<T, A> {
        pub(crate) const fn new(t: T) -> Align<T, A> {
            Align { t, _a: [] }
        }
    }

    // A `u64` with alignment 8.
    //
    // Though `u64` has alignment 8 on some platforms, it's not guaranteed.
    // By contrast, `AU64` is guaranteed to have alignment 8.
    #[derive(
        KnownLayout,
        FromZeroes,
        FromBytes,
        AsBytes,
        Eq,
        PartialEq,
        Ord,
        PartialOrd,
        Default,
        Debug,
        Copy,
        Clone,
    )]
    #[repr(C, align(8))]
    pub(crate) struct AU64(pub(crate) u64);

    impl AU64 {
        // Converts this `AU64` to bytes using this platform's endianness.
        pub(crate) fn to_bytes(self) -> [u8; 8] {
            crate::transmute!(self)
        }
    }

    impl Display for AU64 {
        fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
            Display::fmt(&self.0, f)
        }
    }

    #[derive(
        FromZeroes, FromBytes, Eq, PartialEq, Ord, PartialOrd, Default, Debug, Copy, Clone,
    )]
    #[repr(C)]
    pub(crate) struct Nested<T, U: ?Sized> {
        _t: T,
        _u: U,
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_round_down_to_next_multiple_of_alignment() {
        fn alt_impl(n: usize, align: NonZeroUsize) -> usize {
            let mul = n / align.get();
            mul * align.get()
        }

        for align in [1, 2, 4, 8, 16] {
            for n in 0..256 {
                let align = NonZeroUsize::new(align).unwrap();
                let want = alt_impl(n, align);
                let got = round_down_to_next_multiple_of_alignment(n, align);
                assert_eq!(got, want, "round_down_to_next_multiple_of_alignment({n}, {align})");
            }
        }
    }
}

#[cfg(kani)]
mod proofs {
    use super::*;

    #[kani::proof]
    fn prove_round_down_to_next_multiple_of_alignment() {
        fn model_impl(n: usize, align: NonZeroUsize) -> usize {
            assert!(align.get().is_power_of_two());
            let mul = n / align.get();
            mul * align.get()
        }

        let align: NonZeroUsize = kani::any();
        kani::assume(align.get().is_power_of_two());
        let n: usize = kani::any();

        let expected = model_impl(n, align);
        let actual = round_down_to_next_multiple_of_alignment(n, align);
        assert_eq!(expected, actual, "round_down_to_next_multiple_of_alignment({n}, {align})");
    }

    // Restricted to nightly since we use the unstable `usize::next_multiple_of`
    // in our model implementation.
    #[cfg(__INTERNAL_USE_ONLY_NIGHLTY_FEATURES_IN_TESTS)]
    #[kani::proof]
    fn prove_padding_needed_for() {
        fn model_impl(len: usize, align: NonZeroUsize) -> usize {
            let padded = len.next_multiple_of(align.get());
            let padding = padded - len;
            padding
        }

        let align: NonZeroUsize = kani::any();
        kani::assume(align.get().is_power_of_two());
        let len: usize = kani::any();
        // Constrain `len` to valid Rust lengths, since our model implementation
        // isn't robust to overflow.
        kani::assume(len <= isize::MAX as usize);
        kani::assume(align.get() < 1 << 29);

        let expected = model_impl(len, align);
        let actual = core_layout::padding_needed_for(len, align);
        assert_eq!(expected, actual, "padding_needed_for({len}, {align})");

        let padded_len = actual + len;
        assert_eq!(padded_len % align, 0);
        assert!(padded_len / align >= len / align);
    }
}