1*bb4ee6a4SAndroid Build Coastguard Worker // Copyright 2017 The ChromiumOS Authors
2*bb4ee6a4SAndroid Build Coastguard Worker // Use of this source code is governed by a BSD-style license that can be
3*bb4ee6a4SAndroid Build Coastguard Worker // found in the LICENSE file.
4*bb4ee6a4SAndroid Build Coastguard Worker
5*bb4ee6a4SAndroid Build Coastguard Worker //! Types for volatile access to memory.
6*bb4ee6a4SAndroid Build Coastguard Worker //!
//! Two of the core rules for safe Rust are no data races and no aliased mutable references.
8*bb4ee6a4SAndroid Build Coastguard Worker //! `VolatileSlice`, along with types that produce it which implement
9*bb4ee6a4SAndroid Build Coastguard Worker //! `VolatileMemory`, allow us to sidestep that rule by wrapping pointers that absolutely have to be
10*bb4ee6a4SAndroid Build Coastguard Worker //! accessed volatile. Some systems really do need to operate on shared memory and can't have the
11*bb4ee6a4SAndroid Build Coastguard Worker //! compiler reordering or eliding access because it has no visibility into what other systems are
12*bb4ee6a4SAndroid Build Coastguard Worker //! doing with that hunk of memory.
13*bb4ee6a4SAndroid Build Coastguard Worker //!
14*bb4ee6a4SAndroid Build Coastguard Worker //! For the purposes of maintaining safety, volatile memory has some rules of its own:
15*bb4ee6a4SAndroid Build Coastguard Worker //! 1. No references or slices to volatile memory (`&` or `&mut`).
//! 2. Access should always be done with a volatile read or write.
17*bb4ee6a4SAndroid Build Coastguard Worker //!
18*bb4ee6a4SAndroid Build Coastguard Worker //! The first rule is because having references of any kind to memory considered volatile would
//! violate pointer aliasing. The second is because non-volatile accesses are inherently undefined if
20*bb4ee6a4SAndroid Build Coastguard Worker //! done concurrently without synchronization. With volatile access we know that the compiler has
21*bb4ee6a4SAndroid Build Coastguard Worker //! not reordered or elided the access.
22*bb4ee6a4SAndroid Build Coastguard Worker
23*bb4ee6a4SAndroid Build Coastguard Worker use std::cmp::min;
24*bb4ee6a4SAndroid Build Coastguard Worker use std::mem::size_of;
25*bb4ee6a4SAndroid Build Coastguard Worker use std::ptr::copy;
26*bb4ee6a4SAndroid Build Coastguard Worker use std::ptr::read_volatile;
27*bb4ee6a4SAndroid Build Coastguard Worker use std::ptr::write_bytes;
28*bb4ee6a4SAndroid Build Coastguard Worker use std::ptr::write_volatile;
29*bb4ee6a4SAndroid Build Coastguard Worker use std::result;
30*bb4ee6a4SAndroid Build Coastguard Worker use std::slice;
31*bb4ee6a4SAndroid Build Coastguard Worker
32*bb4ee6a4SAndroid Build Coastguard Worker use remain::sorted;
33*bb4ee6a4SAndroid Build Coastguard Worker use thiserror::Error;
34*bb4ee6a4SAndroid Build Coastguard Worker use zerocopy::AsBytes;
35*bb4ee6a4SAndroid Build Coastguard Worker use zerocopy::FromBytes;
36*bb4ee6a4SAndroid Build Coastguard Worker use zerocopy::Ref;
37*bb4ee6a4SAndroid Build Coastguard Worker
38*bb4ee6a4SAndroid Build Coastguard Worker use crate::IoBufMut;
39*bb4ee6a4SAndroid Build Coastguard Worker
/// Errors that can occur when validating or taking sub-ranges of volatile memory.
#[sorted]
#[derive(Error, Eq, PartialEq, Debug)]
pub enum VolatileMemoryError {
    /// `addr` is out of bounds of the volatile memory slice.
    #[error("address 0x{addr:x} is out of bounds")]
    OutOfBounds { addr: usize },
    /// Taking a slice at `base` with `offset` would overflow `usize`.
    #[error("address 0x{base:x} offset by 0x{offset:x} would overflow")]
    Overflow { base: usize, offset: usize },
}
50*bb4ee6a4SAndroid Build Coastguard Worker
/// Result type returned by fallible volatile memory operations in this module.
pub type VolatileMemoryResult<T> = result::Result<T, VolatileMemoryError>;

// Module-local shorthand aliases for the error and result types above.
use crate::VolatileMemoryError as Error;
type Result<T> = VolatileMemoryResult<T>;
55*bb4ee6a4SAndroid Build Coastguard Worker
/// Trait for types that support raw volatile access to their data.
pub trait VolatileMemory {
    /// Gets a slice of memory at `offset` that is `count` bytes in length and supports volatile
    /// access.
    ///
    /// Returns an error if `offset + count` overflows or lies outside the backing memory.
    fn get_slice(&self, offset: usize, count: usize) -> Result<VolatileSlice>;
}
62*bb4ee6a4SAndroid Build Coastguard Worker
/// A slice of raw memory that supports volatile access. Like `std::io::IoSliceMut`, this type is
/// guaranteed to be ABI-compatible with `libc::iovec` but unlike `IoSliceMut`, it doesn't
/// automatically deref to `&mut [u8]`.
// `repr(transparent)` over `IoBufMut` is what makes the pointer casts in
// `as_iobufs`/`as_iobufs_mut` sound.
#[derive(Copy, Clone, Debug)]
#[repr(transparent)]
pub struct VolatileSlice<'a>(IoBufMut<'a>);
69*bb4ee6a4SAndroid Build Coastguard Worker
impl<'a> VolatileSlice<'a> {
    /// Creates a slice of raw memory that must support volatile access.
    pub fn new(buf: &mut [u8]) -> VolatileSlice {
        VolatileSlice(IoBufMut::new(buf))
    }

    /// Creates a `VolatileSlice` from a pointer and a length.
    ///
    /// # Safety
    ///
    /// In order to use this method safely, `addr` must be valid for reads and writes of `len` bytes
    /// and should live for the entire duration of lifetime `'a`.
    pub unsafe fn from_raw_parts(addr: *mut u8, len: usize) -> VolatileSlice<'a> {
        VolatileSlice(IoBufMut::from_raw_parts(addr, len))
    }

    /// Gets a const pointer to this slice's memory.
    pub fn as_ptr(&self) -> *const u8 {
        self.0.as_ptr()
    }

    /// Gets a mutable pointer to this slice's memory.
    pub fn as_mut_ptr(&self) -> *mut u8 {
        self.0.as_mut_ptr()
    }

    /// Gets the size of this slice.
    pub fn size(&self) -> usize {
        self.0.len()
    }

    /// Advance the starting position of this slice.
    ///
    /// Panics if `count > self.size()`.
    pub fn advance(&mut self, count: usize) {
        self.0.advance(count)
    }

    /// Shorten the length of the slice.
    ///
    /// Has no effect if `len > self.size()`.
    pub fn truncate(&mut self, len: usize) {
        self.0.truncate(len)
    }

    /// Returns this `VolatileSlice` as an `IoBufMut`.
    pub fn as_iobuf(&self) -> &IoBufMut {
        &self.0
    }

    /// Converts a slice of `VolatileSlice`s into a slice of `IoBufMut`s
    // The `as_` name is conventional for this cheap cast even though it is an
    // associated function rather than a method, hence the lint allow.
    #[allow(clippy::wrong_self_convention)]
    pub fn as_iobufs<'mem, 'slice>(
        iovs: &'slice [VolatileSlice<'mem>],
    ) -> &'slice [IoBufMut<'mem>] {
        // SAFETY:
        // Safe because `VolatileSlice` is ABI-compatible with `IoBufMut`.
        unsafe { slice::from_raw_parts(iovs.as_ptr() as *const IoBufMut, iovs.len()) }
    }

    /// Converts a mutable slice of `VolatileSlice`s into a mutable slice of `IoBufMut`s
    #[inline]
    pub fn as_iobufs_mut<'mem, 'slice>(
        iovs: &'slice mut [VolatileSlice<'mem>],
    ) -> &'slice mut [IoBufMut<'mem>] {
        // SAFETY:
        // Safe because `VolatileSlice` is ABI-compatible with `IoBufMut`.
        unsafe { slice::from_raw_parts_mut(iovs.as_mut_ptr() as *mut IoBufMut, iovs.len()) }
    }

    /// Creates a copy of this slice with the address increased by `count` bytes, and the size
    /// reduced by `count` bytes.
    ///
    /// Returns `Overflow` if advancing the address overflows `usize`, or `OutOfBounds` if
    /// `count` exceeds `self.size()`.
    pub fn offset(self, count: usize) -> Result<VolatileSlice<'a>> {
        let new_addr = (self.as_mut_ptr() as usize).checked_add(count).ok_or(
            VolatileMemoryError::Overflow {
                base: self.as_mut_ptr() as usize,
                offset: count,
            },
        )?;
        // `checked_sub` fails exactly when `count > self.size()`.
        let new_size = self
            .size()
            .checked_sub(count)
            .ok_or(VolatileMemoryError::OutOfBounds { addr: new_addr })?;

        // SAFETY:
        // Safe because the memory has the same lifetime and points to a subset of the memory of the
        // original slice.
        unsafe { Ok(VolatileSlice::from_raw_parts(new_addr as *mut u8, new_size)) }
    }

    /// Similar to `get_slice` but the returned slice outlives this slice.
    ///
    /// The returned slice's lifetime is still limited by the underlying data's lifetime.
    pub fn sub_slice(self, offset: usize, count: usize) -> Result<VolatileSlice<'a>> {
        // Validate that `offset + count` neither overflows nor runs past the end of `self`.
        let mem_end = offset
            .checked_add(count)
            .ok_or(VolatileMemoryError::Overflow {
                base: offset,
                offset: count,
            })?;
        if mem_end > self.size() {
            return Err(Error::OutOfBounds { addr: mem_end });
        }
        let new_addr = (self.as_mut_ptr() as usize).checked_add(offset).ok_or(
            VolatileMemoryError::Overflow {
                base: self.as_mut_ptr() as usize,
                offset,
            },
        )?;

        // SAFETY:
        // Safe because we have verified that the new memory is a subset of the original slice.
        Ok(unsafe { VolatileSlice::from_raw_parts(new_addr as *mut u8, count) })
    }

    /// Sets each byte of this slice with the given byte, similar to `memset`.
    ///
    /// The bytes of this slice are accessed in an arbitrary order.
    ///
    /// # Examples
    ///
    /// ```
    /// # use base::VolatileSlice;
    /// # fn test_write_45() -> Result<(), ()> {
    /// let mut mem = [0u8; 32];
    /// let vslice = VolatileSlice::new(&mut mem[..]);
    /// vslice.write_bytes(45);
    /// for &v in &mem[..] {
    ///     assert_eq!(v, 45);
    /// }
    /// # Ok(())
    /// # }
    /// ```
    pub fn write_bytes(&self, value: u8) {
        // SAFETY:
        // Safe because the memory is valid and needs only byte alignment.
        unsafe {
            write_bytes(self.as_mut_ptr(), value, self.size());
        }
    }

    /// Copies `self.size()` or `buf.len()` times the size of `T` bytes, whichever is smaller, to
    /// `buf`.
    ///
    /// The copy happens from smallest to largest address in `T` sized chunks using volatile reads.
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fs::File;
    /// # use std::path::Path;
    /// # use base::VolatileSlice;
    /// # fn test_write_null() -> Result<(), ()> {
    /// let mut mem = [0u8; 32];
    /// let vslice = VolatileSlice::new(&mut mem[..]);
    /// let mut buf = [5u8; 16];
    /// vslice.copy_to(&mut buf[..]);
    /// for v in &buf[..] {
    ///     assert_eq!(*v, 0);
    /// }
    /// # Ok(())
    /// # }
    /// ```
    pub fn copy_to<T>(&self, buf: &mut [T])
    where
        T: FromBytes + AsBytes + Copy,
    {
        let mut addr = self.as_mut_ptr() as *const u8;
        // NOTE(review): the reads below assume this slice's base address is suitably aligned
        // for `T` — confirm callers only use `T` with byte-compatible alignment requirements.
        for v in buf.iter_mut().take(self.size() / size_of::<T>()) {
            // SAFETY: Safe because buf is valid, aligned to type `T` and is initialized.
            unsafe {
                *v = read_volatile(addr as *const T);
                addr = addr.add(size_of::<T>());
            }
        }
    }

    /// Copies `self.size()` or `slice.size()` bytes, whichever is smaller, to `slice`.
    ///
    /// The copies happen in an undefined order.
    /// # Examples
    ///
    /// ```
    /// # use base::VolatileMemory;
    /// # use base::VolatileSlice;
    /// # fn test_write_null() -> Result<(), ()> {
    /// let mut mem = [0u8; 32];
    /// let vslice = VolatileSlice::new(&mut mem[..]);
    /// vslice.copy_to_volatile_slice(vslice.get_slice(16, 16).map_err(|_| ())?);
    /// # Ok(())
    /// # }
    /// ```
    pub fn copy_to_volatile_slice(&self, slice: VolatileSlice) {
        // SAFETY: Safe because slice is valid and is byte aligned.
        // NOTE(review): `copy` handles overlapping regions (memmove semantics), which matters
        // here because `slice` may alias `self`, as in the doc example above.
        unsafe {
            copy(
                self.as_mut_ptr() as *const u8,
                slice.as_mut_ptr(),
                min(self.size(), slice.size()),
            );
        }
    }

    /// Copies `self.size()` or `buf.len()` times the size of `T` bytes, whichever is smaller, to
    /// this slice's memory.
    ///
    /// The copy happens from smallest to largest address in `T` sized chunks using volatile writes.
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fs::File;
    /// # use std::path::Path;
    /// # use base::VolatileMemory;
    /// # use base::VolatileSlice;
    /// # fn test_write_null() -> Result<(), ()> {
    /// let mut mem = [0u8; 32];
    /// let vslice = VolatileSlice::new(&mut mem[..]);
    /// let buf = [5u8; 64];
    /// vslice.copy_from(&buf[..]);
    /// let mut copy_buf = [0u32; 4];
    /// vslice.copy_to(&mut copy_buf);
    /// for i in 0..4 {
    ///     assert_eq!(copy_buf[i], 0x05050505);
    /// }
    /// # Ok(())
    /// # }
    /// ```
    pub fn copy_from<T>(&self, buf: &[T])
    where
        T: FromBytes + AsBytes,
    {
        let mut addr = self.as_mut_ptr();
        // NOTE(review): the writes below assume this slice's base address is suitably aligned
        // for `T` — confirm callers only use `T` with byte-compatible alignment requirements.
        for v in buf.iter().take(self.size() / size_of::<T>()) {
            // SAFETY: Safe because buf is valid, aligned to type `T` and is mutable.
            unsafe {
                write_volatile(
                    addr as *mut T,
                    // Re-read the value through `zerocopy::Ref` to obtain an owned `T`.
                    Ref::<_, T>::new(v.as_bytes()).unwrap().read(),
                );
                addr = addr.add(size_of::<T>());
            }
        }
    }

    /// Returns whether all bytes in this slice are zero or not.
    ///
    /// This is optimized for [VolatileSlice] aligned with 16 bytes.
    ///
    /// TODO(b/274840085): Use SIMD for better performance.
    pub fn is_all_zero(&self) -> bool {
        // Mask for the low 4 bits of an address, i.e. the offset within a 16-byte chunk.
        const MASK_4BIT: usize = 0x0f;
        let head_addr = self.as_ptr() as usize;
        // Round up by 16
        let aligned_head_addr = (head_addr + MASK_4BIT) & !MASK_4BIT;
        let tail_addr = head_addr + self.size();
        // Round down by 16
        let aligned_tail_addr = tail_addr & !MASK_4BIT;

        // Check 16 bytes at once. The addresses should be 16 bytes aligned for better performance.
        // NOTE(review): these are plain (non-volatile) reads, unlike the rest of this module —
        // confirm callers do not rely on this method observing concurrent modification.
        if (aligned_head_addr..aligned_tail_addr).step_by(16).any(
            |aligned_addr|
            // SAFETY: Each aligned_addr is within VolatileSlice
            unsafe { *(aligned_addr as *const u128) } != 0,
        ) {
            return false;
        }

        if head_addr == aligned_head_addr && tail_addr == aligned_tail_addr {
            // If head_addr and tail_addr are aligned, we can skip the unaligned part which contains
            // at least 2 conditional branches.
            true
        } else {
            // Check unaligned part.
            // SAFETY: The range [head_addr, aligned_head_addr) and [aligned_tail_addr, tail_addr)
            // are within VolatileSlice.
            unsafe {
                is_all_zero_naive(head_addr, aligned_head_addr)
                    && is_all_zero_naive(aligned_tail_addr, tail_addr)
            }
        }
    }
}
352*bb4ee6a4SAndroid Build Coastguard Worker
/// Check whether every byte in the half-open range `[head_addr, tail_addr)` is zero.
///
/// This checks one byte at a time; the empty range is trivially all-zero.
///
/// # Safety
///
/// * `head_addr` <= `tail_addr`
/// * Bytes between `head_addr` and `tail_addr` are valid to access.
unsafe fn is_all_zero_naive(head_addr: usize, tail_addr: usize) -> bool {
    let mut addr = head_addr;
    while addr < tail_addr {
        if *(addr as *const u8) != 0 {
            return false;
        }
        addr += 1;
    }
    true
}
364*bb4ee6a4SAndroid Build Coastguard Worker
impl<'a> VolatileMemory for VolatileSlice<'a> {
    // A sub-range of a `VolatileSlice` is itself a `VolatileSlice`, so simply delegate to
    // `sub_slice`, which performs the overflow and bounds checks.
    fn get_slice(&self, offset: usize, count: usize) -> Result<VolatileSlice> {
        self.sub_slice(offset, count)
    }
}
370*bb4ee6a4SAndroid Build Coastguard Worker
371*bb4ee6a4SAndroid Build Coastguard Worker impl PartialEq<VolatileSlice<'_>> for VolatileSlice<'_> {
eq(&self, other: &VolatileSlice) -> bool372*bb4ee6a4SAndroid Build Coastguard Worker fn eq(&self, other: &VolatileSlice) -> bool {
373*bb4ee6a4SAndroid Build Coastguard Worker let size = self.size();
374*bb4ee6a4SAndroid Build Coastguard Worker if size != other.size() {
375*bb4ee6a4SAndroid Build Coastguard Worker return false;
376*bb4ee6a4SAndroid Build Coastguard Worker }
377*bb4ee6a4SAndroid Build Coastguard Worker
378*bb4ee6a4SAndroid Build Coastguard Worker // SAFETY: We pass pointers into valid VolatileSlice regions, and size is checked above.
379*bb4ee6a4SAndroid Build Coastguard Worker let cmp = unsafe { libc::memcmp(self.as_ptr() as _, other.as_ptr() as _, size) };
380*bb4ee6a4SAndroid Build Coastguard Worker
381*bb4ee6a4SAndroid Build Coastguard Worker cmp == 0
382*bb4ee6a4SAndroid Build Coastguard Worker }
383*bb4ee6a4SAndroid Build Coastguard Worker }
384*bb4ee6a4SAndroid Build Coastguard Worker
385*bb4ee6a4SAndroid Build Coastguard Worker /// The `PartialEq` implementation for `VolatileSlice` is reflexive, symmetric, and transitive.
386*bb4ee6a4SAndroid Build Coastguard Worker impl Eq for VolatileSlice<'_> {}
387*bb4ee6a4SAndroid Build Coastguard Worker
388*bb4ee6a4SAndroid Build Coastguard Worker impl std::io::Write for VolatileSlice<'_> {
write(&mut self, buf: &[u8]) -> std::io::Result<usize>389*bb4ee6a4SAndroid Build Coastguard Worker fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
390*bb4ee6a4SAndroid Build Coastguard Worker let len = buf.len().min(self.size());
391*bb4ee6a4SAndroid Build Coastguard Worker self.copy_from(&buf[..len]);
392*bb4ee6a4SAndroid Build Coastguard Worker self.advance(len);
393*bb4ee6a4SAndroid Build Coastguard Worker Ok(len)
394*bb4ee6a4SAndroid Build Coastguard Worker }
395*bb4ee6a4SAndroid Build Coastguard Worker
flush(&mut self) -> std::io::Result<()>396*bb4ee6a4SAndroid Build Coastguard Worker fn flush(&mut self) -> std::io::Result<()> {
397*bb4ee6a4SAndroid Build Coastguard Worker Ok(())
398*bb4ee6a4SAndroid Build Coastguard Worker }
399*bb4ee6a4SAndroid Build Coastguard Worker }
400*bb4ee6a4SAndroid Build Coastguard Worker
401*bb4ee6a4SAndroid Build Coastguard Worker #[cfg(test)]
402*bb4ee6a4SAndroid Build Coastguard Worker mod tests {
403*bb4ee6a4SAndroid Build Coastguard Worker use std::io::Write;
404*bb4ee6a4SAndroid Build Coastguard Worker use std::sync::Arc;
405*bb4ee6a4SAndroid Build Coastguard Worker use std::sync::Barrier;
406*bb4ee6a4SAndroid Build Coastguard Worker use std::thread::spawn;
407*bb4ee6a4SAndroid Build Coastguard Worker
408*bb4ee6a4SAndroid Build Coastguard Worker use super::*;
409*bb4ee6a4SAndroid Build Coastguard Worker
    /// Test helper: a heap-allocated, zero-initialized byte buffer implementing
    /// `VolatileMemory`. `Arc` makes it cheap to clone across test threads.
    #[derive(Clone)]
    struct VecMem {
        // Shared backing storage; never resized after construction.
        mem: Arc<Vec<u8>>,
    }

    impl VecMem {
        /// Allocates a new zeroed buffer of `size` bytes.
        fn new(size: usize) -> VecMem {
            VecMem {
                mem: Arc::new(vec![0u8; size]),
            }
        }
    }
422*bb4ee6a4SAndroid Build Coastguard Worker
423*bb4ee6a4SAndroid Build Coastguard Worker impl VolatileMemory for VecMem {
get_slice(&self, offset: usize, count: usize) -> Result<VolatileSlice>424*bb4ee6a4SAndroid Build Coastguard Worker fn get_slice(&self, offset: usize, count: usize) -> Result<VolatileSlice> {
425*bb4ee6a4SAndroid Build Coastguard Worker let mem_end = offset
426*bb4ee6a4SAndroid Build Coastguard Worker .checked_add(count)
427*bb4ee6a4SAndroid Build Coastguard Worker .ok_or(VolatileMemoryError::Overflow {
428*bb4ee6a4SAndroid Build Coastguard Worker base: offset,
429*bb4ee6a4SAndroid Build Coastguard Worker offset: count,
430*bb4ee6a4SAndroid Build Coastguard Worker })?;
431*bb4ee6a4SAndroid Build Coastguard Worker if mem_end > self.mem.len() {
432*bb4ee6a4SAndroid Build Coastguard Worker return Err(Error::OutOfBounds { addr: mem_end });
433*bb4ee6a4SAndroid Build Coastguard Worker }
434*bb4ee6a4SAndroid Build Coastguard Worker
435*bb4ee6a4SAndroid Build Coastguard Worker let new_addr = (self.mem.as_ptr() as usize).checked_add(offset).ok_or(
436*bb4ee6a4SAndroid Build Coastguard Worker VolatileMemoryError::Overflow {
437*bb4ee6a4SAndroid Build Coastguard Worker base: self.mem.as_ptr() as usize,
438*bb4ee6a4SAndroid Build Coastguard Worker offset,
439*bb4ee6a4SAndroid Build Coastguard Worker },
440*bb4ee6a4SAndroid Build Coastguard Worker )?;
441*bb4ee6a4SAndroid Build Coastguard Worker
442*bb4ee6a4SAndroid Build Coastguard Worker Ok(
443*bb4ee6a4SAndroid Build Coastguard Worker // SAFETY: trivially safe
444*bb4ee6a4SAndroid Build Coastguard Worker unsafe { VolatileSlice::from_raw_parts(new_addr as *mut u8, count) },
445*bb4ee6a4SAndroid Build Coastguard Worker )
446*bb4ee6a4SAndroid Build Coastguard Worker }
447*bb4ee6a4SAndroid Build Coastguard Worker }
448*bb4ee6a4SAndroid Build Coastguard Worker
449*bb4ee6a4SAndroid Build Coastguard Worker #[test]
observe_mutate()450*bb4ee6a4SAndroid Build Coastguard Worker fn observe_mutate() {
451*bb4ee6a4SAndroid Build Coastguard Worker let a = VecMem::new(1);
452*bb4ee6a4SAndroid Build Coastguard Worker let a_clone = a.clone();
453*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(0, 1).unwrap().write_bytes(99);
454*bb4ee6a4SAndroid Build Coastguard Worker
455*bb4ee6a4SAndroid Build Coastguard Worker let start_barrier = Arc::new(Barrier::new(2));
456*bb4ee6a4SAndroid Build Coastguard Worker let thread_start_barrier = start_barrier.clone();
457*bb4ee6a4SAndroid Build Coastguard Worker let end_barrier = Arc::new(Barrier::new(2));
458*bb4ee6a4SAndroid Build Coastguard Worker let thread_end_barrier = end_barrier.clone();
459*bb4ee6a4SAndroid Build Coastguard Worker spawn(move || {
460*bb4ee6a4SAndroid Build Coastguard Worker thread_start_barrier.wait();
461*bb4ee6a4SAndroid Build Coastguard Worker a_clone.get_slice(0, 1).unwrap().write_bytes(0);
462*bb4ee6a4SAndroid Build Coastguard Worker thread_end_barrier.wait();
463*bb4ee6a4SAndroid Build Coastguard Worker });
464*bb4ee6a4SAndroid Build Coastguard Worker
465*bb4ee6a4SAndroid Build Coastguard Worker let mut byte = [0u8; 1];
466*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(0, 1).unwrap().copy_to(&mut byte);
467*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(byte[0], 99);
468*bb4ee6a4SAndroid Build Coastguard Worker
469*bb4ee6a4SAndroid Build Coastguard Worker start_barrier.wait();
470*bb4ee6a4SAndroid Build Coastguard Worker end_barrier.wait();
471*bb4ee6a4SAndroid Build Coastguard Worker
472*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(0, 1).unwrap().copy_to(&mut byte);
473*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(byte[0], 0);
474*bb4ee6a4SAndroid Build Coastguard Worker }
475*bb4ee6a4SAndroid Build Coastguard Worker
476*bb4ee6a4SAndroid Build Coastguard Worker #[test]
slice_size()477*bb4ee6a4SAndroid Build Coastguard Worker fn slice_size() {
478*bb4ee6a4SAndroid Build Coastguard Worker let a = VecMem::new(100);
479*bb4ee6a4SAndroid Build Coastguard Worker let s = a.get_slice(0, 27).unwrap();
480*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(s.size(), 27);
481*bb4ee6a4SAndroid Build Coastguard Worker
482*bb4ee6a4SAndroid Build Coastguard Worker let s = a.get_slice(34, 27).unwrap();
483*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(s.size(), 27);
484*bb4ee6a4SAndroid Build Coastguard Worker
485*bb4ee6a4SAndroid Build Coastguard Worker let s = s.get_slice(20, 5).unwrap();
486*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(s.size(), 5);
487*bb4ee6a4SAndroid Build Coastguard Worker }
488*bb4ee6a4SAndroid Build Coastguard Worker
489*bb4ee6a4SAndroid Build Coastguard Worker #[test]
slice_overflow_error()490*bb4ee6a4SAndroid Build Coastguard Worker fn slice_overflow_error() {
491*bb4ee6a4SAndroid Build Coastguard Worker let a = VecMem::new(1);
492*bb4ee6a4SAndroid Build Coastguard Worker let res = a.get_slice(usize::MAX, 1).unwrap_err();
493*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(
494*bb4ee6a4SAndroid Build Coastguard Worker res,
495*bb4ee6a4SAndroid Build Coastguard Worker Error::Overflow {
496*bb4ee6a4SAndroid Build Coastguard Worker base: usize::MAX,
497*bb4ee6a4SAndroid Build Coastguard Worker offset: 1,
498*bb4ee6a4SAndroid Build Coastguard Worker }
499*bb4ee6a4SAndroid Build Coastguard Worker );
500*bb4ee6a4SAndroid Build Coastguard Worker }
501*bb4ee6a4SAndroid Build Coastguard Worker
502*bb4ee6a4SAndroid Build Coastguard Worker #[test]
slice_oob_error()503*bb4ee6a4SAndroid Build Coastguard Worker fn slice_oob_error() {
504*bb4ee6a4SAndroid Build Coastguard Worker let a = VecMem::new(100);
505*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(50, 50).unwrap();
506*bb4ee6a4SAndroid Build Coastguard Worker let res = a.get_slice(55, 50).unwrap_err();
507*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(res, Error::OutOfBounds { addr: 105 });
508*bb4ee6a4SAndroid Build Coastguard Worker }
509*bb4ee6a4SAndroid Build Coastguard Worker
510*bb4ee6a4SAndroid Build Coastguard Worker #[test]
is_all_zero_16bytes_aligned()511*bb4ee6a4SAndroid Build Coastguard Worker fn is_all_zero_16bytes_aligned() {
512*bb4ee6a4SAndroid Build Coastguard Worker let a = VecMem::new(1024);
513*bb4ee6a4SAndroid Build Coastguard Worker let slice = a.get_slice(0, 1024).unwrap();
514*bb4ee6a4SAndroid Build Coastguard Worker
515*bb4ee6a4SAndroid Build Coastguard Worker assert!(slice.is_all_zero());
516*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(129, 1).unwrap().write_bytes(1);
517*bb4ee6a4SAndroid Build Coastguard Worker assert!(!slice.is_all_zero());
518*bb4ee6a4SAndroid Build Coastguard Worker }
519*bb4ee6a4SAndroid Build Coastguard Worker
520*bb4ee6a4SAndroid Build Coastguard Worker #[test]
is_all_zero_head_not_aligned()521*bb4ee6a4SAndroid Build Coastguard Worker fn is_all_zero_head_not_aligned() {
522*bb4ee6a4SAndroid Build Coastguard Worker let a = VecMem::new(1024);
523*bb4ee6a4SAndroid Build Coastguard Worker let slice = a.get_slice(1, 1023).unwrap();
524*bb4ee6a4SAndroid Build Coastguard Worker
525*bb4ee6a4SAndroid Build Coastguard Worker assert!(slice.is_all_zero());
526*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(0, 1).unwrap().write_bytes(1);
527*bb4ee6a4SAndroid Build Coastguard Worker assert!(slice.is_all_zero());
528*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(1, 1).unwrap().write_bytes(1);
529*bb4ee6a4SAndroid Build Coastguard Worker assert!(!slice.is_all_zero());
530*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(1, 1).unwrap().write_bytes(0);
531*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(129, 1).unwrap().write_bytes(1);
532*bb4ee6a4SAndroid Build Coastguard Worker assert!(!slice.is_all_zero());
533*bb4ee6a4SAndroid Build Coastguard Worker }
534*bb4ee6a4SAndroid Build Coastguard Worker
535*bb4ee6a4SAndroid Build Coastguard Worker #[test]
is_all_zero_tail_not_aligned()536*bb4ee6a4SAndroid Build Coastguard Worker fn is_all_zero_tail_not_aligned() {
537*bb4ee6a4SAndroid Build Coastguard Worker let a = VecMem::new(1024);
538*bb4ee6a4SAndroid Build Coastguard Worker let slice = a.get_slice(0, 1023).unwrap();
539*bb4ee6a4SAndroid Build Coastguard Worker
540*bb4ee6a4SAndroid Build Coastguard Worker assert!(slice.is_all_zero());
541*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(1023, 1).unwrap().write_bytes(1);
542*bb4ee6a4SAndroid Build Coastguard Worker assert!(slice.is_all_zero());
543*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(1022, 1).unwrap().write_bytes(1);
544*bb4ee6a4SAndroid Build Coastguard Worker assert!(!slice.is_all_zero());
545*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(1022, 1).unwrap().write_bytes(0);
546*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(0, 1).unwrap().write_bytes(1);
547*bb4ee6a4SAndroid Build Coastguard Worker assert!(!slice.is_all_zero());
548*bb4ee6a4SAndroid Build Coastguard Worker }
549*bb4ee6a4SAndroid Build Coastguard Worker
550*bb4ee6a4SAndroid Build Coastguard Worker #[test]
is_all_zero_no_aligned_16bytes()551*bb4ee6a4SAndroid Build Coastguard Worker fn is_all_zero_no_aligned_16bytes() {
552*bb4ee6a4SAndroid Build Coastguard Worker let a = VecMem::new(1024);
553*bb4ee6a4SAndroid Build Coastguard Worker let slice = a.get_slice(1, 16).unwrap();
554*bb4ee6a4SAndroid Build Coastguard Worker
555*bb4ee6a4SAndroid Build Coastguard Worker assert!(slice.is_all_zero());
556*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(0, 1).unwrap().write_bytes(1);
557*bb4ee6a4SAndroid Build Coastguard Worker assert!(slice.is_all_zero());
558*bb4ee6a4SAndroid Build Coastguard Worker for i in 1..17 {
559*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(i, 1).unwrap().write_bytes(1);
560*bb4ee6a4SAndroid Build Coastguard Worker assert!(!slice.is_all_zero());
561*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(i, 1).unwrap().write_bytes(0);
562*bb4ee6a4SAndroid Build Coastguard Worker }
563*bb4ee6a4SAndroid Build Coastguard Worker a.get_slice(17, 1).unwrap().write_bytes(1);
564*bb4ee6a4SAndroid Build Coastguard Worker assert!(slice.is_all_zero());
565*bb4ee6a4SAndroid Build Coastguard Worker }
566*bb4ee6a4SAndroid Build Coastguard Worker
567*bb4ee6a4SAndroid Build Coastguard Worker #[test]
write_partial()568*bb4ee6a4SAndroid Build Coastguard Worker fn write_partial() {
569*bb4ee6a4SAndroid Build Coastguard Worker let mem = VecMem::new(1024);
570*bb4ee6a4SAndroid Build Coastguard Worker let mut slice = mem.get_slice(1, 16).unwrap();
571*bb4ee6a4SAndroid Build Coastguard Worker slice.write_bytes(0xCC);
572*bb4ee6a4SAndroid Build Coastguard Worker
573*bb4ee6a4SAndroid Build Coastguard Worker // Writing 4 bytes should succeed and advance the slice by 4 bytes.
574*bb4ee6a4SAndroid Build Coastguard Worker let write_len = slice.write(&[1, 2, 3, 4]).unwrap();
575*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(write_len, 4);
576*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(slice.size(), 16 - 4);
577*bb4ee6a4SAndroid Build Coastguard Worker
578*bb4ee6a4SAndroid Build Coastguard Worker // The written data should appear in the memory at offset 1.
579*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(mem.mem[1..=4], [1, 2, 3, 4]);
580*bb4ee6a4SAndroid Build Coastguard Worker
581*bb4ee6a4SAndroid Build Coastguard Worker // The next byte of the slice should be unmodified.
582*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(mem.mem[5], 0xCC);
583*bb4ee6a4SAndroid Build Coastguard Worker }
584*bb4ee6a4SAndroid Build Coastguard Worker
585*bb4ee6a4SAndroid Build Coastguard Worker #[test]
write_multiple()586*bb4ee6a4SAndroid Build Coastguard Worker fn write_multiple() {
587*bb4ee6a4SAndroid Build Coastguard Worker let mem = VecMem::new(1024);
588*bb4ee6a4SAndroid Build Coastguard Worker let mut slice = mem.get_slice(1, 16).unwrap();
589*bb4ee6a4SAndroid Build Coastguard Worker slice.write_bytes(0xCC);
590*bb4ee6a4SAndroid Build Coastguard Worker
591*bb4ee6a4SAndroid Build Coastguard Worker // Writing 4 bytes should succeed and advance the slice by 4 bytes.
592*bb4ee6a4SAndroid Build Coastguard Worker let write_len = slice.write(&[1, 2, 3, 4]).unwrap();
593*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(write_len, 4);
594*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(slice.size(), 16 - 4);
595*bb4ee6a4SAndroid Build Coastguard Worker
596*bb4ee6a4SAndroid Build Coastguard Worker // The next byte of the slice should be unmodified.
597*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(mem.mem[5], 0xCC);
598*bb4ee6a4SAndroid Build Coastguard Worker
599*bb4ee6a4SAndroid Build Coastguard Worker // Writing another 4 bytes should succeed and advance the slice again.
600*bb4ee6a4SAndroid Build Coastguard Worker let write2_len = slice.write(&[5, 6, 7, 8]).unwrap();
601*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(write2_len, 4);
602*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(slice.size(), 16 - 4 - 4);
603*bb4ee6a4SAndroid Build Coastguard Worker
604*bb4ee6a4SAndroid Build Coastguard Worker // The written data should appear in the memory at offset 1.
605*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(mem.mem[1..=8], [1, 2, 3, 4, 5, 6, 7, 8]);
606*bb4ee6a4SAndroid Build Coastguard Worker
607*bb4ee6a4SAndroid Build Coastguard Worker // The next byte of the slice should be unmodified.
608*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(mem.mem[9], 0xCC);
609*bb4ee6a4SAndroid Build Coastguard Worker }
610*bb4ee6a4SAndroid Build Coastguard Worker
611*bb4ee6a4SAndroid Build Coastguard Worker #[test]
write_exact_slice_size()612*bb4ee6a4SAndroid Build Coastguard Worker fn write_exact_slice_size() {
613*bb4ee6a4SAndroid Build Coastguard Worker let mem = VecMem::new(1024);
614*bb4ee6a4SAndroid Build Coastguard Worker let mut slice = mem.get_slice(1, 4).unwrap();
615*bb4ee6a4SAndroid Build Coastguard Worker slice.write_bytes(0xCC);
616*bb4ee6a4SAndroid Build Coastguard Worker
617*bb4ee6a4SAndroid Build Coastguard Worker // Writing 4 bytes should succeed and consume the entire slice.
618*bb4ee6a4SAndroid Build Coastguard Worker let write_len = slice.write(&[1, 2, 3, 4]).unwrap();
619*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(write_len, 4);
620*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(slice.size(), 0);
621*bb4ee6a4SAndroid Build Coastguard Worker
622*bb4ee6a4SAndroid Build Coastguard Worker // The written data should appear in the memory at offset 1.
623*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(mem.mem[1..=4], [1, 2, 3, 4]);
624*bb4ee6a4SAndroid Build Coastguard Worker
625*bb4ee6a4SAndroid Build Coastguard Worker // The byte after the slice should be unmodified.
626*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(mem.mem[5], 0);
627*bb4ee6a4SAndroid Build Coastguard Worker }
628*bb4ee6a4SAndroid Build Coastguard Worker
629*bb4ee6a4SAndroid Build Coastguard Worker #[test]
write_more_than_slice_size()630*bb4ee6a4SAndroid Build Coastguard Worker fn write_more_than_slice_size() {
631*bb4ee6a4SAndroid Build Coastguard Worker let mem = VecMem::new(1024);
632*bb4ee6a4SAndroid Build Coastguard Worker let mut slice = mem.get_slice(1, 4).unwrap();
633*bb4ee6a4SAndroid Build Coastguard Worker slice.write_bytes(0xCC);
634*bb4ee6a4SAndroid Build Coastguard Worker
635*bb4ee6a4SAndroid Build Coastguard Worker // Attempting to write 5 bytes should succeed but only write 4 bytes.
636*bb4ee6a4SAndroid Build Coastguard Worker let write_len = slice.write(&[1, 2, 3, 4, 5]).unwrap();
637*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(write_len, 4);
638*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(slice.size(), 0);
639*bb4ee6a4SAndroid Build Coastguard Worker
640*bb4ee6a4SAndroid Build Coastguard Worker // The written data should appear in the memory at offset 1.
641*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(mem.mem[1..=4], [1, 2, 3, 4]);
642*bb4ee6a4SAndroid Build Coastguard Worker
643*bb4ee6a4SAndroid Build Coastguard Worker // The byte after the slice should be unmodified.
644*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(mem.mem[5], 0);
645*bb4ee6a4SAndroid Build Coastguard Worker }
646*bb4ee6a4SAndroid Build Coastguard Worker
647*bb4ee6a4SAndroid Build Coastguard Worker #[test]
write_empty_slice()648*bb4ee6a4SAndroid Build Coastguard Worker fn write_empty_slice() {
649*bb4ee6a4SAndroid Build Coastguard Worker let mem = VecMem::new(1024);
650*bb4ee6a4SAndroid Build Coastguard Worker let mut slice = mem.get_slice(1, 0).unwrap();
651*bb4ee6a4SAndroid Build Coastguard Worker
652*bb4ee6a4SAndroid Build Coastguard Worker // Writing to an empty slice should always return 0.
653*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(slice.write(&[1, 2, 3, 4]).unwrap(), 0);
654*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(slice.write(&[5, 6, 7, 8]).unwrap(), 0);
655*bb4ee6a4SAndroid Build Coastguard Worker assert_eq!(slice.write(&[]).unwrap(), 0);
656*bb4ee6a4SAndroid Build Coastguard Worker }
657*bb4ee6a4SAndroid Build Coastguard Worker }
658