use core::convert::TryInto;
use core::convert::TryFrom;

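// Raw-pointer cursor over a borrowed byte slice: `start` marks the last commit
// point, `cursor` the current read position, and `end` one past the final byte,
// so `slice()` returns `[start, cursor)` while `as_ref()` exposes the remaining
// `[cursor, end)`.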
#[allow(missing_docs)]
pub struct Bytes<'a> {
    start: *const u8,
    end: *const u8,
    /// INVARIANT: start <= cursor && cursor <= end
    cursor: *const u8,
    phantom: core::marker::PhantomData<&'a ()>,
}

#[allow(missing_docs)]
impl<'a> Bytes<'a> {
    #[inline]
    pub fn new(slice: &'a [u8]) -> Bytes<'a> {
        let start = slice.as_ptr();
        // SAFETY: obtain pointer to slice end; start points to slice start.
        let end = unsafe { start.add(slice.len()) };
        let cursor = start;
        Bytes {
            start,
            end,
            cursor,
            phantom: core::marker::PhantomData,
        }
    }

    #[inline]
    pub fn pos(&self) -> usize {
        self.cursor as usize - self.start as usize
    }

    #[inline]
    pub fn peek(&self) -> Option<u8> {
        if self.cursor < self.end {
            // SAFETY: bounds checked
            Some(unsafe { *self.cursor })
        } else {
            None
        }
    }

    #[inline]
    pub fn peek_ahead(&self, n: usize) -> Option<u8> {
        // SAFETY: obtain a potentially OOB pointer that is later compared against the `self.end`
        // pointer.
        let ptr = self.cursor.wrapping_add(n);
        if ptr < self.end {
            // SAFETY: bounds checked pointer dereference is safe
            Some(unsafe { *ptr })
        } else {
            None
        }
    }

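    // Illustrative note, not from the upstream source: with the standard
    // library's `TryFrom<&[u8]> for [u8; N]` impl, `peek_n` can read a
    // fixed-size window without moving the cursor, e.g.
    // `bytes.peek_n::<[u8; 4]>(4)`.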
    #[inline]
    pub fn peek_n<'b: 'a, U: TryFrom<&'a [u8]>>(&'b self, n: usize) -> Option<U> {
        // TODO: once we bump MSRV, use const generics to allow only [u8; N] reads
        // TODO: drop `n` arg in favour of const
        // let n = core::mem::size_of::<U>();
        self.as_ref().get(..n)?.try_into().ok()
    }

    /// Advance by 1, equivalent to calling `advance(1)`.
    ///
    /// # Safety
    ///
    /// Caller must ensure that Bytes hasn't been advanced/bumped by more than [`Bytes::len()`].
    #[inline]
    pub unsafe fn bump(&mut self) {
        self.advance(1)
    }

    /// Advance cursor by `n`
    ///
    /// # Safety
    ///
    /// Caller must ensure that Bytes hasn't been advanced/bumped by more than [`Bytes::len()`].
    #[inline]
    pub unsafe fn advance(&mut self, n: usize) {
        self.cursor = self.cursor.add(n);
        debug_assert!(self.cursor <= self.end, "overflow");
    }
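    // Illustrative call pattern (an assumption, not from the upstream source):
    // callers uphold the contract by checking `len()` first, e.g.
    // `if bytes.len() >= 2 { unsafe { bytes.advance(2) } }`.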

    #[inline]
    pub fn len(&self) -> usize {
        self.end as usize - self.cursor as usize
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

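    /// Returns the bytes between the last commit point and the current cursor,
    /// then commits so that the next call to `slice` starts at the cursor.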
    #[inline]
    pub fn slice(&mut self) -> &'a [u8] {
        // SAFETY: not moving position at all, so it's safe
        let slice = unsafe { slice_from_ptr_range(self.start, self.cursor) };
        self.commit();
        slice
    }

    // TODO: this is an anti-pattern, should be removed
    /// Deprecated. Do not use!
    /// # Safety
    ///
    /// Caller must ensure that `skip` is at most the number of advances (i.e., `bytes.advance(3)`
    /// implies a skip of at most 3).
    #[inline]
    pub unsafe fn slice_skip(&mut self, skip: usize) -> &'a [u8] {
        debug_assert!(self.cursor.sub(skip) >= self.start);
        let head = slice_from_ptr_range(self.start, self.cursor.sub(skip));
        self.commit();
        head
    }

    #[inline]
    pub fn commit(&mut self) {
        self.start = self.cursor
    }

    /// # Safety
    ///
    /// see [`Bytes::advance`] safety comment.
    #[inline]
    pub unsafe fn advance_and_commit(&mut self, n: usize) {
        self.advance(n);
        self.commit();
    }

    #[inline]
    pub fn as_ptr(&self) -> *const u8 {
        self.cursor
    }

    #[inline]
    pub fn start(&self) -> *const u8 {
        self.start
    }

    #[inline]
    pub fn end(&self) -> *const u8 {
        self.end
    }

    /// # Safety
    ///
    /// Must ensure invariant `bytes.start() <= ptr && ptr <= bytes.end()`.
    #[inline]
    pub unsafe fn set_cursor(&mut self, ptr: *const u8) {
        debug_assert!(ptr >= self.start);
        debug_assert!(ptr <= self.end);
        self.cursor = ptr;
    }
}

impl<'a> AsRef<[u8]> for Bytes<'a> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        // SAFETY: not moving position at all, so it's safe
        unsafe { slice_from_ptr_range(self.cursor, self.end) }
    }
}

/// # Safety
///
/// Must ensure start and end point to the same memory object to uphold memory safety.
#[inline]
unsafe fn slice_from_ptr_range<'a>(start: *const u8, end: *const u8) -> &'a [u8] {
    debug_assert!(start <= end);
    core::slice::from_raw_parts(start, end as usize - start as usize)
}

impl<'a> Iterator for Bytes<'a> {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<u8> {
        if self.cursor < self.end {
            // SAFETY: bounds checked dereference
            unsafe {
                let b = *self.cursor;
                self.bump();
                Some(b)
            }
        } else {
            None
        }
    }
}
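
// A minimal usage sketch, not part of the upstream module: it exercises the
// cursor/commit API above to show how `advance`, `slice`, and `as_ref`
// interact. The buffer contents and assertions are illustrative assumptions.
#[cfg(test)]
mod tests {
    use super::Bytes;

    #[test]
    fn advance_slice_and_commit() {
        let mut bytes = Bytes::new(b"foo bar");
        assert_eq!(bytes.len(), 7);
        assert_eq!(bytes.peek(), Some(b'f'));
        assert_eq!(bytes.peek_ahead(4), Some(b'b'));

        // SAFETY: the buffer holds 7 bytes, so advancing by 3 stays within
        // `Bytes::len()`.
        unsafe { bytes.advance(3) };
        assert_eq!(bytes.pos(), 3);

        // `slice` returns everything between the last commit point and the
        // cursor, then commits so the next slice starts at the cursor.
        assert_eq!(bytes.slice(), b"foo");
        assert_eq!(bytes.pos(), 0);
        assert_eq!(bytes.as_ref(), b" bar");

        // The `Iterator` impl reads one byte at a time, bumping the cursor.
        assert_eq!(bytes.next(), Some(b' '));
        assert_eq!(bytes.len(), 3);
    }
}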