use std::fmt;
use std::fmt::Formatter;
use std::mem::MaybeUninit;
use std::slice;

use crate::misc::maybe_uninit_write_slice;

pub(crate) struct OutputBuffer {
    // The actual buffer is owned by `OutputTarget`,
    // and here we alias that buffer so access to it is branchless:
    // access does not require a switch on the actual target type: `&[u8]`, `Vec`, `Write` etc.
    // We don't access the actual buffer in `OutputTarget` except when
    // we initialize the `buffer` field here.
    buffer: *mut [MaybeUninit<u8>],
    /// Position within the buffer.
    /// Always correct.
    pos_within_buf: usize,
}

impl fmt::Debug for OutputBuffer {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.debug_struct("OutputBuffer")
            .field("buffer.len", &self.buffer().len())
            .field("pos_within_buf", &self.pos_within_buf)
            .finish()
    }
}

impl OutputBuffer {
    #[inline]
    pub(crate) fn new(buffer: *mut [MaybeUninit<u8>]) -> OutputBuffer {
        Self {
            buffer,
            pos_within_buf: 0,
        }
    }

    /// Whole buffer: written data + unwritten data.
    #[inline]
    pub(crate) fn buffer(&self) -> &[MaybeUninit<u8>] {
        unsafe { &*self.buffer }
    }

    #[inline]
    fn buffer_mut(&mut self) -> &mut [MaybeUninit<u8>] {
        unsafe { &mut *self.buffer }
    }

    #[inline]
    pub(crate) fn pos_within_buf(&self) -> usize {
        self.pos_within_buf
    }

    /// Written (initialized) prefix of the buffer.
    #[inline]
    pub(crate) fn filled(&self) -> &[u8] {
        // SAFETY: the type invariant is that data is filled up to `pos_within_buf`.
        unsafe { slice::from_raw_parts(self.buffer as *const u8, self.pos_within_buf) }
    }

    /// Remaining (possibly uninitialized) part of the buffer.
    #[inline]
    pub(crate) fn unfilled(&mut self) -> &mut [MaybeUninit<u8>] {
        // SAFETY: the type invariant is that `pos_within_buf` does not exceed the buffer length.
        let pos_within_buf = self.pos_within_buf;
        unsafe { self.buffer_mut().get_unchecked_mut(pos_within_buf..) }
    }

    #[inline]
    pub(crate) fn unfilled_len(&self) -> usize {
        self.buffer().len() - self.pos_within_buf
    }

    /// Mark `n` more bytes as written.
    ///
    /// # Safety
    ///
    /// The caller must have initialized the next `n` bytes of the unfilled part.
    #[inline]
    pub(crate) unsafe fn advance(&mut self, n: usize) {
        debug_assert!(n <= self.unfilled_len());
        self.pos_within_buf += n;
    }

    #[inline]
    pub(crate) fn rewind(&mut self) {
        self.pos_within_buf = 0;
    }

    #[inline]
    pub(crate) fn replace_buffer_keep_pos(&mut self, buffer: *mut [MaybeUninit<u8>]) {
        unsafe {
            assert!(self.pos_within_buf <= (&*buffer).len());
        }
        self.buffer = buffer;
    }

    /// Write a single byte.
    ///
    /// # Safety
    ///
    /// The caller must ensure that at least one byte of the buffer is unfilled.
    #[inline]
    pub(crate) unsafe fn write_byte(&mut self, b: u8) {
        debug_assert!(self.unfilled_len() >= 1);
        // SAFETY: caller is responsible for ensuring that the byte fits in the buffer.
        let pos_within_buf = self.pos_within_buf;
        self.buffer_mut().get_unchecked_mut(pos_within_buf).write(b);
        self.pos_within_buf += 1;
    }

    /// Write a slice of bytes.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `bytes` fits in the unfilled part of the buffer.
    #[inline]
    pub(crate) unsafe fn write_bytes(&mut self, bytes: &[u8]) {
        debug_assert!(self.unfilled_len() >= bytes.len());
        let bottom = self.pos_within_buf;
        let top = bottom + bytes.len();
        // SAFETY: caller is responsible for ensuring that `bytes` fits in the buffer.
        let buffer = self.buffer_mut().get_unchecked_mut(bottom..top);
        maybe_uninit_write_slice(buffer, bytes);
        self.pos_within_buf += bytes.len();
    }
}
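
// A minimal usage sketch, not part of the original module: it exercises the
// API above against locally owned storage, assuming the crate's usual
// `cfg(test)` setup. The test name `write_and_rewind` is hypothetical.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn write_and_rewind() {
        // The caller owns the storage; `OutputBuffer` merely aliases it.
        let mut storage: Vec<MaybeUninit<u8>> = vec![MaybeUninit::uninit(); 16];
        let mut buf = OutputBuffer::new(storage.as_mut_slice() as *mut [MaybeUninit<u8>]);
        assert_eq!(16, buf.unfilled_len());

        // SAFETY: 3 bytes fit in the 16-byte buffer.
        unsafe {
            buf.write_byte(b'a');
            buf.write_bytes(b"bc");
        }

        assert_eq!(3, buf.pos_within_buf());
        assert_eq!(&b"abc"[..], buf.filled());
        assert_eq!(13, buf.unfilled_len());

        buf.rewind();
        assert_eq!(0, buf.pos_within_buf());
    }
}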