//! Fixed size buffer for block processing of data.
#![no_std]
#![doc(
    html_logo_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg",
    html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg"
)]
#![warn(missing_docs)]

pub use hybrid_array as array;

use array::{Array, ArraySize, typenum::Sum};
use core::{fmt, mem::MaybeUninit, ptr, slice};

#[cfg(feature = "zeroize")]
use zeroize::{Zeroize, ZeroizeOnDrop};

mod read;
mod sealed;

pub use read::ReadBuffer;

/// Trait for buffer kinds.
pub trait BufferKind: sealed::Sealed {}

/// Eager block buffer kind, which guarantees that the buffer position
/// always lies in the `0..BlockSize` range.
#[derive(Copy, Clone, Debug, Default)]
pub struct Eager {}

/// Lazy block buffer kind, which guarantees that the buffer position
/// always lies in the `0..=BlockSize` range.
#[derive(Copy, Clone, Debug, Default)]
pub struct Lazy {}

impl BufferKind for Eager {}

impl BufferKind for Lazy {}

/// Eager block buffer.
pub type EagerBuffer<B> = BlockBuffer<B, Eager>;

/// Lazy block buffer.
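///
/// # Examples
///
/// An illustrative sketch of the difference from [`EagerBuffer`]: a lazy
/// buffer keeps a trailing complete block until more data arrives, while an
/// eager buffer compresses it immediately (the `U4` block size and the
/// `block_buffer` crate name are assumptions of this example):
///
/// ```
/// use block_buffer::{EagerBuffer, LazyBuffer, array::typenum::U4};
///
/// let mut n_blocks = 0;
/// let mut lazy = LazyBuffer::<U4>::default();
/// lazy.digest_blocks(b"abcdefgh", |blocks| n_blocks += blocks.len());
/// assert_eq!(n_blocks, 1);
/// assert_eq!(lazy.get_data(), b"efgh");
///
/// let mut n_blocks = 0;
/// let mut eager = EagerBuffer::<U4>::default();
/// eager.digest_blocks(b"abcdefgh", |blocks| n_blocks += blocks.len());
/// assert_eq!(n_blocks, 2);
/// assert!(eager.get_data().is_empty());
/// ```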
pub type LazyBuffer<B> = BlockBuffer<B, Lazy>;

/// Block buffer error.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct Error;

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        f.write_str("Block buffer error")
    }
}

/// Buffer for block processing of data.
pub struct BlockBuffer<BS: ArraySize, K: BufferKind> {
    buffer: MaybeUninit<Array<u8, BS>>,
    pos: K::Pos,
}

impl<BS: ArraySize, K: BufferKind> BlockBuffer<BS, K> {
    /// Compile-time assertion that the block size lies in the `1..=255` range.
    const BLOCK_SIZE_ASSERT: bool = {
        if BS::USIZE == 0 {
            panic!("Block size can not be equal to zero!");
        }
        if BS::USIZE > 255 {
            panic!("Block size can not be bigger than 255!");
        }
        true
    };
}

impl<BS: ArraySize, K: BufferKind> Default for BlockBuffer<BS, K> {
    #[inline]
    fn default() -> Self {
        assert!(Self::BLOCK_SIZE_ASSERT);
        let mut buffer = MaybeUninit::uninit();
        let mut pos = Default::default();
        K::set_pos(&mut buffer, &mut pos, 0);
        Self { buffer, pos }
    }
}

impl<BS: ArraySize, K: BufferKind> Clone for BlockBuffer<BS, K> {
    #[inline]
    fn clone(&self) -> Self {
        // SAFETY: the type contains only plain data behind `MaybeUninit`,
        // so a bitwise copy is sound; the `Drop` impl only zeroizes memory,
        // which both copies may do independently.
        unsafe { ptr::read(self) }
    }
}

impl<BS: ArraySize, K: BufferKind> fmt::Debug for BlockBuffer<BS, K> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        f.debug_struct(K::NAME)
            .field("pos", &self.get_pos())
            .field("block_size", &BS::USIZE)
            .field("data", &self.get_data())
            .finish()
    }
}

impl<BS: ArraySize, K: BufferKind> BlockBuffer<BS, K> {
    /// Create a new buffer from the provided slice.
    ///
    /// # Panics
    /// If the slice length is not valid for the used buffer kind.
    #[inline(always)]
    pub fn new(buf: &[u8]) -> Self {
        Self::try_new(buf).unwrap()
    }

    /// Create a new buffer from the provided slice.
    ///
    /// Returns an error if the slice length is not valid for the used buffer kind.
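    ///
    /// # Examples
    ///
    /// A sketch of the kind invariants (the `U4` block size and the
    /// `block_buffer` crate name are illustrative assumptions):
    ///
    /// ```
    /// use block_buffer::{EagerBuffer, LazyBuffer, array::typenum::U4};
    ///
    /// // An eager buffer must keep its position strictly below the block
    /// // size, so a full block is rejected...
    /// assert!(EagerBuffer::<U4>::try_new(b"abcd").is_err());
    /// // ...while a lazy buffer may hold exactly one full block.
    /// assert!(LazyBuffer::<U4>::try_new(b"abcd").is_ok());
    /// ```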
    #[inline(always)]
    pub fn try_new(buf: &[u8]) -> Result<Self, Error> {
        assert!(Self::BLOCK_SIZE_ASSERT);
        if !K::invariant(buf.len(), BS::USIZE) {
            return Err(Error);
        }
        let mut res = Self::default();
        unsafe {
            // SAFETY: `buf` length was checked against the kind invariant above
            res.set_data_unchecked(buf);
        }
        Ok(res)
    }

    /// Digest data in `input` in blocks of size `BS` using
    /// the `compress` function, which accepts a slice of blocks.
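    ///
    /// # Examples
    ///
    /// An illustrative sketch of how input is split between the `compress`
    /// callback and the internal buffer (`U4` is an arbitrary block size and
    /// `block_buffer` an assumed crate name):
    ///
    /// ```
    /// use block_buffer::{EagerBuffer, array::typenum::U4};
    ///
    /// let mut buf = EagerBuffer::<U4>::default();
    /// let mut compressed = Vec::new();
    /// buf.digest_blocks(b"abcdefghij", |blocks| {
    ///     for block in blocks {
    ///         compressed.extend_from_slice(block);
    ///     }
    /// });
    /// // two complete blocks were compressed, the rest is buffered
    /// assert_eq!(compressed, b"abcdefgh");
    /// assert_eq!(buf.get_data(), b"ij");
    /// ```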
    #[inline]
    pub fn digest_blocks(&mut self, mut input: &[u8], mut compress: impl FnMut(&[Array<u8, BS>])) {
        let pos = self.get_pos();
        // using `self.remaining()` for computing `rem`
        // results in significantly worse codegen
        let rem = self.size() - pos;
        let n = input.len();
        // If the input fits into the internal buffer without triggering
        // block compression, just copy it and update the position.
        if K::invariant(n, rem) {
            unsafe {
                // SAFETY: `n` satisfies the kind invariant for the remaining
                // space, so `pos + n <= BS` and the copy stays in bounds
                let buf_ptr = self.buffer.as_mut_ptr().cast::<u8>().add(pos);
                ptr::copy_nonoverlapping(input.as_ptr(), buf_ptr, input.len());
                self.set_pos_unchecked(pos + input.len());
            }
            return;
        }
        if pos != 0 {
            let (left, right) = input.split_at(rem);
            input = right;
            let block = unsafe {
                // SAFETY: `left.len() == rem`, so after this copy the first
                // `pos + rem == BS` bytes of the buffer are initialized,
                // which makes `assume_init_ref` sound
                let buf_ptr = self.buffer.as_mut_ptr().cast::<u8>().add(pos);
                ptr::copy_nonoverlapping(left.as_ptr(), buf_ptr, left.len());
                self.buffer.assume_init_ref()
            };
            compress(slice::from_ref(block));
        }

        let (blocks, leftover) = K::split_blocks(input);
        if !blocks.is_empty() {
            compress(blocks);
        }

        unsafe {
            // SAFETY: `split_blocks` guarantees that the `leftover` length
            // satisfies the kind invariant
            self.set_data_unchecked(leftover);
        }
    }

    /// Reset the buffer by setting the cursor position to zero.
    #[inline(always)]
    pub fn reset(&mut self) {
        unsafe {
            // SAFETY: a zero position is valid for both buffer kinds
            self.set_pos_unchecked(0);
        }
    }

    /// Pad remaining data with zeros and return the resulting block.
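    ///
    /// # Examples
    ///
    /// A minimal sketch (the `U4` block size and the `block_buffer` crate
    /// name are assumptions of this example):
    ///
    /// ```
    /// use block_buffer::{EagerBuffer, array::typenum::U4};
    ///
    /// let mut buf = EagerBuffer::<U4>::new(b"ab");
    /// let block = buf.pad_with_zeros();
    /// assert_eq!(&block[..], b"ab\0\0");
    /// // the buffer is reset afterwards
    /// assert_eq!(buf.get_pos(), 0);
    /// ```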
    #[inline(always)]
    pub fn pad_with_zeros(&mut self) -> Array<u8, BS> {
        let mut res = Array::<u8, BS>::default();
        let data = self.get_data();
        res[..data.len()].copy_from_slice(data);
        self.reset();
        res
    }

    /// Return the current cursor position.
    #[inline(always)]
    pub fn get_pos(&self) -> usize {
        let pos = K::get_pos(&self.buffer, &self.pos);
        if !K::invariant(pos, BS::USIZE) {
            debug_assert!(false);
            unsafe {
                // SAFETY: the stored position always satisfies the kind
                // invariant; this hint removes bound checks in user code
                core::hint::unreachable_unchecked();
            }
        }
        pos
    }

    /// Return a slice of the data stored inside the buffer.
    #[inline(always)]
    pub fn get_data(&self) -> &[u8] {
        // SAFETY: the first `get_pos()` bytes of the buffer are always
        // initialized and the position can not exceed the buffer size
        unsafe { slice::from_raw_parts(self.buffer.as_ptr().cast(), self.get_pos()) }
    }

    /// Set buffer content and cursor position.
    ///
    /// # Panics
    /// If `pos` is not valid for the used buffer kind.
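    ///
    /// # Examples
    ///
    /// A sketch of restoring buffer state (the `U4` block size and the
    /// `block_buffer` crate name are assumptions of this example):
    ///
    /// ```
    /// use block_buffer::{EagerBuffer, array::{Array, typenum::U4}};
    ///
    /// let mut block = Array::<u8, U4>::default();
    /// block.copy_from_slice(&[1, 2, 3, 4]);
    ///
    /// let mut buf = EagerBuffer::<U4>::default();
    /// buf.set(block, 2);
    /// // only the first `pos` bytes count as buffered data
    /// assert_eq!(buf.get_data(), &[1, 2]);
    /// ```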
    #[inline]
    pub fn set(&mut self, buf: Array<u8, BS>, pos: usize) {
        assert!(K::invariant(pos, BS::USIZE));
        self.buffer = MaybeUninit::new(buf);
        unsafe {
            // SAFETY: `pos` was checked against the kind invariant above
            self.set_pos_unchecked(pos);
        }
    }

    /// Return the size of the internal buffer in bytes.
    #[inline(always)]
    pub fn size(&self) -> usize {
        BS::USIZE
    }

    /// Return the number of remaining bytes in the internal buffer.
    #[inline(always)]
    pub fn remaining(&self) -> usize {
        self.size() - self.get_pos()
    }

    /// Set the buffer position.
    ///
    /// # Safety
    /// Callers must ensure that `pos` satisfies the kind invariant, i.e.
    /// `K::invariant(pos, BS::USIZE)` must hold, and that the first `pos`
    /// bytes of the buffer are initialized.
    #[inline(always)]
    unsafe fn set_pos_unchecked(&mut self, pos: usize) {
        debug_assert!(K::invariant(pos, BS::USIZE));
        K::set_pos(&mut self.buffer, &mut self.pos, pos)
    }

    /// Set the buffer data.
    ///
    /// # Safety
    /// Callers must ensure that the `buf` length satisfies the kind
    /// invariant, i.e. `K::invariant(buf.len(), BS::USIZE)` must hold.
    #[inline(always)]
    unsafe fn set_data_unchecked(&mut self, buf: &[u8]) {
        unsafe {
            self.set_pos_unchecked(buf.len());
            let dst_ptr: *mut u8 = self.buffer.as_mut_ptr().cast();
            ptr::copy_nonoverlapping(buf.as_ptr(), dst_ptr, buf.len());
        }
    }
}

/// Size of a serialized buffer: the block size plus kind-specific overhead
/// used to store the cursor position.
pub type SerializedBufferSize<BS, K> = Sum<BS, <K as sealed::Sealed>::Overhead>;

/// Serialized buffer representation.
pub type SerializedBuffer<BS, K> = Array<u8, SerializedBufferSize<BS, K>>;

impl<BS: ArraySize, K: BufferKind> BlockBuffer<BS, K>
where
    BS: core::ops::Add<K::Overhead>,
    Sum<BS, K::Overhead>: ArraySize,
{
    /// Serialize the buffer into a byte array.
    pub fn serialize(&self) -> SerializedBuffer<BS, K> {
        let mut buf = SerializedBuffer::<BS, K>::default();
        let data = self.get_data();
        let (pos, block) = buf.split_at_mut(1);
        pos[0] = u8::try_from(data.len()).expect("buffer size is smaller than 256");
        block[..data.len()].copy_from_slice(data);
        buf
    }

    /// Deserialize a buffer from a byte array.
    ///
    /// Returns an error if the stored position breaks the kind invariant or
    /// if the unused buffer bytes are not zero.
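    ///
    /// # Examples
    ///
    /// A round-trip sketch (the `U8` block size and the `block_buffer` crate
    /// name are assumptions of this example):
    ///
    /// ```
    /// use block_buffer::{EagerBuffer, array::typenum::U8};
    ///
    /// let buf = EagerBuffer::<U8>::new(b"abc");
    /// let ser = buf.serialize();
    /// let restored = EagerBuffer::<U8>::deserialize(&ser).unwrap();
    /// assert_eq!(restored.get_data(), b"abc");
    /// ```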
    pub fn deserialize(buf: &SerializedBuffer<BS, K>) -> Result<Self, Error> {
        let (pos, block) = buf.split_at(1);
        let pos = usize::from(pos[0]);

        if !<K as sealed::Sealed>::invariant(pos, BS::USIZE) {
            return Err(Error);
        }

        let (data, tail) = block.split_at(pos);

        if tail.iter().any(|&b| b != 0) {
            return Err(Error);
        }

        let mut res = Self::default();
        // SAFETY: `pos` (and thus the `data` length) was checked against
        // the kind invariant above
        unsafe { res.set_data_unchecked(data) };
        Ok(res)
    }
}

impl<BS: ArraySize> BlockBuffer<BS, Eager> {
    /// Compress remaining data after padding it with `delim`, zeros and
    /// the `suffix`.
    ///
    /// # Panics
    /// If the `suffix` length is bigger than the block size.
    #[inline(always)]
    pub fn digest_pad(
        &mut self,
        delim: u8,
        suffix: &[u8],
        mut compress: impl FnMut(&Array<u8, BS>),
    ) {
        if suffix.len() > BS::USIZE {
            panic!("suffix is too long");
        }
        let pos = self.get_pos();
        let mut buf = self.pad_with_zeros();
        buf[pos] = delim;

        let n = self.size() - suffix.len();
        // If the delimiter byte does not leave enough space for the suffix,
        // the suffix is written into an additional block.
        if self.size() - pos - 1 < suffix.len() {
            compress(&buf);
            buf.fill(0);
            buf[n..].copy_from_slice(suffix);
            compress(&buf);
        } else {
            buf[n..].copy_from_slice(suffix);
            compress(&buf);
        }
        self.reset();
    }

    /// Pad the message with 0x80, zeros and the 64-bit message length
    /// in big-endian byte order.
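    ///
    /// # Examples
    ///
    /// A sketch of Merkle-Damgård-style finalization as used by SHA-256-like
    /// hashes (the `U64` block size and the `block_buffer` crate name are
    /// assumptions of this example):
    ///
    /// ```
    /// use block_buffer::{EagerBuffer, array::typenum::U64};
    ///
    /// let msg = b"abc";
    /// let mut n_blocks = 0;
    /// let mut buf = EagerBuffer::<U64>::default();
    /// buf.digest_blocks(msg, |blocks| n_blocks += blocks.len());
    /// // the message length is passed in bits
    /// buf.len64_padding_be(8 * msg.len() as u64, |_block| n_blocks += 1);
    /// // "abc" plus 0x80, zeros and the length fit into one 64-byte block
    /// assert_eq!(n_blocks, 1);
    /// ```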
    #[inline]
    pub fn len64_padding_be(&mut self, data_len: u64, compress: impl FnMut(&Array<u8, BS>)) {
        self.digest_pad(0x80, &data_len.to_be_bytes(), compress);
    }

    /// Pad the message with 0x80, zeros and the 64-bit message length
    /// in little-endian byte order.
    #[inline]
    pub fn len64_padding_le(&mut self, data_len: u64, compress: impl FnMut(&Array<u8, BS>)) {
        self.digest_pad(0x80, &data_len.to_le_bytes(), compress);
    }

    /// Pad the message with 0x80, zeros and the 128-bit message length
    /// in big-endian byte order.
    #[inline]
    pub fn len128_padding_be(&mut self, data_len: u128, compress: impl FnMut(&Array<u8, BS>)) {
        self.digest_pad(0x80, &data_len.to_be_bytes(), compress);
    }
}

#[cfg(feature = "zeroize")]
impl<BS: ArraySize, K: BufferKind> Zeroize for BlockBuffer<BS, K> {
    #[inline]
    fn zeroize(&mut self) {
        self.buffer.zeroize();
        self.pos.zeroize();
    }
}

impl<BS: ArraySize, K: BufferKind> Drop for BlockBuffer<BS, K> {
    #[inline]
    fn drop(&mut self) {
        #[cfg(feature = "zeroize")]
        self.zeroize();
    }
}

#[cfg(feature = "zeroize")]
impl<BS: ArraySize, K: BufferKind> ZeroizeOnDrop for BlockBuffer<BS, K> {}