1#![no_std]
37#![doc(
38 html_logo_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg",
39 html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg"
40)]
41#![allow(clippy::undocumented_unsafe_blocks)] pub use hybrid_array as array;
44
45use array::{Array, ArraySize, typenum::Sum};
46use core::{fmt, mem::MaybeUninit, ptr, slice};
47
48#[cfg(feature = "zeroize")]
49use zeroize::{Zeroize, ZeroizeOnDrop};
50
51mod read;
52mod sealed;
53
54pub use read::ReadBuffer;
55
/// Trait implemented by supported block sizes.
///
/// Sealed via the private `sealed::BlockSizes` bound, so it cannot be
/// implemented outside this crate.
pub trait BlockSizes: ArraySize + sealed::BlockSizes {}

// Blanket impl: every array size satisfying the sealed bound is a valid block size.
impl<T: ArraySize + sealed::BlockSizes> BlockSizes for T {}
60
61pub trait BufferKind: sealed::Sealed {}
63
/// Marker type selecting the eager buffer kind; see [`EagerBuffer`].
#[derive(Copy, Clone, Debug, Default)]
pub struct Eager {}
68
/// Marker type selecting the lazy buffer kind; see [`LazyBuffer`].
#[derive(Copy, Clone, Debug, Default)]
pub struct Lazy {}
73
// The two buffer kinds provided by this crate.
impl BufferKind for Eager {}

impl BufferKind for Lazy {}
77
/// [`BlockBuffer`] specialized to the [`Eager`] kind.
pub type EagerBuffer<B> = BlockBuffer<B, Eager>;
/// [`BlockBuffer`] specialized to the [`Lazy`] kind.
pub type LazyBuffer<B> = BlockBuffer<B, Lazy>;
82
/// Error type returned by fallible buffer operations (e.g. [`BlockBuffer::try_new`]
/// and [`BlockBuffer::deserialize`]) when input violates the kind's invariant.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct Error;
86
87impl fmt::Display for Error {
88 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
89 f.write_str("Block buffer error")
90 }
91}
92
/// Buffer for partial-block accumulation during block processing of data.
///
/// `BS` is the block size and `K` selects the buffer kind ([`Eager`] or
/// [`Lazy`]).
pub struct BlockBuffer<BS: BlockSizes, K: BufferKind> {
    // Backing storage; only the first `get_pos()` bytes are guaranteed
    // initialized (see `get_data`).
    buffer: MaybeUninit<Array<u8, BS>>,
    // Kind-dependent position state. The effective position is read through
    // `K::get_pos(&buffer, &pos)`, so it may be partly derived from the
    // buffer itself — TODO(review): confirm encoding in the `sealed` module.
    pos: K::Pos,
}
98
impl<BS: BlockSizes, K: BufferKind> Default for BlockBuffer<BS, K> {
    /// Creates an empty buffer with position 0.
    #[inline]
    fn default() -> Self {
        let mut buffer = MaybeUninit::uninit();
        let mut pos = Default::default();
        // Delegate to the kind so any kind-specific position encoding
        // (which may involve `buffer` itself) is initialized consistently.
        K::set_pos(&mut buffer, &mut pos, 0);
        Self { buffer, pos }
    }
}
108
impl<BS: BlockSizes, K: BufferKind> Clone for BlockBuffer<BS, K> {
    #[inline]
    fn clone(&self) -> Self {
        // SAFETY: a bitwise copy is sound because both fields are plain data
        // (a `MaybeUninit` byte array and the kind's position state), and the
        // `Drop` impl below only zeroizes in place — duplicating the bytes
        // cannot cause a double-free. NOTE(review): this relies on `K::Pos`
        // containing no owning pointers — confirm against the `sealed` module.
        unsafe { ptr::read(self) }
    }
}
117
118impl<BS: BlockSizes, K: BufferKind> fmt::Debug for BlockBuffer<BS, K> {
119 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
120 f.debug_struct(K::NAME)
121 .field("pos", &self.get_pos())
122 .field("block_size", &BS::USIZE)
123 .field("data", &self.get_data())
124 .finish()
125 }
126}
127
impl<BS: BlockSizes, K: BufferKind> BlockBuffer<BS, K> {
    /// Creates a new buffer containing `buf`.
    ///
    /// # Panics
    /// If `buf.len()` violates this kind's position invariant
    /// (see [`Self::try_new`]).
    #[inline(always)]
    #[must_use]
    #[track_caller]
    pub fn new(buf: &[u8]) -> Self {
        Self::try_new(buf).expect("invalid slice length for buffer kind")
    }

    /// Creates a new buffer containing `buf`, returning [`Error`] if
    /// `buf.len()` violates this kind's position invariant for the block size.
    #[inline(always)]
    pub fn try_new(buf: &[u8]) -> Result<Self, Error> {
        if !K::invariant(buf.len(), BS::USIZE) {
            return Err(Error);
        }
        let mut res = Self::default();
        // SAFETY: the invariant check above is exactly the precondition of
        // `set_data_unchecked`.
        unsafe {
            res.set_data_unchecked(buf);
        }
        Ok(res)
    }

    /// Digests `input` block-by-block via `compress`, buffering any leftover
    /// partial block for the next call.
    #[inline]
    pub fn digest_blocks(&mut self, mut input: &[u8], mut compress: impl FnMut(&[Array<u8, BS>])) {
        let pos = self.get_pos();
        // Bytes still free in the currently buffered block.
        let rem = self.size() - pos;
        let n = input.len();
        // Fast path: per the kind's invariant, `input` fits into the
        // remaining space without requiring a compression — just append it.
        if K::invariant(n, rem) {
            // SAFETY: the invariant guarantees the copy stays within the
            // block and `pos + n` is a valid position — TODO(review): exact
            // bound is defined in the `sealed` module.
            unsafe {
                let buf_ptr = self.buffer.as_mut_ptr().cast::<u8>().add(pos);
                ptr::copy_nonoverlapping(input.as_ptr(), buf_ptr, input.len());
                self.set_pos_unchecked(pos + input.len());
            }
            return;
        }
        // Complete the partially filled block (if any) and compress it.
        if pos != 0 {
            let (left, right) = input.split_at(rem);
            input = right;
            // SAFETY: `left.len() == rem`, so after the copy bytes
            // `0..BS::USIZE` of the buffer are initialized, making
            // `assume_init_ref` sound.
            let block = unsafe {
                let buf_ptr = self.buffer.as_mut_ptr().cast::<u8>().add(pos);
                ptr::copy_nonoverlapping(left.as_ptr(), buf_ptr, left.len());
                self.buffer.assume_init_ref()
            };
            compress(slice::from_ref(block));
        }

        // Process the bulk of the input in place (no copy); whether a final
        // exact block is kept back as "leftover" is kind-specific.
        let (blocks, leftover) = K::split_blocks(input);
        if !blocks.is_empty() {
            compress(blocks);
        }

        // SAFETY: `split_blocks` returns a leftover whose length satisfies
        // the kind's position invariant — TODO(review): guaranteed by the
        // `sealed` module.
        unsafe {
            self.set_data_unchecked(leftover);
        }
    }

    /// Resets the buffer to the empty state (position 0).
    #[inline(always)]
    pub fn reset(&mut self) {
        // SAFETY: 0 is a valid position for every kind (see `default`).
        unsafe {
            self.set_pos_unchecked(0);
        }
    }

    /// Returns the buffered data zero-padded to a full block, and resets
    /// the buffer.
    #[inline(always)]
    pub fn pad_with_zeros(&mut self) -> Array<u8, BS> {
        let mut res = Array::<u8, BS>::default();
        let data = self.get_data();
        res[..data.len()].copy_from_slice(data);
        self.reset();
        res
    }

    /// Returns the current byte position within the block.
    #[inline(always)]
    pub fn get_pos(&self) -> usize {
        let pos = K::get_pos(&self.buffer, &self.pos);
        // Every setter upholds the invariant; encode that fact for the
        // optimizer, and catch violations in debug builds.
        if !K::invariant(pos, BS::USIZE) {
            debug_assert!(false);
            // SAFETY: unreachable as long as all setters uphold the
            // invariant (`set_pos_unchecked` debug-asserts it).
            unsafe {
                core::hint::unreachable_unchecked();
            }
        }
        pos
    }

    /// Returns the currently buffered data prefix.
    #[inline(always)]
    pub fn get_data(&self) -> &[u8] {
        // SAFETY: all mutators maintain the invariant that the first
        // `get_pos()` bytes of `buffer` are initialized whenever the buffer
        // can be observed.
        unsafe { slice::from_raw_parts(self.buffer.as_ptr().cast(), self.get_pos()) }
    }

    /// Sets the buffer contents and position.
    ///
    /// # Panics
    /// If `pos` violates this kind's position invariant for the block size.
    #[inline]
    pub fn set(&mut self, buf: Array<u8, BS>, pos: usize) {
        assert!(K::invariant(pos, BS::USIZE));
        self.buffer = MaybeUninit::new(buf);
        // SAFETY: `pos` was validated by the assert above, and the whole
        // buffer is now initialized.
        unsafe {
            self.set_pos_unchecked(pos);
        }
    }

    /// Returns the block size in bytes.
    #[inline(always)]
    pub fn size(&self) -> usize {
        BS::USIZE
    }

    /// Returns the number of free bytes remaining in the current block.
    #[inline(always)]
    pub fn remaining(&self) -> usize {
        self.size() - self.get_pos()
    }

    /// Sets the position without validating it (beyond a debug assert).
    ///
    /// # Safety
    /// `pos` must satisfy `K::invariant(pos, BS::USIZE)`, and the first
    /// `pos` bytes of `buffer` must be initialized before the buffer is
    /// next read.
    #[inline(always)]
    unsafe fn set_pos_unchecked(&mut self, pos: usize) {
        debug_assert!(K::invariant(pos, BS::USIZE));
        K::set_pos(&mut self.buffer, &mut self.pos, pos);
    }

    /// Copies `buf` into the buffer and sets the position to `buf.len()`.
    ///
    /// # Safety
    /// `buf.len()` must satisfy `K::invariant(buf.len(), BS::USIZE)`.
    #[inline(always)]
    unsafe fn set_data_unchecked(&mut self, buf: &[u8]) {
        // Note: the position is updated before the bytes are written; this
        // is fine because nothing can observe the buffer between the two
        // steps (we hold `&mut self`).
        unsafe {
            self.set_pos_unchecked(buf.len());
            let dst_ptr: *mut u8 = self.buffer.as_mut_ptr().cast();
            ptr::copy_nonoverlapping(buf.as_ptr(), dst_ptr, buf.len());
        }
    }
}
306
/// Size of a serialized buffer: the block size plus the kind-specific
/// `Overhead` (declared in the sealed trait).
pub type SerializedBufferSize<BS, K> = Sum<BS, <K as sealed::Sealed>::Overhead>;
/// Byte array produced by [`BlockBuffer::serialize`].
pub type SerializedBuffer<BS, K> = Array<u8, SerializedBufferSize<BS, K>>;
311
impl<BS: BlockSizes, K: BufferKind> BlockBuffer<BS, K>
where
    BS: core::ops::Add<K::Overhead>,
    Sum<BS, K::Overhead>: ArraySize,
{
    /// Serializes the buffer state into a fixed-size byte array: byte 0
    /// holds the position, followed by the buffered data, zero-padded to
    /// the full serialized length.
    #[allow(clippy::missing_panics_doc)]
    pub fn serialize(&self) -> SerializedBuffer<BS, K> {
        let mut buf = SerializedBuffer::<BS, K>::default();
        let data = self.get_data();
        let (pos, block) = buf.split_at_mut(1);
        // The position is stored in one byte; the expect documents the
        // assumption that block sizes are below 256 — TODO(review): bound
        // enforced by the sealed `BlockSizes` trait.
        pos[0] = u8::try_from(data.len()).expect("buffer size is smaller than 256");
        block[..data.len()].copy_from_slice(data);
        buf
    }

    /// Deserializes state produced by [`Self::serialize`].
    ///
    /// Returns [`Error`] if the stored position violates the kind's
    /// invariant, or if any byte past the stored data is non-zero
    /// (non-canonical encodings are rejected).
    pub fn deserialize(buf: &SerializedBuffer<BS, K>) -> Result<Self, Error> {
        let (pos, block) = buf.split_at(1);
        let pos = usize::from(pos[0]);

        if !<K as sealed::Sealed>::invariant(pos, BS::USIZE) {
            return Err(Error);
        }

        let (data, tail) = block.split_at(pos);

        // Enforce canonical form: all padding bytes must be zero.
        if tail.iter().any(|&b| b != 0) {
            return Err(Error);
        }

        let mut res = Self::default();
        // SAFETY: `data.len() == pos` was checked against the invariant above.
        unsafe { res.set_data_unchecked(data) };
        Ok(res)
    }
}
351
impl<BS: BlockSizes> BlockBuffer<BS, Eager> {
    /// Pads the buffered data with `delim`, zeros, and `suffix` (aligned to
    /// the end of a block), compressing one or two blocks as needed; then
    /// resets the buffer. This is the common primitive for length-suffix
    /// (Merkle–Damgård-style) padding.
    ///
    /// # Panics
    /// If `suffix.len() > BS::USIZE`.
    #[inline(always)]
    pub fn digest_pad(
        &mut self,
        delim: u8,
        suffix: &[u8],
        mut compress: impl FnMut(&Array<u8, BS>),
    ) {
        assert!(suffix.len() <= BS::USIZE, "suffix is too long");
        let pos = self.get_pos();
        let mut buf = self.pad_with_zeros();
        // In-bounds because the eager kind keeps `pos` strictly below the
        // block size — TODO(review): invariant defined in `sealed`.
        buf[pos] = delim;

        // Offset at which the suffix starts, flush with the block's end.
        let n = self.size() - suffix.len();
        // If the delimiter plus suffix don't fit in the current block,
        // compress the delimited block first and put the suffix in a
        // second, otherwise-zero block.
        if self.size() - pos - 1 < suffix.len() {
            compress(&buf);
            buf.fill(0);
            buf[n..].copy_from_slice(suffix);
            compress(&buf);
        } else {
            buf[n..].copy_from_slice(suffix);
            compress(&buf);
        }
        self.reset();
    }

    /// Pads with `0x80`, zeros, and the 64-bit big-endian length `data_len`.
    #[inline]
    pub fn len64_padding_be(&mut self, data_len: u64, compress: impl FnMut(&Array<u8, BS>)) {
        self.digest_pad(0x80, &data_len.to_be_bytes(), compress);
    }

    /// Pads with `0x80`, zeros, and the 64-bit little-endian length `data_len`.
    #[inline]
    pub fn len64_padding_le(&mut self, data_len: u64, compress: impl FnMut(&Array<u8, BS>)) {
        self.digest_pad(0x80, &data_len.to_le_bytes(), compress);
    }

    /// Pads with `0x80`, zeros, and the 128-bit big-endian length `data_len`.
    #[inline]
    pub fn len128_padding_be(&mut self, data_len: u128, compress: impl FnMut(&Array<u8, BS>)) {
        self.digest_pad(0x80, &data_len.to_be_bytes(), compress);
    }
}
405
#[cfg(feature = "zeroize")]
impl<BS: BlockSizes, K: BufferKind> Zeroize for BlockBuffer<BS, K> {
    /// Zeroizes both the buffered bytes and the position state.
    #[inline]
    fn zeroize(&mut self) {
        self.buffer.zeroize();
        self.pos.zeroize();
    }
}
414
impl<BS: BlockSizes, K: BufferKind> Drop for BlockBuffer<BS, K> {
    #[inline]
    fn drop(&mut self) {
        // Scrub potentially sensitive buffered data when the `zeroize`
        // feature is enabled; without it, dropping is a no-op.
        #[cfg(feature = "zeroize")]
        self.zeroize();
    }
}
422
#[cfg(feature = "zeroize")]
// Marker impl: the `Drop` impl above performs the zeroization.
impl<BS: BlockSizes, K: BufferKind> ZeroizeOnDrop for BlockBuffer<BS, K> {}