#![no_std]
#![doc(
html_logo_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg",
html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg"
)]
#![allow(clippy::undocumented_unsafe_blocks)]
pub use hybrid_array as array;
use array::{Array, ArraySize, typenum::Sum};
use core::{fmt, mem::MaybeUninit, ptr, slice};
#[cfg(feature = "zeroize")]
use zeroize::{Zeroize, ZeroizeOnDrop};
mod read;
mod sealed;
pub use read::ReadBuffer;
/// Trait implemented for supported block sizes.
///
/// Sealed via `sealed::BlockSizes`; the serialization code below encodes the
/// position in a single `u8`, so supported sizes must fit in a byte.
pub trait BlockSizes: ArraySize + sealed::BlockSizes {}

/// Blanket impl: any array size admitted by the sealed bound is a valid block size.
impl<T: ArraySize + sealed::BlockSizes> BlockSizes for T {}

/// Trait for buffer kinds. Sealed: only [`Eager`] and [`Lazy`] implement it.
pub trait BufferKind: sealed::Sealed {}

/// Eager block buffer kind: blocks are compressed as soon as they are
/// complete (see `K::split_blocks` usage in `digest_blocks`).
#[derive(Copy, Clone, Debug, Default)]
pub struct Eager {}

/// Lazy block buffer kind: a final complete block may be kept buffered
/// instead of being compressed immediately.
#[derive(Copy, Clone, Debug, Default)]
pub struct Lazy {}

impl BufferKind for Eager {}
impl BufferKind for Lazy {}

/// Eager block buffer.
pub type EagerBuffer<B> = BlockBuffer<B, Eager>;
/// Lazy block buffer.
pub type LazyBuffer<B> = BlockBuffer<B, Lazy>;

/// Block buffer error: returned when a slice length or serialized state
/// violates the buffer-kind invariant.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct Error;
impl fmt::Display for Error {
    /// Write the fixed, human-readable error message.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        write!(f, "Block buffer error")
    }
}
/// Buffer for block processing of data.
pub struct BlockBuffer<BS: BlockSizes, K: BufferKind> {
    // Invariant: bytes `..get_pos()` are always initialized (they are written
    // before the position is advanced), so `get_data` may view them as `&[u8]`.
    buffer: MaybeUninit<Array<u8, BS>>,
    // Kind-specific position state. Note that `K::get_pos`/`K::set_pos` also
    // receive the buffer itself, so a kind may store position data inside
    // `buffer` rather than (or in addition to) this field.
    pos: K::Pos,
}
impl<BS: BlockSizes, K: BufferKind> Default for BlockBuffer<BS, K> {
    /// Create an empty buffer: position 0, block contents uninitialized.
    #[inline]
    fn default() -> Self {
        let mut buffer = MaybeUninit::uninit();
        let mut pos = Default::default();
        // The kind decides where the position lives (possibly inside `buffer`),
        // so initialization must go through `K::set_pos` rather than writing
        // the `pos` field directly.
        K::set_pos(&mut buffer, &mut pos, 0);
        Self { buffer, pos }
    }
}
impl<BS: BlockSizes, K: BufferKind> Clone for BlockBuffer<BS, K> {
    #[inline]
    fn clone(&self) -> Self {
        // SAFETY: a bitwise copy is used because `buffer` may be partially
        // uninitialized, which a field-wise clone could not express.
        // NOTE(review): this assumes `K::Pos` is plain data with no ownership
        // or `Drop` semantics — upheld by the sealed kinds in this crate,
        // but worth confirming against `sealed.rs`.
        unsafe { ptr::read(self) }
    }
}
impl<BS: BlockSizes, K: BufferKind> fmt::Debug for BlockBuffer<BS, K> {
    /// Format the buffer state: kind name, position, block size, and the
    /// currently buffered data.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        let mut builder = f.debug_struct(K::NAME);
        builder.field("pos", &self.get_pos());
        builder.field("block_size", &BS::USIZE);
        builder.field("data", &self.get_data());
        builder.finish()
    }
}
impl<BS: BlockSizes, K: BufferKind> BlockBuffer<BS, K> {
    /// Create a new buffer from the given slice.
    ///
    /// # Panics
    /// If the slice length does not satisfy the kind invariant
    /// (see [`Self::try_new`]).
    #[inline(always)]
    #[must_use]
    #[track_caller]
    pub fn new(buf: &[u8]) -> Self {
        Self::try_new(buf).expect("invalid slice length for buffer kind")
    }

    /// Create a new buffer from the given slice, returning [`Error`] if the
    /// slice length violates the kind invariant `K::invariant(len, BS)`.
    #[inline(always)]
    pub fn try_new(buf: &[u8]) -> Result<Self, Error> {
        if !K::invariant(buf.len(), BS::USIZE) {
            return Err(Error);
        }
        let mut res = Self::default();
        // SAFETY: `buf.len()` was checked against the kind invariant above.
        unsafe {
            res.set_data_unchecked(buf);
        }
        Ok(res)
    }

    /// Digest `input` in blocks of size `BS` using the `compress` function,
    /// buffering any leftover bytes for the next call.
    #[inline]
    pub fn digest_blocks(&mut self, mut input: &[u8], mut compress: impl FnMut(&[Array<u8, BS>])) {
        let pos = self.get_pos();
        // Bytes left before the buffered block is full.
        let rem = self.size() - pos;
        let n = input.len();
        // Fast path: per the kind invariant, `input` fits into the partially
        // filled block without requiring a compression, so just append.
        if K::invariant(n, rem) {
            // SAFETY: `n` satisfies the invariant w.r.t. `rem`, so the copy
            // stays inside the block and the new position is valid.
            unsafe {
                let buf_ptr = self.buffer.as_mut_ptr().cast::<u8>().add(pos);
                ptr::copy_nonoverlapping(input.as_ptr(), buf_ptr, input.len());
                self.set_pos_unchecked(pos + input.len());
            }
            return;
        }
        // Complete the partially filled block first and compress it alone.
        if pos != 0 {
            let (left, right) = input.split_at(rem);
            input = right;
            // SAFETY: `left.len() == rem` exactly fills the block, so after
            // the copy all `BS` bytes are initialized and `assume_init_ref`
            // is sound.
            let block = unsafe {
                let buf_ptr = self.buffer.as_mut_ptr().cast::<u8>().add(pos);
                ptr::copy_nonoverlapping(left.as_ptr(), buf_ptr, left.len());
                self.buffer.assume_init_ref()
            };
            compress(slice::from_ref(block));
        }
        // Split the rest into full blocks plus a leftover tail; how a final
        // block-aligned chunk is treated (compress now vs. buffer) is decided
        // by the kind's `split_blocks`.
        let (blocks, leftover) = K::split_blocks(input);
        if !blocks.is_empty() {
            compress(blocks);
        }
        // SAFETY: `split_blocks` returns a leftover whose length satisfies
        // the kind invariant.
        unsafe {
            self.set_data_unchecked(leftover);
        }
    }

    /// Reset the buffer by setting the cursor position to zero.
    #[inline(always)]
    pub fn reset(&mut self) {
        // SAFETY: position 0 satisfies the invariant of every kind (it is
        // also the state produced by `Default`).
        unsafe {
            self.set_pos_unchecked(0);
        }
    }

    /// Zero-pad the buffered data up to the block size and return the
    /// resulting block, resetting the buffer.
    #[inline(always)]
    pub fn pad_with_zeros(&mut self) -> Array<u8, BS> {
        let mut res = Array::<u8, BS>::default();
        let data = self.get_data();
        res[..data.len()].copy_from_slice(data);
        self.reset();
        res
    }

    /// Return the current cursor position within the block.
    #[inline(always)]
    pub fn get_pos(&self) -> usize {
        let pos = K::get_pos(&self.buffer, &self.pos);
        if !K::invariant(pos, BS::USIZE) {
            debug_assert!(false);
            // SAFETY: every write path goes through `set_pos_unchecked`,
            // whose contract requires the invariant to hold, so this branch
            // is unreachable; the hint lets the optimizer exploit that
            // (checked with `debug_assert!` in debug builds).
            unsafe {
                core::hint::unreachable_unchecked();
            }
        }
        pos
    }

    /// Return a slice over the data buffered so far.
    #[inline(always)]
    pub fn get_data(&self) -> &[u8] {
        // SAFETY: the first `get_pos()` bytes of `buffer` are always
        // initialized (struct invariant), and `get_pos()` never exceeds the
        // block size.
        unsafe { slice::from_raw_parts(self.buffer.as_ptr().cast(), self.get_pos()) }
    }

    /// Set the buffered block and cursor position.
    ///
    /// # Panics
    /// If `pos` violates the kind invariant for the block size.
    #[inline]
    pub fn set(&mut self, buf: Array<u8, BS>, pos: usize) {
        assert!(K::invariant(pos, BS::USIZE));
        self.buffer = MaybeUninit::new(buf);
        // SAFETY: `pos` was validated by the assert above. The position is
        // written after `buffer` because `K::set_pos` receives the buffer
        // and may store position data inside it.
        unsafe {
            self.set_pos_unchecked(pos);
        }
    }

    /// Return the block size in bytes.
    #[inline(always)]
    pub fn size(&self) -> usize {
        BS::USIZE
    }

    /// Return the number of bytes until the buffered block is full.
    #[inline(always)]
    pub fn remaining(&self) -> usize {
        self.size() - self.get_pos()
    }

    /// Set the cursor position without validating it.
    ///
    /// # Safety
    /// `pos` must satisfy `K::invariant(pos, BS::USIZE)`.
    #[inline(always)]
    unsafe fn set_pos_unchecked(&mut self, pos: usize) {
        debug_assert!(K::invariant(pos, BS::USIZE));
        K::set_pos(&mut self.buffer, &mut self.pos, pos);
    }

    /// Replace the buffered data with `buf` and set the position to
    /// `buf.len()`.
    ///
    /// # Safety
    /// `buf.len()` must satisfy `K::invariant(buf.len(), BS::USIZE)`.
    #[inline(always)]
    unsafe fn set_data_unchecked(&mut self, buf: &[u8]) {
        // SAFETY: the caller guarantees the length invariant, so the
        // position update is valid and the `buf.len()`-byte copy stays
        // inside the block.
        unsafe {
            self.set_pos_unchecked(buf.len());
            let dst_ptr: *mut u8 = self.buffer.as_mut_ptr().cast();
            ptr::copy_nonoverlapping(buf.as_ptr(), dst_ptr, buf.len());
        }
    }
}
/// Size of serialized buffer state: block size plus the kind's bookkeeping
/// overhead (at least the one position byte split off below).
pub type SerializedBufferSize<BS, K> = Sum<BS, <K as sealed::Sealed>::Overhead>;
/// Byte array holding serialized buffer state.
pub type SerializedBuffer<BS, K> = Array<u8, SerializedBufferSize<BS, K>>;

impl<BS: BlockSizes, K: BufferKind> BlockBuffer<BS, K>
where
    BS: core::ops::Add<K::Overhead>,
    Sum<BS, K::Overhead>: ArraySize,
{
    /// Serialize the buffer state: first byte is the cursor position, the
    /// remainder is the buffered data followed by zero padding.
    #[allow(clippy::missing_panics_doc)]
    pub fn serialize(&self) -> SerializedBuffer<BS, K> {
        let mut buf = SerializedBuffer::<BS, K>::default();
        let data = self.get_data();
        let (pos, block) = buf.split_at_mut(1);
        // Supported block sizes fit in a `u8`, so this conversion never fails.
        pos[0] = u8::try_from(data.len()).expect("buffer size is smaller than 256");
        block[..data.len()].copy_from_slice(data);
        buf
    }

    /// Deserialize state previously produced by [`Self::serialize`].
    ///
    /// # Errors
    /// Returns [`Error`] if the encoded position violates the kind
    /// invariant, or if the bytes past the position are not all zero
    /// (non-canonical encoding).
    pub fn deserialize(buf: &SerializedBuffer<BS, K>) -> Result<Self, Error> {
        let (pos, block) = buf.split_at(1);
        let pos = usize::from(pos[0]);
        if !<K as sealed::Sealed>::invariant(pos, BS::USIZE) {
            return Err(Error);
        }
        let (data, tail) = block.split_at(pos);
        // Reject encodings with non-zero padding so each state has exactly
        // one serialized form.
        if tail.iter().any(|&b| b != 0) {
            return Err(Error);
        }
        let mut res = Self::default();
        // SAFETY: `pos == data.len()` was checked against the invariant above.
        unsafe { res.set_data_unchecked(data) };
        Ok(res)
    }
}
impl<BS: BlockSizes> BlockBuffer<BS, Eager> {
    /// Compress the remaining data after padding it with `delim`, zeros, and
    /// `suffix` placed at the very end of the final block. If the delimiter
    /// and suffix do not both fit after the buffered data, two blocks are
    /// compressed.
    ///
    /// # Panics
    /// If `suffix.len() > BS::USIZE`.
    #[inline(always)]
    pub fn digest_pad(
        &mut self,
        delim: u8,
        suffix: &[u8],
        mut compress: impl FnMut(&Array<u8, BS>),
    ) {
        assert!(suffix.len() <= BS::USIZE, "suffix is too long");
        let pos = self.get_pos();
        // Zero-padded copy of the buffered data; the buffer itself is reset.
        let mut buf = self.pad_with_zeros();
        // NOTE(review): indexing assumes `pos < BS` for the eager kind
        // (otherwise this would panic); upheld by `get_pos`'s invariant
        // check — confirm against the `Eager` invariant in `sealed.rs`.
        buf[pos] = delim;
        // Offset at which the suffix starts within a block.
        let n = self.size() - suffix.len();
        if self.size() - pos - 1 < suffix.len() {
            // Not enough room after the delimiter: emit the current block,
            // then a second block containing only the suffix.
            compress(&buf);
            buf.fill(0);
            buf[n..].copy_from_slice(suffix);
            compress(&buf);
        } else {
            // Delimiter, zero padding, and suffix fit into a single block.
            buf[n..].copy_from_slice(suffix);
            compress(&buf);
        }
        self.reset();
    }

    /// Pad the message with the 0x80 delimiter, zeros, and the 64-bit
    /// big-endian message length, then compress (MD-style padding as used
    /// by e.g. SHA-256).
    #[inline]
    pub fn len64_padding_be(&mut self, data_len: u64, compress: impl FnMut(&Array<u8, BS>)) {
        self.digest_pad(0x80, &data_len.to_be_bytes(), compress);
    }

    /// Pad the message with the 0x80 delimiter, zeros, and the 64-bit
    /// little-endian message length, then compress (as used by e.g. MD5).
    #[inline]
    pub fn len64_padding_le(&mut self, data_len: u64, compress: impl FnMut(&Array<u8, BS>)) {
        self.digest_pad(0x80, &data_len.to_le_bytes(), compress);
    }

    /// Pad the message with the 0x80 delimiter, zeros, and the 128-bit
    /// big-endian message length, then compress (as used by e.g. SHA-512).
    #[inline]
    pub fn len128_padding_be(&mut self, data_len: u128, compress: impl FnMut(&Array<u8, BS>)) {
        self.digest_pad(0x80, &data_len.to_be_bytes(), compress);
    }
}
#[cfg(feature = "zeroize")]
impl<BS: BlockSizes, K: BufferKind> Zeroize for BlockBuffer<BS, K> {
    /// Wipe the block storage (including uninitialized bytes) and the
    /// position state.
    #[inline]
    fn zeroize(&mut self) {
        self.buffer.zeroize();
        self.pos.zeroize();
    }
}

/// Drop wipes the buffer contents when the `zeroize` feature is enabled;
/// otherwise dropping is a no-op. The impl exists unconditionally so the
/// type's drop behavior does not change with the feature set.
impl<BS: BlockSizes, K: BufferKind> Drop for BlockBuffer<BS, K> {
    #[inline]
    fn drop(&mut self) {
        #[cfg(feature = "zeroize")]
        self.zeroize();
    }
}

#[cfg(feature = "zeroize")]
impl<BS: BlockSizes, K: BufferKind> ZeroizeOnDrop for BlockBuffer<BS, K> {}