use crate::{Config, Scheme};
use bytes::{Buf, BufMut, Bytes};
use commonware_codec::{BufsMut, EncodeSize, FixedSize, RangeCfg, Read, ReadExt, Write};
use commonware_cryptography::{Digest, Hasher};
use commonware_parallel::Strategy;
use commonware_storage::bmt::{self, Builder};
use commonware_utils::Cached;
use reed_solomon_simd::{Error as RsError, ReedSolomonDecoder, ReedSolomonEncoder};
use std::marker::PhantomData;
use thiserror::Error;
commonware_utils::thread_local_cache!(static CACHED_ENCODER: ReedSolomonEncoder);
commonware_utils::thread_local_cache!(static CACHED_DECODER: ReedSolomonDecoder);
/// Errors that can arise while encoding, checking, or decoding
/// Reed-Solomon-coded shards.
#[derive(Error, Debug)]
pub enum Error {
    /// Wrapped error from the underlying `reed_solomon_simd` encoder/decoder.
    #[error("reed-solomon error: {0}")]
    ReedSolomon(#[from] RsError),
    /// The provided shards do not correspond to a consistent encoding
    /// (e.g. manipulated shard bytes, non-canonical padding, or a root
    /// committed over inconsistent shards).
    #[error("inconsistent")]
    Inconsistent,
    /// A shard's Merkle inclusion proof failed to verify (or could not be
    /// generated/sized for the configured shard count).
    #[error("invalid proof")]
    InvalidProof,
    /// Fewer than the minimum number of shards were provided for decoding.
    #[error("not enough chunks")]
    NotEnoughChunks,
    /// The same shard index was provided more than once.
    #[error("duplicate chunk index: {0}")]
    DuplicateIndex(u16),
    /// The data length exceeds what the u32 length prefix can represent.
    #[error("invalid data length: {0}")]
    InvalidDataLength(usize),
    /// A shard index is outside the configured total shard count.
    #[error("invalid index: {0}")]
    InvalidIndex(u16),
    /// The configured total shard count does not fit in a u16.
    #[error("too many total shards: {0}")]
    TooManyTotalShards(u32),
    /// A checked shard carries a different commitment than the one being
    /// decoded against.
    #[error("checked shard commitment does not match decode commitment")]
    CommitmentMismatch,
}
fn total_shards(config: &Config) -> Result<u16, Error> {
let total = config.total_shards();
total
.try_into()
.map_err(|_| Error::TooManyTotalShards(total))
}
/// A single erasure-coded shard together with a BMT inclusion proof tying it
/// to a commitment root.
#[derive(Debug, Clone)]
pub struct Chunk<D: Digest> {
    // Raw shard bytes (an original or recovery shard produced by `encode`).
    shard: Bytes,
    // Position of this shard in the coded set (0..total).
    index: u16,
    // Proof that the shard's digest is the leaf at `index` under the root.
    proof: bmt::Proof<D>,
}
impl<D: Digest> Chunk<D> {
    /// Bundles a shard with its index and BMT inclusion proof.
    const fn new(shard: Bytes, index: u16, proof: bmt::Proof<D>) -> Self {
        Self { shard, index, proof }
    }

    /// Hashes the shard and checks its inclusion proof against `root`.
    ///
    /// Returns `None` when `index` disagrees with the chunk's own index or
    /// when the proof does not verify; otherwise promotes the shard to a
    /// [`CheckedChunk`] carrying the root and shard digest.
    fn verify<H: Hasher<Digest = D>>(&self, index: u16, root: &D) -> Option<CheckedChunk<D>> {
        // The caller's expected index must match what the chunk claims.
        if self.index != index {
            return None;
        }
        let mut hasher = H::new();
        hasher.update(&self.shard);
        let shard_digest = hasher.finalize();
        match self.proof.verify_element_inclusion(
            &mut hasher,
            &shard_digest,
            u32::from(self.index),
            root,
        ) {
            Ok(_) => Some(CheckedChunk::new(
                *root,
                self.shard.clone(),
                self.index,
                shard_digest,
            )),
            Err(_) => None,
        }
    }
}
/// A shard whose inclusion proof has been verified against a commitment root
/// (see [`Chunk::verify`]), retaining the root and the shard's digest.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CheckedChunk<D: Digest> {
    // Commitment root the proof was verified against.
    root: D,
    // Raw shard bytes.
    shard: Bytes,
    // Position of this shard in the coded set.
    index: u16,
    // Digest of `shard`, reused during decode to skip re-hashing.
    digest: D,
}
impl<D: Digest> CheckedChunk<D> {
    /// Constructs a checked chunk from already-validated parts; callers are
    /// responsible for having verified the proof (see [`Chunk::verify`]).
    const fn new(root: D, shard: Bytes, index: u16, digest: D) -> Self {
        Self {
            root,
            shard,
            index,
            digest,
        }
    }
}
impl<D: Digest> Write for Chunk<D> {
    fn write(&self, writer: &mut impl BufMut) {
        // Field order must match `Read::read_cfg`: shard, index, proof.
        self.shard.write(writer);
        self.index.write(writer);
        self.proof.write(writer);
    }
    fn write_bufs(&self, buf: &mut impl BufsMut) {
        // Shard bytes go through `write_bufs` (zero-copy path); index and
        // proof are written inline. Same field order as `write`.
        self.shard.write_bufs(buf);
        self.index.write(buf);
        self.proof.write(buf);
    }
}
impl<D: Digest> Read for Chunk<D> {
    type Cfg = crate::CodecConfig;

    /// Decodes a chunk in the same field order as [`Write`]: shard, index,
    /// proof. The shard length is capped by `cfg.maximum_shard_size`.
    fn read_cfg(reader: &mut impl Buf, cfg: &Self::Cfg) -> Result<Self, commonware_codec::Error> {
        let shard = Bytes::read_cfg(reader, &RangeCfg::new(..=cfg.maximum_shard_size))?;
        let index = u16::read(reader)?;
        // NOTE(review): proof read with cfg `&1` — presumably a bound on the
        // number of proven elements (each proof covers a single shard here);
        // confirm against `bmt::Proof`'s `Read` impl.
        let proof = bmt::Proof::<D>::read_cfg(reader, &1)?;
        Ok(Self {
            shard,
            index,
            proof,
        })
    }
}
impl<D: Digest> EncodeSize for Chunk<D> {
    fn encode_size(&self) -> usize {
        self.shard.encode_size() + self.index.encode_size() + self.proof.encode_size()
    }
    // Differs from `encode_size` only in the shard term, mirroring
    // `write_bufs`, which emits the shard bytes out-of-line while index and
    // proof are always written inline.
    fn encode_inline_size(&self) -> usize {
        self.shard.encode_inline_size() + self.index.encode_size() + self.proof.encode_size()
    }
}
// Field-wise equality over shard bytes, index, and proof.
impl<D: Digest> PartialEq for Chunk<D> {
    fn eq(&self, other: &Self) -> bool {
        self.shard == other.shard && self.index == other.index && self.proof == other.proof
    }
}
impl<D: Digest> Eq for Chunk<D> {}
#[cfg(feature = "arbitrary")]
impl<D: Digest> arbitrary::Arbitrary<'_> for Chunk<D>
where
    D: for<'a> arbitrary::Arbitrary<'a>,
{
    /// Generates an arbitrary (not necessarily valid) chunk for fuzzing and
    /// codec conformance tests.
    fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> {
        Ok(Self {
            shard: u.arbitrary::<Vec<u8>>()?.into(),
            index: u.arbitrary()?,
            proof: u.arbitrary()?,
        })
    }
}
/// Pads `data` into `k` equally-sized original shards, prefixed by the data
/// length as a big-endian `u32`.
///
/// Returns the padded buffer (exactly `k * shard_len` bytes, zero-padded at
/// the tail) and the per-shard length. The shard length is rounded up to an
/// even number of bytes, matching the shard-size requirement of
/// `reed_solomon_simd`.
fn prepare_data(mut data: impl Buf, k: usize) -> (Vec<u8>, usize) {
    let data_len = data.remaining();
    let prefixed_len = u32::SIZE + data_len;
    // Round up so every shard holds the same number of bytes, then to the
    // next even length (replaces the manual parity check + increment).
    let shard_len = prefixed_len.div_ceil(k).next_multiple_of(2);
    // Layout: [len: u32 BE][data][zero padding].
    let length_bytes = (data_len as u32).to_be_bytes();
    let mut padded = vec![0u8; k * shard_len];
    padded[..u32::SIZE].copy_from_slice(&length_bytes);
    data.copy_to_slice(&mut padded[u32::SIZE..u32::SIZE + data_len]);
    (padded, shard_len)
}
fn extract_data(shards: &[&[u8]], k: usize) -> Result<Vec<u8>, Error> {
let shards = shards.get(..k).ok_or(Error::NotEnoughChunks)?;
let data_len = read_data_len(shards)?;
let mut data = Vec::with_capacity(data_len);
let mut prefix_bytes_left = u32::SIZE;
let mut data_bytes_left = data_len;
for shard in shards {
if prefix_bytes_left >= shard.len() {
prefix_bytes_left -= shard.len();
continue;
}
let payload = &shard[prefix_bytes_left..];
let copy_len = data_bytes_left.min(payload.len());
data.extend_from_slice(&payload[..copy_len]);
data_bytes_left -= copy_len;
if !payload[copy_len..].iter().all(|byte| *byte == 0) {
return Err(Error::Inconsistent);
}
prefix_bytes_left = 0;
}
if data_bytes_left != 0 {
return Err(Error::Inconsistent);
}
Ok(data)
}
/// Reads the big-endian u32 data-length prefix spanning the start of the
/// given shards, validating it against the total payload capacity.
fn read_data_len(shards: &[&[u8]]) -> Result<usize, Error> {
    let total_len: usize = shards.iter().map(|shard| shard.len()).sum();
    if total_len < u32::SIZE {
        return Err(Error::Inconsistent);
    }
    // Gather the first u32::SIZE bytes across shard boundaries; the length
    // check above guarantees the prefix is fully populated.
    let mut prefix = [0u8; u32::SIZE];
    let mut bytes = shards.iter().flat_map(|shard| shard.iter().copied());
    for slot in prefix.iter_mut() {
        *slot = bytes.next().expect("total_len >= u32::SIZE");
    }
    let data_len = u32::from_be_bytes(prefix) as usize;
    // The claimed length cannot exceed the bytes following the prefix.
    if data_len > total_len - u32::SIZE {
        return Err(Error::Inconsistent);
    }
    Ok(data_len)
}
/// An encoding result: the BMT root commitment paired with one proof-carrying
/// [`Chunk`] per shard.
type Encoding<D> = (D, Vec<Chunk<D>>);

/// Encodes `data` into `total` shards (`min` originals plus `total - min`
/// recovery shards), commits to every shard in a binary Merkle tree, and
/// returns the root together with a proof-carrying [`Chunk`] per index.
///
/// # Panics
///
/// Panics if `total <= min` or `min == 0`.
fn encode<H: Hasher, S: Strategy>(
    total: u16,
    min: u16,
    data: impl Buf,
    strategy: &S,
) -> Result<Encoding<H::Digest>, Error> {
    assert!(total > min);
    assert!(min > 0);
    let n = total as usize;
    let k = min as usize;
    let m = n - k;
    // The data length is stored in a u32 prefix, so larger payloads cannot
    // round-trip.
    let data_len = data.remaining();
    if data_len > u32::MAX as usize {
        return Err(Error::InvalidDataLength(data_len));
    }
    let (padded, shard_len) = prepare_data(data, k);
    let recovery_buf = {
        // Reuse a thread-local encoder (reset for the current geometry) to
        // avoid re-allocating its internal state on every call.
        let mut encoder = Cached::take(
            &CACHED_ENCODER,
            || ReedSolomonEncoder::new(k, m, shard_len),
            |enc| enc.reset(k, m, shard_len),
        )
        .map_err(Error::ReedSolomon)?;
        for shard in padded.chunks(shard_len) {
            encoder
                .add_original_shard(shard)
                .map_err(Error::ReedSolomon)?;
        }
        let encoding = encoder.encode().map_err(Error::ReedSolomon)?;
        // Copy the recovery shards into one contiguous buffer so it can be
        // frozen and sliced without further copies.
        let mut buf = Vec::with_capacity(m * shard_len);
        for shard in encoding.recovery_iter() {
            buf.extend_from_slice(shard);
        }
        buf
    };
    // Freeze both buffers; per-shard `Bytes` slices below are zero-copy.
    let originals: Bytes = padded.into();
    let recoveries: Bytes = recovery_buf.into();
    let mut builder = Builder::<H>::new(n);
    // Leaf order: shards 0..k are originals, k..n are recovery shards.
    let shard_slices: Vec<Bytes> = (0..k)
        .map(|i| originals.slice(i * shard_len..(i + 1) * shard_len))
        .chain((0..m).map(|i| recoveries.slice(i * shard_len..(i + 1) * shard_len)))
        .collect();
    // Hash all shards, possibly in parallel, per the supplied strategy.
    let shard_hashes = strategy.map_init_collect_vec(&shard_slices, H::new, |hasher, shard| {
        hasher.update(shard);
        hasher.finalize()
    });
    for hash in &shard_hashes {
        builder.add(hash);
    }
    let tree = builder.build();
    let root = tree.root();
    // Attach an inclusion proof to every shard.
    let mut chunks = Vec::with_capacity(n);
    for (i, shard) in shard_slices.into_iter().enumerate() {
        let proof = tree.proof(i as u32).map_err(|_| Error::InvalidProof)?;
        chunks.push(Chunk::new(shard, i as u16, proof));
    }
    Ok((root, chunks))
}
/// Reconstructs the original data from at least `min` checked shards.
///
/// Verifies every provided shard commits to `root`, recovers missing
/// original shards, then re-encodes the recovery shards and rebuilds the
/// full Merkle tree to confirm `root` corresponds to a consistent encoding
/// (rejecting maliciously constructed commitments with
/// [`Error::Inconsistent`]).
///
/// # Panics
///
/// Panics if `total <= min` or `min == 0`.
fn decode<'a, H: Hasher, S: Strategy>(
    total: u16,
    min: u16,
    root: &H::Digest,
    chunks: impl Iterator<Item = &'a CheckedChunk<H::Digest>>,
    strategy: &S,
) -> Result<Vec<u8>, Error> {
    assert!(total > min);
    assert!(min > 0);
    let n = total as usize;
    let k = min as usize;
    let m = n - k;
    let mut chunks = chunks.peekable();
    let Some(first) = chunks.peek() else {
        return Err(Error::NotEnoughChunks);
    };
    // All shards of one encoding share a length; take it from the first.
    let shard_len = first.shard.len();
    // Per-index digest of each provided (or later, recovered) shard.
    let mut shard_digests: Vec<Option<H::Digest>> = vec![None; n];
    let mut provided_originals: Vec<(usize, &[u8])> = Vec::new();
    let mut provided_recoveries: Vec<(usize, &[u8])> = Vec::new();
    let mut provided = 0usize;
    for chunk in chunks {
        provided += 1;
        // Every checked shard must carry the same commitment being decoded.
        if &chunk.root != root {
            return Err(Error::CommitmentMismatch);
        }
        let index = chunk.index;
        if index >= total {
            return Err(Error::InvalidIndex(index));
        }
        // The digest slot doubles as a duplicate-index detector.
        let digest_slot = &mut shard_digests[index as usize];
        if digest_slot.is_some() {
            return Err(Error::DuplicateIndex(index));
        }
        *digest_slot = Some(chunk.digest);
        // Indices below `min` are originals; the rest are recovery shards,
        // re-based into the decoder's recovery index space.
        if index < min {
            provided_originals.push((index as usize, chunk.shard.as_ref()));
        } else {
            provided_recoveries.push((index as usize - k, chunk.shard.as_ref()));
        }
    }
    if provided < k {
        return Err(Error::NotEnoughChunks);
    }
    // Reuse a thread-local decoder (reset for the current geometry).
    let mut decoder = Cached::take(
        &CACHED_DECODER,
        || ReedSolomonDecoder::new(k, m, shard_len),
        |dec| dec.reset(k, m, shard_len),
    )
    .map_err(Error::ReedSolomon)?;
    for (idx, shard) in &provided_originals {
        decoder
            .add_original_shard(*idx, shard)
            .map_err(Error::ReedSolomon)?;
    }
    for (idx, shard) in &provided_recoveries {
        decoder
            .add_recovery_shard(*idx, shard)
            .map_err(Error::ReedSolomon)?;
    }
    let decoding = decoder.decode().map_err(Error::ReedSolomon)?;
    // Assemble all k original shards from provided + restored pieces.
    let mut shards = vec![Default::default(); k];
    for (idx, shard) in provided_originals
        .into_iter()
        .chain(decoding.restored_original_iter())
    {
        shards[idx] = shard;
    }
    // Re-derive every recovery shard from the full original set so the
    // complete tree can be rebuilt and compared against `root`.
    let mut encoder = Cached::take(
        &CACHED_ENCODER,
        || ReedSolomonEncoder::new(k, m, shard_len),
        |enc| enc.reset(k, m, shard_len),
    )
    .map_err(Error::ReedSolomon)?;
    for shard in shards.iter().take(k) {
        encoder
            .add_original_shard(shard)
            .map_err(Error::ReedSolomon)?;
    }
    let encoding = encoder.encode().map_err(Error::ReedSolomon)?;
    shards.extend(encoding.recovery_iter());
    // Hash only shards that were NOT provided; provided ones keep the
    // digest recorded during `check`.
    for (i, digest) in strategy.map_init_collect_vec(
        shard_digests
            .iter()
            .enumerate()
            .filter_map(|(i, digest)| digest.is_none().then_some(i)),
        H::new,
        |hasher, i| {
            hasher.update(shards[i]);
            (i, hasher.finalize())
        },
    ) {
        shard_digests[i] = Some(digest);
    }
    let mut builder = Builder::<H>::new(n);
    shard_digests
        .into_iter()
        .map(|digest| digest.expect("digest must be present for every shard"))
        .for_each(|digest| {
            builder.add(&digest);
        });
    let tree = builder.build();
    // The recomputed root must match the claimed commitment; otherwise the
    // encoder committed to an inconsistent shard set.
    if tree.root() != *root {
        return Err(Error::Inconsistent);
    }
    extract_data(&shards, k)
}
/// [`Scheme`] implementation backed by `reed_solomon_simd` erasure coding
/// with binary-Merkle-tree commitments, generic over the hasher `H`.
#[derive(Clone, Copy)]
pub struct ReedSolomon<H> {
    // Zero-sized: the scheme is stateless; `H` only selects the hash.
    _marker: PhantomData<H>,
}
impl<H> std::fmt::Debug for ReedSolomon<H> {
    /// Formats as `ReedSolomon`, independent of the hasher type parameter.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Identical output to a field-less `debug_struct(..).finish()`.
        f.write_str("ReedSolomon")
    }
}
impl<H: Hasher> Scheme for ReedSolomon<H> {
    type Commitment = H::Digest;
    type Shard = Chunk<H::Digest>;
    type CheckedShard = CheckedChunk<H::Digest>;
    type Error = Error;

    /// Encodes `data` under `config`; see [`encode`].
    fn encode(
        config: &Config,
        data: impl Buf,
        strategy: &impl Strategy,
    ) -> Result<(Self::Commitment, Vec<Self::Shard>), Self::Error> {
        encode::<H, _>(
            total_shards(config)?,
            config.minimum_shards.get(),
            data,
            strategy,
        )
    }

    /// Validates a shard received for `index` against `commitment`.
    ///
    /// Rejects out-of-range indices, proofs sized for a different shard
    /// count (binding the shard to `config`), mismatched shard indices, and
    /// failed inclusion proofs.
    fn check(
        config: &Config,
        commitment: &Self::Commitment,
        index: u16,
        shard: &Self::Shard,
    ) -> Result<Self::CheckedShard, Self::Error> {
        let total = total_shards(config)?;
        if index >= total {
            return Err(Error::InvalidIndex(index));
        }
        // A proof over a different leaf count cannot belong to this config.
        if shard.proof.leaf_count != u32::from(total) {
            return Err(Error::InvalidProof);
        }
        if shard.index != index {
            return Err(Error::InvalidIndex(shard.index));
        }
        shard
            .verify::<H>(shard.index, commitment)
            .ok_or(Error::InvalidProof)
    }

    /// Decodes data from checked shards; see [`decode`].
    fn decode<'a>(
        config: &Config,
        commitment: &Self::Commitment,
        shards: impl Iterator<Item = &'a Self::CheckedShard>,
        strategy: &impl Strategy,
    ) -> Result<Vec<u8>, Self::Error> {
        decode::<H, _>(
            total_shards(config)?,
            config.minimum_shards.get(),
            commitment,
            shards,
            strategy,
        )
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use commonware_codec::Encode;
    use commonware_cryptography::Sha256;
    use commonware_parallel::Sequential;
    use commonware_runtime::{deterministic, iobuf::EncodeExt, BufferPooler, Runner};
    use commonware_utils::NZU16;

    type RS = ReedSolomon<Sha256>;
    const STRATEGY: Sequential = Sequential;

    /// Promotes a chunk to a [`CheckedChunk`] against `root` WITHOUT
    /// verifying its proof (tests construct these directly).
    fn checked(
        root: <Sha256 as Hasher>::Digest,
        chunk: Chunk<<Sha256 as Hasher>::Digest>,
    ) -> CheckedChunk<<Sha256 as Hasher>::Digest> {
        let Chunk { shard, index, .. } = chunk;
        let digest = Sha256::hash(&shard);
        CheckedChunk::new(root, shard, index, digest)
    }

    // Decoding succeeds from a mix of original and recovery shards.
    #[test]
    fn test_recovery() {
        let data = b"Testing recovery pieces";
        let total = 8u16;
        let min = 3u16;
        let (root, chunks) = encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY).unwrap();
        let pieces: Vec<_> = vec![
            checked(root, chunks[0].clone()),
            checked(root, chunks[4].clone()),
            checked(root, chunks[6].clone()),
        ];
        let decoded = decode::<Sha256, _>(total, min, &root, pieces.iter(), &STRATEGY).unwrap();
        assert_eq!(decoded, data);
    }

    // Fewer than `min` shards must be rejected.
    #[test]
    fn test_not_enough_pieces() {
        let data = b"Test insufficient pieces";
        let total = 6u16;
        let min = 4u16;
        let (root, chunks) = encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY).unwrap();
        let pieces: Vec<_> = chunks
            .into_iter()
            .take(2)
            .map(|c| checked(root, c))
            .collect();
        let result = decode::<Sha256, _>(total, min, &root, pieces.iter(), &STRATEGY);
        assert!(matches!(result, Err(Error::NotEnoughChunks)));
    }

    // Supplying the same shard index twice is detected.
    #[test]
    fn test_duplicate_index() {
        let data = b"Test duplicate detection";
        let total = 5u16;
        let min = 3u16;
        let (root, chunks) = encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY).unwrap();
        let pieces = [
            checked(root, chunks[0].clone()),
            checked(root, chunks[0].clone()),
            checked(root, chunks[1].clone()),
        ];
        let result = decode::<Sha256, _>(total, min, &root, pieces.iter(), &STRATEGY);
        assert!(matches!(result, Err(Error::DuplicateIndex(0))));
    }

    // `verify` fails when the expected index disagrees with the chunk's.
    #[test]
    fn test_invalid_index() {
        let data = b"Test invalid index";
        let total = 5u16;
        let min = 3u16;
        let (root, chunks) = encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY).unwrap();
        for i in 0..total {
            assert!(chunks[i as usize].verify::<Sha256>(i + 1, &root).is_none());
        }
    }

    // `encode` asserts total > min.
    #[test]
    #[should_panic(expected = "assertion failed: total > min")]
    fn test_invalid_total() {
        let data = b"Test parameter validation";
        encode::<Sha256, _>(3, 3, data.as_slice(), &STRATEGY).unwrap();
    }

    // `encode` asserts min > 0.
    #[test]
    #[should_panic(expected = "assertion failed: min > 0")]
    fn test_invalid_min() {
        let data = b"Test parameter validation";
        encode::<Sha256, _>(5, 0, data.as_slice(), &STRATEGY).unwrap();
    }

    // Empty payloads round-trip (length prefix alone is encoded).
    #[test]
    fn test_empty_data() {
        let data = b"";
        let total = 100u16;
        let min = 30u16;
        let (root, chunks) = encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY).unwrap();
        let minimal = chunks
            .into_iter()
            .take(min as usize)
            .map(|c| checked(root, c))
            .collect::<Vec<_>>();
        let decoded = decode::<Sha256, _>(total, min, &root, minimal.iter(), &STRATEGY).unwrap();
        assert_eq!(decoded, data);
    }

    // Multi-shard payloads round-trip from exactly `min` shards.
    #[test]
    fn test_large_data() {
        let data = vec![42u8; 1000];
        let total = 7u16;
        let min = 4u16;
        let (root, chunks) = encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY).unwrap();
        let minimal = chunks
            .into_iter()
            .take(min as usize)
            .map(|c| checked(root, c))
            .collect::<Vec<_>>();
        let decoded = decode::<Sha256, _>(total, min, &root, minimal.iter(), &STRATEGY).unwrap();
        assert_eq!(decoded, data);
    }

    // Proofs fail against a root the shards were never committed under, and
    // decode rejects checked shards whose recorded root differs.
    #[test]
    fn test_malicious_root_detection() {
        let data = b"Original data that should be protected";
        let total = 7u16;
        let min = 4u16;
        let (_correct_root, chunks) =
            encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY).unwrap();
        let mut hasher = Sha256::new();
        hasher.update(b"malicious_data_that_wasnt_actually_encoded");
        let malicious_root = hasher.finalize();
        for i in 0..total {
            assert!(chunks[i as usize]
                .clone()
                .verify::<Sha256>(i, &malicious_root)
                .is_none());
        }
        let minimal = chunks
            .into_iter()
            .take(min as usize)
            .map(|c| checked(_correct_root, c))
            .collect::<Vec<_>>();
        let result = decode::<Sha256, _>(total, min, &malicious_root, minimal.iter(), &STRATEGY);
        assert!(matches!(result, Err(Error::CommitmentMismatch)));
    }

    // `check` rejects proofs whose leaf count does not match the config.
    #[test]
    fn test_mismatched_config_rejected_during_check() {
        let config_expected = Config {
            minimum_shards: NZU16!(2),
            extra_shards: NZU16!(2),
        };
        let config_actual = Config {
            minimum_shards: NZU16!(3),
            extra_shards: NZU16!(3),
        };
        let data = b"leaf_count mismatch proof";
        let (commitment, shards) = RS::encode(&config_actual, data.as_slice(), &STRATEGY).unwrap();
        let check_result = RS::check(&config_expected, &commitment, 0, &shards[0]);
        assert!(matches!(check_result, Err(Error::InvalidProof)));
    }

    // A bit-flipped shard (with a matching recomputed digest) still fails
    // the whole-tree consistency check in `decode`.
    #[test]
    fn test_manipulated_chunk_detection() {
        let data = b"Data integrity must be maintained";
        let total = 6u16;
        let min = 3u16;
        let (root, chunks) = encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY).unwrap();
        let mut pieces: Vec<_> = chunks.into_iter().map(|c| checked(root, c)).collect();
        if !pieces[1].shard.is_empty() {
            let mut shard = pieces[1].shard.to_vec();
            shard[0] ^= 0xFF;
            pieces[1].shard = shard.into();
            pieces[1].digest = Sha256::hash(&pieces[1].shard);
        }
        let result = decode::<Sha256, _>(total, min, &root, pieces.iter(), &STRATEGY);
        assert!(matches!(result, Err(Error::Inconsistent)));
    }

    // A root honestly committed over a tampered recovery shard is still
    // rejected: re-encoding during decode exposes the inconsistency.
    #[test]
    fn test_inconsistent_shards() {
        let data = b"Test data for malicious encoding";
        let total = 5u16;
        let min = 3u16;
        let m = total - min;
        let (padded, shard_size) = prepare_data(data.as_slice(), min as usize);
        let mut encoder = ReedSolomonEncoder::new(min as usize, m as usize, shard_size).unwrap();
        for shard in padded.chunks(shard_size) {
            encoder.add_original_shard(shard).unwrap();
        }
        let recovery_result = encoder.encode().unwrap();
        let mut recovery_shards: Vec<Vec<u8>> = recovery_result
            .recovery_iter()
            .map(|s| s.to_vec())
            .collect();
        if !recovery_shards[0].is_empty() {
            recovery_shards[0][0] ^= 0xFF;
        }
        let mut malicious_shards: Vec<Vec<u8>> =
            padded.chunks(shard_size).map(|s| s.to_vec()).collect();
        malicious_shards.extend(recovery_shards);
        let mut builder = Builder::<Sha256>::new(total as usize);
        for shard in &malicious_shards {
            let mut hasher = Sha256::new();
            hasher.update(shard);
            builder.add(&hasher.finalize());
        }
        let malicious_tree = builder.build();
        let malicious_root = malicious_tree.root();
        let selected_indices = vec![0, 1, 3];
        let mut pieces = Vec::new();
        for &i in &selected_indices {
            let merkle_proof = malicious_tree.proof(i as u32).unwrap();
            let shard = malicious_shards[i].clone();
            let chunk = Chunk::new(shard.into(), i as u16, merkle_proof);
            pieces.push(chunk);
        }
        let pieces: Vec<_> = pieces
            .into_iter()
            .map(|c| checked(malicious_root, c))
            .collect();
        let result = decode::<Sha256, _>(total, min, &malicious_root, pieces.iter(), &STRATEGY);
        assert!(matches!(result, Err(Error::Inconsistent)));
    }

    // Non-zero bytes in the padding region make the encoding non-canonical
    // and must be rejected on extraction.
    #[test]
    fn test_non_canonical_padding_rejected() {
        let data = b"X";
        let total = 6u16;
        let min = 3u16;
        let k = min as usize;
        let m = total as usize - k;
        let (mut padded, shard_len) = prepare_data(data.as_slice(), k);
        let payload_end = u32::SIZE + data.len();
        let total_original_len = k * shard_len;
        assert!(payload_end < total_original_len, "test requires padding");
        let pad_shard = payload_end / shard_len;
        let pad_offset = payload_end % shard_len;
        padded[pad_shard * shard_len + pad_offset] = 0xAA;
        let mut encoder = ReedSolomonEncoder::new(k, m, shard_len).unwrap();
        for shard in padded.chunks(shard_len) {
            encoder.add_original_shard(shard).unwrap();
        }
        let recovery = encoder.encode().unwrap();
        let mut shards: Vec<Vec<u8>> = padded.chunks(shard_len).map(|s| s.to_vec()).collect();
        shards.extend(recovery.recovery_iter().map(|s| s.to_vec()));
        let mut builder = Builder::<Sha256>::new(total as usize);
        for shard in &shards {
            let mut hasher = Sha256::new();
            hasher.update(shard);
            builder.add(&hasher.finalize());
        }
        let tree = builder.build();
        let non_canonical_root = tree.root();
        let mut pieces = Vec::with_capacity(k);
        for (i, shard) in shards.iter().take(k).enumerate() {
            let proof = tree.proof(i as u32).unwrap();
            pieces.push(checked(
                non_canonical_root,
                Chunk::new(shard.clone().into(), i as u16, proof),
            ));
        }
        let result = decode::<Sha256, _>(total, min, &non_canonical_root, pieces.iter(), &STRATEGY);
        assert!(matches!(result, Err(Error::Inconsistent)));
    }

    // decode rejects a checked shard whose index is out of range.
    #[test]
    fn test_decode_invalid_index() {
        let data = b"Testing recovery pieces";
        let total = 8u16;
        let min = 3u16;
        let (root, chunks) = encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY).unwrap();
        let mut invalid = checked(root, chunks[1].clone());
        invalid.index = 8;
        let pieces: Vec<_> = vec![
            checked(root, chunks[0].clone()),
            invalid,
            checked(root, chunks[6].clone()),
        ];
        let result = decode::<Sha256, _>(total, min, &root, pieces.iter(), &STRATEGY);
        assert!(matches!(result, Err(Error::InvalidIndex(8))));
    }

    // The maximum supported geometry (u16::MAX total shards) round-trips.
    #[test]
    fn test_max_chunks() {
        let data = vec![42u8; 1000];
        let total = u16::MAX;
        let min = u16::MAX / 2;
        let (root, chunks) = encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY).unwrap();
        let minimal = chunks
            .into_iter()
            .take(min as usize)
            .map(|c| checked(root, c))
            .collect::<Vec<_>>();
        let decoded = decode::<Sha256, _>(total, min, &root, minimal.iter(), &STRATEGY).unwrap();
        assert_eq!(decoded, data);
    }

    // A geometry the SIMD library cannot support surfaces its error.
    #[test]
    fn test_too_many_chunks() {
        let data = vec![42u8; 1000];
        let total = u16::MAX;
        let min = u16::MAX / 2 - 1;
        let result = encode::<Sha256, _>(total, min, data.as_slice(), &STRATEGY);
        assert!(matches!(
            result,
            Err(Error::ReedSolomon(
                reed_solomon_simd::Error::UnsupportedShardCount {
                    original_count: _,
                    recovery_count: _,
                }
            ))
        ));
    }

    // A config whose total overflows u16 fails before encoding.
    #[test]
    fn test_too_many_total_shards() {
        assert!(RS::encode(
            &Config {
                minimum_shards: NZU16!(u16::MAX / 2 + 1),
                extra_shards: NZU16!(u16::MAX),
            },
            [].as_slice(),
            &STRATEGY,
        )
        .is_err())
    }

    // Pooled-buffer encoding must produce the same bytes as plain encoding.
    #[test]
    fn test_chunk_encode_with_pool_matches_encode() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let pool = context.network_buffer_pool();
            let data = b"pool encoding test";
            let (_root, chunks) = encode::<Sha256, _>(5, 3, data.as_slice(), &STRATEGY).unwrap();
            let chunk = &chunks[0];
            let encoded = chunk.encode();
            let mut encoded_pool = chunk.encode_with_pool(pool);
            let mut encoded_pool_bytes = vec![0u8; encoded_pool.remaining()];
            encoded_pool.copy_to_slice(&mut encoded_pool_bytes);
            assert_eq!(encoded_pool_bytes, encoded.as_ref());
        });
    }

    // Codec round-trip conformance for Chunk (requires `arbitrary`).
    #[cfg(feature = "arbitrary")]
    mod conformance {
        use super::*;
        use commonware_codec::conformance::CodecConformance;
        use commonware_cryptography::sha256::Digest as Sha256Digest;

        commonware_conformance::conformance_tests! {
            CodecConformance<Chunk<Sha256Digest>>,
        }
    }
}