use crate::{
merkle::{
batch::MIN_TO_PARALLELIZE,
hasher::Hasher,
mmr::{
self,
mem::{Config, Mmr},
verification, Error, Location, Position, Proof,
},
storage::Storage,
Family as _,
},
metadata::{Config as MConfig, Metadata},
Context,
};
use commonware_codec::DecodeExt;
use commonware_cryptography::Digest;
use commonware_parallel::ThreadPool;
use commonware_utils::{
bitmap::{BitMap as UtilsBitMap, Prunable as PrunableBitMap},
sequence::prefixed_u64::U64,
};
use rayon::prelude::*;
use std::collections::HashSet;
use tracing::{debug, error, warn};
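/// Computes the authenticated root when the last chunk of the bitmap is only partially filled:
/// the MMR root (over all complete chunks) is hashed together with the big-endian encoding of the
/// number of bits already written to the last chunk and that chunk's digest.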
pub(crate) fn partial_chunk_root<H: Hasher<mmr::Family>, const N: usize>(
hasher: &H,
mmr_root: &H::Digest,
next_bit: u64,
last_chunk_digest: &H::Digest,
) -> H::Digest {
assert!(next_bit > 0);
assert!(next_bit < UtilsBitMap::<N>::CHUNK_SIZE_BITS);
let next_bit = next_bit.to_be_bytes();
hasher.hash([
mmr_root.as_ref(),
next_bit.as_slice(),
last_chunk_digest.as_ref(),
])
}
mod private {
pub trait Sealed {}
}
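/// Marker trait for the merkleization state of a [BitMap]. Sealed so that only [Merkleized] and
/// [Unmerkleized] can implement it.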
pub trait State<D: Digest>: private::Sealed + Sized + Send + Sync {}
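/// State indicating that all complete chunks have been added to the MMR and a root digest has
/// been cached.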
pub struct Merkleized<D: Digest> {
root: D,
}
impl<D: Digest> private::Sealed for Merkleized<D> {}
impl<D: Digest> State<D> for Merkleized<D> {}
pub struct Unmerkleized {
dirty_chunks: HashSet<usize>,
}
impl private::Sealed for Unmerkleized {}
impl<D: Digest> State<D> for Unmerkleized {}
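/// A [BitMap] in the [Merkleized] state.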
pub type MerkleizedBitMap<E, D, const N: usize> = BitMap<E, D, N, Merkleized<D>>;
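/// A [BitMap] in the [Unmerkleized] state.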
pub type UnmerkleizedBitMap<E, D, const N: usize> = BitMap<E, D, N, Unmerkleized>;
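/// A bitmap whose contents can be authenticated against a succinct root digest. Bits are grouped
/// into `N`-byte chunks, each complete chunk is stored as a leaf of an in-memory [Mmr], and a
/// partially filled last chunk is folded into the root separately (see `partial_chunk_root`). The
/// typestate parameter `S` records whether a root is currently cached ([Merkleized]) or updates
/// are still pending ([Unmerkleized]).
///
/// A rough usage sketch (not compiled; `ctx` and `hasher` stand in for a runtime context and an
/// MMR hasher such as the `StandardHasher<Sha256>` used in the tests below):
///
/// ```ignore
/// let bitmap = MerkleizedBitMap::<_, _, 32>::init(ctx, "partition", None, &hasher).await?;
/// let mut dirty = bitmap.into_dirty();
/// dirty.push(true); // append bits, or flip existing ones with `set_bit`...
/// let bitmap = dirty.merkleize(&hasher)?; // ...then recompute the cached root
/// let root = bitmap.root();
/// ```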
pub struct BitMap<E: Context, D: Digest, const N: usize, S: State<D> = Merkleized<D>> {
bitmap: PrunableBitMap<N>,
authenticated_len: usize,
mmr: Mmr<D>,
pool: Option<ThreadPool>,
state: S,
metadata: Metadata<E, U64, Vec<u8>>,
}
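/// Prefix for metadata keys under which pinned MMR nodes are stored.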
const NODE_PREFIX: u8 = 0;
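/// Prefix for the metadata key under which the number of pruned chunks is stored.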
const PRUNED_CHUNKS_PREFIX: u8 = 1;
impl<E: Context, D: Digest, const N: usize, S: State<D>> BitMap<E, D, N, S> {
pub const CHUNK_SIZE_BITS: u64 = PrunableBitMap::<N>::CHUNK_SIZE_BITS;
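    /// Returns the number of nodes in the underlying MMR (for the number of bits, see
    /// [Self::len]).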
#[inline]
pub fn size(&self) -> Position {
self.mmr.size()
}
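    /// Returns the number of bits in the bitmap, including any that have been pruned.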
#[inline]
pub const fn len(&self) -> u64 {
self.bitmap.len()
}
#[inline]
pub const fn is_empty(&self) -> bool {
self.len() == 0
}
#[inline]
pub const fn pruned_bits(&self) -> u64 {
self.bitmap.pruned_bits()
}
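    /// Returns the number of chunks that are completely filled (pruned chunks included); a
    /// partially filled trailing chunk is not counted.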
#[inline]
fn complete_chunks(&self) -> usize {
let chunks_len = self.bitmap.chunks_len();
if self.bitmap.is_chunk_aligned() {
chunks_len
} else {
chunks_len.checked_sub(1).unwrap()
}
}
#[inline]
pub fn last_chunk(&self) -> (&[u8; N], u64) {
self.bitmap.last_chunk()
}
#[inline]
pub fn get_chunk_containing(&self, bit: u64) -> &[u8; N] {
self.bitmap.get_chunk_containing(bit)
}
#[inline]
pub fn get_bit(&self, bit: u64) -> bool {
self.bitmap.get_bit(bit)
}
#[inline]
pub const fn get_bit_from_chunk(chunk: &[u8; N], bit: u64) -> bool {
PrunableBitMap::<N>::get_bit_from_chunk(chunk, bit)
}
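    /// Verifies that `chunk` is the chunk containing bit `bit` of the bitmap committed to by
    /// `root`, using `proof`. The proof's `leaves` field carries the total number of bits in the
    /// bitmap; when that count is not chunk-aligned, the root additionally commits to the
    /// partially filled last chunk (see `partial_chunk_root`), and the final digest in the proof
    /// supplies the extra input needed to recompute it.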
pub fn verify_bit_inclusion(
hasher: &impl Hasher<mmr::Family, Digest = D>,
proof: &Proof<D>,
chunk: &[u8; N],
bit: u64,
root: &D,
) -> bool {
let bit_len = *proof.leaves;
if bit >= bit_len {
debug!(bit_len, bit, "tried to verify non-existent bit");
return false;
}
let chunked_leaves = Location::new(PrunableBitMap::<N>::to_chunk_index(bit_len) as u64);
let mut mmr_proof = Proof {
leaves: chunked_leaves,
digests: proof.digests.clone(),
};
let loc = Location::new(PrunableBitMap::<N>::to_chunk_index(bit) as u64);
if bit_len.is_multiple_of(Self::CHUNK_SIZE_BITS) {
return mmr_proof.verify_element_inclusion(hasher, chunk, loc, root);
}
if proof.digests.is_empty() {
debug!("proof has no digests");
return false;
}
let last_digest = mmr_proof.digests.pop().unwrap();
if chunked_leaves == loc {
if !mmr_proof.digests.is_empty() {
debug!(
digests = mmr_proof.digests.len() + 1,
"proof over partial chunk should have exactly 1 digest"
);
return false;
}
let last_chunk_digest = hasher.digest(chunk);
let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
let reconstructed_root =
partial_chunk_root::<_, N>(hasher, &last_digest, next_bit, &last_chunk_digest);
return reconstructed_root == *root;
};
let mmr_root = match mmr_proof.reconstruct_root(hasher, &[chunk], loc) {
Ok(root) => root,
Err(error) => {
debug!(error = ?error, "invalid proof input");
return false;
}
};
let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
let reconstructed_root =
partial_chunk_root::<_, N>(hasher, &mmr_root, next_bit, &last_digest);
reconstructed_root == *root
}
}
impl<E: Context, D: Digest, const N: usize> MerkleizedBitMap<E, D, N> {
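    /// Initializes a bitmap from the metadata persisted in `partition`, or returns an empty bitmap
    /// if no pruning state was found. Restoration recovers only the pruning boundary and the MMR
    /// nodes pinned at it; bits appended after that boundary are not persisted here and must be
    /// re-added by the caller.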
pub async fn init(
context: E,
partition: &str,
pool: Option<ThreadPool>,
hasher: &impl Hasher<mmr::Family, Digest = D>,
) -> Result<Self, Error> {
let metadata_cfg = MConfig {
partition: partition.into(),
codec_config: ((0..).into(), ()),
};
let metadata =
Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;
let key: U64 = U64::new(PRUNED_CHUNKS_PREFIX, 0);
let pruned_chunks = match metadata.get(&key) {
Some(bytes) => u64::from_be_bytes(bytes.as_slice().try_into().map_err(|_| {
error!("pruned chunks value not a valid u64");
Error::DataCorrupted("pruned chunks value not a valid u64")
})?),
None => {
warn!("bitmap metadata does not contain pruned chunks, initializing as empty");
0
}
} as usize;
if pruned_chunks == 0 {
let mmr = Mmr::new(hasher);
let cached_root = *mmr.root();
return Ok(Self {
bitmap: PrunableBitMap::new(),
authenticated_len: 0,
mmr,
pool,
metadata,
state: Merkleized { root: cached_root },
});
}
let pruned_loc = Location::new(pruned_chunks as u64);
if !pruned_loc.is_valid() {
return Err(Error::DataCorrupted("pruned chunks exceeds MAX_LEAVES"));
}
let mut pinned_nodes = Vec::new();
for (index, pos) in mmr::Family::nodes_to_pin(pruned_loc).enumerate() {
let Some(bytes) = metadata.get(&U64::new(NODE_PREFIX, index as u64)) else {
error!(?pruned_loc, ?pos, "missing pinned node");
return Err(Error::MissingNode(pos));
};
let digest = D::decode(bytes.as_ref());
let Ok(digest) = digest else {
error!(?pruned_loc, ?pos, "could not convert node bytes to digest");
return Err(Error::MissingNode(pos));
};
pinned_nodes.push(digest);
}
let mmr = Mmr::init(
Config {
nodes: Vec::new(),
pruning_boundary: Location::new(pruned_chunks as u64),
pinned_nodes,
},
hasher,
)?;
let bitmap = PrunableBitMap::new_with_pruned_chunks(pruned_chunks)
.expect("pruned_chunks should never overflow");
let cached_root = *mmr.root();
Ok(Self {
bitmap,
authenticated_len: pruned_chunks,
mmr,
pool,
metadata,
state: Merkleized { root: cached_root },
})
}
pub fn get_node(&self, position: Position) -> Option<D> {
self.mmr.get_node(position)
}
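    /// Persists the state needed to restore the bitmap at its current pruning boundary: the number
    /// of pruned chunks and the MMR nodes pinned at that boundary. Previously stored metadata is
    /// cleared first.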
pub async fn write_pruned(&mut self) -> Result<(), Error> {
self.metadata.clear();
let key = U64::new(PRUNED_CHUNKS_PREFIX, 0);
self.metadata
.put(key, self.bitmap.pruned_chunks().to_be_bytes().to_vec());
let pruned_loc = Location::new(self.bitmap.pruned_chunks() as u64);
assert!(
pruned_loc.is_valid(),
"expected valid location from pruned_chunks"
);
        for (i, pos) in mmr::Family::nodes_to_pin(pruned_loc).enumerate() {
            let digest = self.mmr.get_node_unchecked(pos);
let key = U64::new(NODE_PREFIX, i as u64);
self.metadata.put(key, digest.to_vec());
}
self.metadata.sync().await.map_err(Error::Metadata)
}
pub async fn destroy(self) -> Result<(), Error> {
self.metadata.destroy().await.map_err(Error::Metadata)
}
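    /// Prunes all chunks preceding the chunk that contains `bit`, along with their MMR leaves.
    /// Does nothing if that chunk has already been pruned.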
pub fn prune_to_bit(&mut self, bit: u64) -> Result<(), Error> {
let chunk = PrunableBitMap::<N>::to_chunk_index(bit);
if chunk < self.bitmap.pruned_chunks() {
return Ok(());
}
self.bitmap.prune_to_bit(bit);
self.authenticated_len = self.complete_chunks();
self.mmr.prune(Location::new(chunk as u64))?;
Ok(())
}
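    /// Returns the cached root digest, which commits to every bit in the bitmap, including any in
    /// a partially filled last chunk.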
pub const fn root(&self) -> D {
self.state.root
}
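    /// Returns an inclusion proof for the chunk containing `bit`, along with a copy of that chunk.
    /// The proof's `leaves` field is set to the total number of bits so verifiers can detect a
    /// partially filled last chunk; when the bitmap is not chunk-aligned, the digest required to
    /// recompute the partial-chunk root is appended to the proof's digests.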
pub async fn proof(
&self,
hasher: &impl Hasher<mmr::Family, Digest = D>,
bit: u64,
) -> Result<(Proof<D>, [u8; N]), Error> {
if bit >= self.len() {
return Err(Error::BitOutOfBounds(bit, self.len()));
}
let chunk = *self.get_chunk_containing(bit);
let chunk_loc = Location::from(PrunableBitMap::<N>::to_chunk_index(bit));
let (last_chunk, next_bit) = self.bitmap.last_chunk();
if chunk_loc == self.mmr.leaves() {
assert!(next_bit > 0);
return Ok((
Proof {
leaves: Location::new(self.len()),
digests: vec![*self.mmr.root()],
},
chunk,
));
}
let range = chunk_loc..chunk_loc + 1;
let mut proof = verification::range_proof(hasher, &self.mmr, range).await?;
proof.leaves = Location::new(self.len());
if next_bit == Self::CHUNK_SIZE_BITS {
return Ok((proof, chunk));
}
let last_chunk_digest = hasher.digest(last_chunk);
proof.digests.push(last_chunk_digest);
Ok((proof, chunk))
}
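    /// Converts the bitmap into its [Unmerkleized] form so that bits can be appended or modified.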
pub fn into_dirty(self) -> UnmerkleizedBitMap<E, D, N> {
UnmerkleizedBitMap {
bitmap: self.bitmap,
authenticated_len: self.authenticated_len,
mmr: self.mmr,
pool: self.pool,
state: Unmerkleized {
dirty_chunks: HashSet::new(),
},
metadata: self.metadata,
}
}
}
impl<E: Context, D: Digest, const N: usize> UnmerkleizedBitMap<E, D, N> {
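    /// Appends a single bit to the end of the bitmap.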
pub fn push(&mut self, bit: bool) {
self.bitmap.push(bit);
}
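    /// Sets the value of the bit at index `bit`. If the bit belongs to a chunk already added to
    /// the MMR, that chunk is marked dirty so its leaf is re-hashed by the next [Self::merkleize].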
pub fn set_bit(&mut self, bit: u64, value: bool) {
self.bitmap.set_bit(bit, value);
let chunk = PrunableBitMap::<N>::to_chunk_index(bit);
if chunk < self.authenticated_len {
self.state.dirty_chunks.insert(chunk);
}
}
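    /// Returns the locations of all chunks whose MMR leaves will be added or updated by the next
    /// [Self::merkleize]: modified chunks that are already authenticated, plus complete chunks not
    /// yet added to the MMR.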
pub fn dirty_chunks(&self) -> Vec<Location> {
let mut chunks: Vec<Location> = self
.state
.dirty_chunks
.iter()
.map(|&chunk| Location::new(chunk as u64))
.collect();
for i in self.authenticated_len..self.complete_chunks() {
chunks.push(Location::new(i as u64));
}
chunks
}
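    /// Applies all pending additions and updates to the MMR (using the configured thread pool for
    /// leaf hashing when batches are large enough), recomputes the root, folding in a partially
    /// filled last chunk if present, and returns the bitmap in its [Merkleized] form.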
pub fn merkleize(
mut self,
hasher: &impl Hasher<mmr::Family, Digest = D>,
) -> Result<MerkleizedBitMap<E, D, N>, Error> {
let mut batch = self.mmr.new_batch().with_pool(self.pool.clone());
let start = self.authenticated_len;
let end = self.complete_chunks();
for i in start..end {
batch = batch.add(hasher, self.bitmap.get_chunk(i));
}
self.authenticated_len = end;
let dirty: Vec<(Location, D)> = {
let updates: Vec<(Location, &[u8; N])> = self
.state
.dirty_chunks
.iter()
.map(|&chunk| {
let loc = Location::new(chunk as u64);
(loc, self.bitmap.get_chunk(chunk))
})
.collect();
match self.pool.as_ref() {
Some(pool) if updates.len() >= MIN_TO_PARALLELIZE => pool.install(|| {
updates
.par_iter()
.map_init(
|| hasher.clone(),
|h, &(loc, chunk)| {
let pos = Position::try_from(loc).unwrap();
(loc, h.leaf_digest(pos, chunk.as_ref()))
},
)
.collect()
}),
_ => updates
.iter()
.map(|&(loc, chunk)| {
let pos = Position::try_from(loc).unwrap();
(loc, hasher.leaf_digest(pos, chunk.as_ref()))
})
.collect(),
}
};
batch = batch.update_leaf_batched(&dirty)?;
let batch = batch.merkleize(&self.mmr, hasher);
self.mmr.apply_batch(&batch)?;
let mmr_root = *self.mmr.root();
let cached_root = if self.bitmap.is_chunk_aligned() {
mmr_root
} else {
let (last_chunk, next_bit) = self.bitmap.last_chunk();
let last_chunk_digest = hasher.digest(last_chunk);
partial_chunk_root::<_, N>(hasher, &mmr_root, next_bit, &last_chunk_digest)
};
Ok(MerkleizedBitMap {
bitmap: self.bitmap,
authenticated_len: self.authenticated_len,
mmr: self.mmr,
pool: self.pool,
metadata: self.metadata,
state: Merkleized { root: cached_root },
})
}
}
impl<E: Context, D: Digest, const N: usize> Storage<mmr::Family> for MerkleizedBitMap<E, D, N> {
type Digest = D;
async fn size(&self) -> Position {
self.size()
}
async fn get_node(&self, position: Position) -> Result<Option<D>, Error> {
Ok(self.get_node(position))
}
}
#[cfg(test)]
mod tests {
use super::*;
use commonware_codec::FixedSize;
use commonware_cryptography::{sha256, Hasher, Sha256};
use commonware_macros::test_traced;
use commonware_runtime::{deterministic, Metrics, Runner as _};
use mmr::StandardHasher;
const SHA256_SIZE: usize = sha256::Digest::SIZE;
type TestContext = deterministic::Context;
type TestMerkleizedBitMap<const N: usize> = MerkleizedBitMap<TestContext, sha256::Digest, N>;
impl<E: Context, D: Digest, const N: usize> UnmerkleizedBitMap<E, D, N> {
fn push_byte(&mut self, byte: u8) {
self.bitmap.push_byte(byte);
}
fn push_chunk(&mut self, chunk: &[u8; N]) {
self.bitmap.push_chunk(chunk);
}
}
fn test_chunk<const N: usize>(s: &[u8]) -> [u8; N] {
assert_eq!(N % 32, 0);
let mut vec: Vec<u8> = Vec::new();
for _ in 0..N / 32 {
vec.extend(Sha256::hash(s).iter());
}
vec.try_into().unwrap()
}
#[test_traced]
fn test_bitmap_verify_empty_proof() {
let executor = deterministic::Runner::default();
executor.start(|_context| async move {
let hasher = StandardHasher::<Sha256>::new();
let proof = Proof {
leaves: Location::new(100),
digests: Vec::new(),
};
assert!(
!TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
&hasher,
&proof,
&[0u8; SHA256_SIZE],
0,
&Sha256::fill(0x00),
),
"proof without digests shouldn't verify or panic"
);
});
}
#[test_traced]
fn test_bitmap_empty_then_one() {
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let hasher = StandardHasher::<Sha256>::new();
let mut bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
.await
.unwrap();
assert_eq!(bitmap.len(), 0);
assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
bitmap.prune_to_bit(0).unwrap();
assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
let root = bitmap.root();
let mut dirty = bitmap.into_dirty();
dirty.push(true);
bitmap = dirty.merkleize(&hasher).unwrap();
let new_root = bitmap.root();
assert_ne!(root, new_root);
let root = new_root;
bitmap.prune_to_bit(1).unwrap();
assert_eq!(bitmap.len(), 1);
assert_ne!(bitmap.last_chunk().0, &[0u8; SHA256_SIZE]);
assert_eq!(bitmap.last_chunk().1, 1);
assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
assert_eq!(root, bitmap.root());
let mut dirty = bitmap.into_dirty();
for i in 0..(TestMerkleizedBitMap::<SHA256_SIZE>::CHUNK_SIZE_BITS - 1) {
dirty.push(i % 2 != 0);
}
bitmap = dirty.merkleize(&hasher).unwrap();
assert_eq!(bitmap.len(), 256);
assert_ne!(root, bitmap.root());
let root = bitmap.root();
let (proof, chunk) = bitmap.proof(&hasher, 0).await.unwrap();
assert!(
TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
&hasher, &proof, &chunk, 255, &root
),
"failed to prove bit in only chunk"
);
assert!(
!TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
&hasher, &proof, &chunk, 256, &root
),
"should not be able to prove bit outside of chunk"
);
bitmap.prune_to_bit(256).unwrap();
assert_eq!(bitmap.len(), 256);
assert_eq!(bitmap.bitmap.pruned_chunks(), 1);
assert_eq!(bitmap.bitmap.pruned_bits(), 256);
assert_eq!(root, bitmap.root());
bitmap.prune_to_bit(10).unwrap();
assert_eq!(root, bitmap.root());
});
}
#[test_traced]
fn test_bitmap_building() {
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let test_chunk = test_chunk(b"test");
let hasher: StandardHasher<Sha256> = StandardHasher::new();
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
TestMerkleizedBitMap::init(context.with_label("bitmap1"), "test1", None, &hasher)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
dirty.push_chunk(&test_chunk);
for b in test_chunk {
for j in 0..8 {
let mask = 1 << j;
let bit = (b & mask) != 0;
dirty.push(bit);
}
}
assert_eq!(dirty.len(), 256 * 2);
let bitmap = dirty.merkleize(&hasher).unwrap();
let root = bitmap.root();
let inner_root = *bitmap.mmr.root();
assert_eq!(root, inner_root);
{
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
context.with_label("bitmap2"),
"test2",
None,
&hasher,
)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
dirty.push_chunk(&test_chunk);
dirty.push_chunk(&test_chunk);
let bitmap = dirty.merkleize(&hasher).unwrap();
let same_root = bitmap.root();
assert_eq!(root, same_root);
}
{
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
context.with_label("bitmap3"),
"test3",
None,
&hasher,
)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
dirty.push_chunk(&test_chunk);
for b in test_chunk {
dirty.push_byte(b);
}
let bitmap = dirty.merkleize(&hasher).unwrap();
let same_root = bitmap.root();
assert_eq!(root, same_root);
}
});
}
#[test_traced]
#[should_panic(expected = "cannot add chunk")]
fn test_bitmap_build_chunked_panic() {
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let hasher: StandardHasher<Sha256> = StandardHasher::new();
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
dirty.push_chunk(&test_chunk(b"test"));
dirty.push(true);
dirty.push_chunk(&test_chunk(b"panic"));
});
}
#[test_traced]
#[should_panic(expected = "cannot add byte")]
fn test_bitmap_build_byte_panic() {
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let hasher: StandardHasher<Sha256> = StandardHasher::new();
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
dirty.push_chunk(&test_chunk(b"test"));
dirty.push(true);
dirty.push_byte(0x01);
});
}
#[test_traced]
#[should_panic(expected = "out of bounds")]
fn test_bitmap_get_out_of_bounds_bit_panic() {
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let hasher: StandardHasher<Sha256> = StandardHasher::new();
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
dirty.push_chunk(&test_chunk(b"test"));
dirty.get_bit(256);
});
}
#[test_traced]
#[should_panic(expected = "pruned")]
fn test_bitmap_get_pruned_bit_panic() {
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let hasher: StandardHasher<Sha256> = StandardHasher::new();
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
dirty.push_chunk(&test_chunk(b"test"));
dirty.push_chunk(&test_chunk(b"test2"));
let mut bitmap = dirty.merkleize(&hasher).unwrap();
bitmap.prune_to_bit(256).unwrap();
bitmap.get_bit(255);
});
}
#[test_traced]
fn test_bitmap_root_boundaries() {
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let hasher = StandardHasher::<Sha256>::new();
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
dirty.push_chunk(&test_chunk(b"test"));
dirty.push_chunk(&test_chunk(b"test2"));
let mut bitmap = dirty.merkleize(&hasher).unwrap();
let root = bitmap.root();
let mut dirty = bitmap.into_dirty();
dirty.push(true);
bitmap = dirty.merkleize(&hasher).unwrap();
let new_root = bitmap.root();
assert_ne!(root, new_root);
assert_eq!(bitmap.mmr.size(), 3);
for _ in 0..(SHA256_SIZE * 8 - 1) {
let mut dirty = bitmap.into_dirty();
dirty.push(false);
bitmap = dirty.merkleize(&hasher).unwrap();
let newer_root = bitmap.root();
assert_ne!(new_root, newer_root);
}
assert_eq!(bitmap.mmr.size(), 4);
let mut dirty = bitmap.into_dirty();
dirty.push(false);
assert_eq!(dirty.len(), 256 * 3 + 1);
bitmap = dirty.merkleize(&hasher).unwrap();
let newer_root = bitmap.root();
assert_ne!(new_root, newer_root);
bitmap.prune_to_bit(bitmap.len()).unwrap();
assert_eq!(bitmap.bitmap.pruned_chunks(), 3);
assert_eq!(bitmap.len(), 256 * 3 + 1);
assert_eq!(newer_root, bitmap.root());
});
}
#[test_traced]
fn test_bitmap_get_set_bits() {
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let hasher = StandardHasher::<Sha256>::new();
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
dirty.push_chunk(&test_chunk(b"test"));
dirty.push_chunk(&test_chunk(b"test2"));
dirty.push_chunk(&test_chunk(b"test3"));
dirty.push_chunk(&test_chunk(b"test4"));
dirty.push_byte(0xF1);
dirty.push(true);
dirty.push(false);
dirty.push(true);
let mut bitmap = dirty.merkleize(&hasher).unwrap();
let root = bitmap.root();
for bit_pos in (0..bitmap.len()).rev() {
let bit = bitmap.get_bit(bit_pos);
let mut dirty = bitmap.into_dirty();
dirty.set_bit(bit_pos, !bit);
bitmap = dirty.merkleize(&hasher).unwrap();
let new_root = bitmap.root();
assert_ne!(root, new_root, "failed at bit {bit_pos}");
let mut dirty = bitmap.into_dirty();
dirty.set_bit(bit_pos, bit);
bitmap = dirty.merkleize(&hasher).unwrap();
let new_root = bitmap.root();
assert_eq!(root, new_root);
}
let start_bit = (SHA256_SIZE * 8 * 2) as u64;
bitmap.prune_to_bit(start_bit).unwrap();
for bit_pos in (start_bit..bitmap.len()).rev() {
let bit = bitmap.get_bit(bit_pos);
let mut dirty = bitmap.into_dirty();
dirty.set_bit(bit_pos, !bit);
bitmap = dirty.merkleize(&hasher).unwrap();
let new_root = bitmap.root();
assert_ne!(root, new_root, "failed at bit {bit_pos}");
let mut dirty = bitmap.into_dirty();
dirty.set_bit(bit_pos, bit);
bitmap = dirty.merkleize(&hasher).unwrap();
let new_root = bitmap.root();
assert_eq!(root, new_root);
}
});
}
fn flip_bit<const N: usize>(bit: u64, chunk: &[u8; N]) -> [u8; N] {
let byte = PrunableBitMap::<N>::chunk_byte_offset(bit);
let mask = PrunableBitMap::<N>::chunk_byte_bitmask(bit);
let mut tmp = chunk.to_vec();
tmp[byte] ^= mask;
tmp.try_into().unwrap()
}
#[test_traced]
fn test_bitmap_mmr_proof_verification() {
test_bitmap_mmr_proof_verification_n::<32>();
test_bitmap_mmr_proof_verification_n::<64>();
}
fn test_bitmap_mmr_proof_verification_n<const N: usize>() {
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let hasher = StandardHasher::<Sha256>::new();
let bitmap: MerkleizedBitMap<TestContext, sha256::Digest, N> =
MerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
for i in 0u32..10 {
dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
}
dirty.push_byte(0xA6);
dirty.push(true);
dirty.push(false);
dirty.push(true);
dirty.push(true);
dirty.push(false);
let mut bitmap = dirty.merkleize(&hasher).unwrap();
let root = bitmap.root();
for prune_to_bit in (0..bitmap.len()).step_by(251) {
assert_eq!(bitmap.root(), root);
bitmap.prune_to_bit(prune_to_bit).unwrap();
for i in prune_to_bit..bitmap.len() {
let (proof, chunk) = bitmap.proof(&hasher, i).await.unwrap();
assert!(
MerkleizedBitMap::<TestContext, _, N>::verify_bit_inclusion(
&hasher, &proof, &chunk, i, &root
),
"failed to prove bit {i}",
);
let corrupted = flip_bit(i, &chunk);
assert!(
!MerkleizedBitMap::<TestContext, _, N>::verify_bit_inclusion(
&hasher, &proof, &corrupted, i, &root
),
"proving bit {i} after flipping should have failed",
);
}
}
})
}
#[test_traced]
fn test_bitmap_persistence() {
const PARTITION: &str = "bitmap-test";
const FULL_CHUNK_COUNT: usize = 100;
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let hasher = StandardHasher::<Sha256>::new();
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
TestMerkleizedBitMap::init(context.with_label("initial"), PARTITION, None, &hasher)
.await
.unwrap();
assert_eq!(bitmap.len(), 0);
let mut dirty = bitmap.into_dirty();
for i in 0..FULL_CHUNK_COUNT {
dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
}
let mut bitmap = dirty.merkleize(&hasher).unwrap();
let chunk_aligned_root = bitmap.root();
let mut dirty = bitmap.into_dirty();
dirty.push_byte(0xA6);
dirty.push(true);
dirty.push(false);
dirty.push(true);
bitmap = dirty.merkleize(&hasher).unwrap();
let root = bitmap.root();
for i in (10..=FULL_CHUNK_COUNT).step_by(10) {
bitmap
.prune_to_bit(
(i * TestMerkleizedBitMap::<SHA256_SIZE>::CHUNK_SIZE_BITS as usize) as u64,
)
.unwrap();
bitmap.write_pruned().await.unwrap();
bitmap = TestMerkleizedBitMap::init(
context.with_label(&format!("restore_{i}")),
PARTITION,
None,
&hasher,
)
.await
.unwrap();
let _ = bitmap.root();
let mut dirty = bitmap.into_dirty();
for j in i..FULL_CHUNK_COUNT {
dirty.push_chunk(&test_chunk(format!("test{j}").as_bytes()));
}
assert_eq!(dirty.bitmap.pruned_chunks(), i);
assert_eq!(dirty.len(), FULL_CHUNK_COUNT as u64 * 256);
bitmap = dirty.merkleize(&hasher).unwrap();
assert_eq!(bitmap.root(), chunk_aligned_root);
let mut dirty = bitmap.into_dirty();
dirty.push_byte(0xA6);
dirty.push(true);
dirty.push(false);
dirty.push(true);
bitmap = dirty.merkleize(&hasher).unwrap();
assert_eq!(bitmap.root(), root);
}
});
}
#[test_traced]
fn test_bitmap_proof_out_of_bounds() {
let executor = deterministic::Runner::default();
executor.start(|context| async move {
let hasher = StandardHasher::<Sha256>::new();
let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
.await
.unwrap();
let mut dirty = bitmap.into_dirty();
dirty.push_chunk(&test_chunk(b"test"));
let bitmap = dirty.merkleize(&hasher).unwrap();
let result = bitmap.proof(&hasher, 256).await;
assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
if offset == 256 && size == 256));
let result = bitmap.proof(&hasher, 1000).await;
assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
if offset == 1000 && size == 256));
assert!(bitmap.proof(&hasher, 0).await.is_ok());
assert!(bitmap.proof(&hasher, 255).await.is_ok());
});
}
}