1use crate::{
14 merkle::{
15 batch::MIN_TO_PARALLELIZE,
16 hasher::Hasher,
17 mmr::{
18 self,
19 mem::{Config, Mmr},
20 verification, Error, Location, Position, Proof,
21 },
22 storage::Storage,
23 Family as _,
24 },
25 metadata::{Config as MConfig, Metadata},
26 Context,
27};
28use commonware_codec::DecodeExt;
29use commonware_cryptography::Digest;
30use commonware_parallel::ThreadPool;
31use commonware_utils::{
32 bitmap::{BitMap as UtilsBitMap, Prunable as PrunableBitMap},
33 sequence::prefixed_u64::U64,
34};
35use rayon::prelude::*;
36use std::collections::HashSet;
37use tracing::{debug, error, warn};
38
39pub(crate) fn partial_chunk_root<H: Hasher<mmr::Family>, const N: usize>(
42 hasher: &H,
43 mmr_root: &H::Digest,
44 next_bit: u64,
45 last_chunk_digest: &H::Digest,
46) -> H::Digest {
47 assert!(next_bit > 0);
48 assert!(next_bit < UtilsBitMap::<N>::CHUNK_SIZE_BITS);
49 let next_bit = next_bit.to_be_bytes();
50 hasher.hash([
51 mmr_root.as_ref(),
52 next_bit.as_slice(),
53 last_chunk_digest.as_ref(),
54 ])
55}
56
/// Seals the [State] trait so only types declared in this module may
/// implement it.
mod private {
    pub trait Sealed {}
}
60
/// Marker trait for the merkleization state of a [BitMap] (sealed; the only
/// implementations are [Merkleized] and [Unmerkleized]).
pub trait State<D: Digest>: private::Sealed + Sized + Send + Sync {}
63
/// State of a [BitMap] whose MMR reflects all modifications; carries the
/// cached root digest computed at merkleization time.
pub struct Merkleized<D: Digest> {
    // Root cached by `merkleize` (or `init`); returned by `root()` without
    // recomputation.
    root: D,
}
69
// Merkleized is one of the two sealed State implementations.
impl<D: Digest> private::Sealed for Merkleized<D> {}
impl<D: Digest> State<D> for Merkleized<D> {}
72
/// State of a [BitMap] with pending modifications not yet reflected in the
/// MMR; a root cannot be produced until `merkleize` is called.
pub struct Unmerkleized {
    // Indices of previously-authenticated chunks whose bits were modified via
    // `set_bit`; their MMR leaves must be recomputed during `merkleize`.
    dirty_chunks: HashSet<usize>,
}
83
// Unmerkleized is the other sealed State implementation.
impl private::Sealed for Unmerkleized {}
impl<D: Digest> State<D> for Unmerkleized {}
86
/// A [BitMap] in the merkleized state: a cached root is available and proofs
/// can be generated.
pub type MerkleizedBitMap<E, D, const N: usize> = BitMap<E, D, N, Merkleized<D>>;

/// A [BitMap] accepting modifications; call `merkleize` to return to the
/// merkleized state.
pub type UnmerkleizedBitMap<E, D, const N: usize> = BitMap<E, D, N, Unmerkleized>;
92
/// A prunable bitmap whose complete N-byte chunks are authenticated as leaves
/// of an MMR, parameterized by its merkleization state `S`.
pub struct BitMap<E: Context, D: Digest, const N: usize, S: State<D> = Merkleized<D>> {
    // Underlying prunable bit storage, organized in N-byte chunks.
    bitmap: PrunableBitMap<N>,

    // Number of chunks (counting from the start of the bitmap, including
    // pruned ones) currently reflected as leaves in `mmr`.
    authenticated_len: usize,

    // MMR over the digests of complete chunks.
    mmr: Mmr<D>,

    // Optional thread pool used to parallelize digest computation.
    pool: Option<ThreadPool>,

    // State-specific data: cached root (Merkleized) or dirty-chunk set
    // (Unmerkleized).
    state: S,

    // Metadata store persisting the pruned-chunk count and pinned MMR nodes.
    metadata: Metadata<E, U64, Vec<u8>>,
}
138
/// Metadata key prefix under which pinned MMR node digests are stored.
const NODE_PREFIX: u8 = 0;

/// Metadata key prefix under which the pruned-chunk count is stored.
const PRUNED_CHUNKS_PREFIX: u8 = 1;
impl<E: Context, D: Digest, const N: usize, S: State<D>> BitMap<E, D, N, S> {
    /// Number of bits in a single chunk.
    pub const CHUNK_SIZE_BITS: u64 = PrunableBitMap::<N>::CHUNK_SIZE_BITS;

    /// Returns the size (node count) of the underlying MMR.
    #[inline]
    pub fn size(&self) -> Position {
        self.mmr.size()
    }

    /// Returns the number of bits in the bitmap (pruned bits included).
    #[inline]
    pub const fn len(&self) -> u64 {
        self.bitmap.len()
    }

    /// Returns true if the bitmap contains no bits.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the number of bits that have been pruned.
    #[inline]
    pub const fn pruned_bits(&self) -> u64 {
        self.bitmap.pruned_bits()
    }

    /// Returns the number of complete chunks (excludes a trailing partial
    /// chunk, if any).
    #[inline]
    fn complete_chunks(&self) -> usize {
        let chunks_len = self.bitmap.chunks_len();
        if self.bitmap.is_chunk_aligned() {
            chunks_len
        } else {
            // Not aligned: the final chunk is partial, so it doesn't count.
            chunks_len.checked_sub(1).unwrap()
        }
    }

    /// Returns the last chunk and the number of bits it currently holds.
    #[inline]
    pub fn last_chunk(&self) -> (&[u8; N], u64) {
        self.bitmap.last_chunk()
    }

    /// Returns the chunk containing bit `bit`.
    #[inline]
    pub fn get_chunk_containing(&self, bit: u64) -> &[u8; N] {
        self.bitmap.get_chunk_containing(bit)
    }

    /// Returns the value of bit `bit`.
    #[inline]
    pub fn get_bit(&self, bit: u64) -> bool {
        self.bitmap.get_bit(bit)
    }

    /// Returns the value of bit `bit` within the given chunk.
    #[inline]
    pub const fn get_bit_from_chunk(chunk: &[u8; N], bit: u64) -> bool {
        PrunableBitMap::<N>::get_bit_from_chunk(chunk, bit)
    }

    /// Verifies that `chunk`, containing bit `bit`, is included in the bitmap
    /// committed to by `root`. Note that `proof.leaves` carries the bitmap's
    /// length in *bits*, not the MMR's leaf (chunk) count.
    pub fn verify_bit_inclusion(
        hasher: &impl Hasher<mmr::Family, Digest = D>,
        proof: &Proof<D>,
        chunk: &[u8; N],
        bit: u64,
        root: &D,
    ) -> bool {
        let bit_len = *proof.leaves;
        if bit >= bit_len {
            debug!(bit_len, bit, "tried to verify non-existent bit");
            return false;
        }

        // Rebuild an MMR-level proof: convert the bit count to a chunk count.
        let chunked_leaves = Location::new(PrunableBitMap::<N>::to_chunk_index(bit_len) as u64);
        let mut mmr_proof = Proof {
            leaves: chunked_leaves,
            digests: proof.digests.clone(),
        };

        let loc = Location::new(PrunableBitMap::<N>::to_chunk_index(bit) as u64);
        if bit_len.is_multiple_of(Self::CHUNK_SIZE_BITS) {
            // Chunk-aligned bitmap: the root is the plain MMR root.
            return mmr_proof.verify_element_inclusion(hasher, chunk, loc, root);
        }

        if proof.digests.is_empty() {
            debug!("proof has no digests");
            return false;
        }
        // Bitmap ends mid-chunk: the final digest was appended on top of the
        // MMR proof (partial-chunk digest, or the MMR root itself when the
        // target bit lives in the partial chunk).
        let last_digest = mmr_proof.digests.pop().unwrap();

        if chunked_leaves == loc {
            // Target bit is inside the partial last chunk: the proof must
            // consist of exactly one digest (the MMR root).
            if !mmr_proof.digests.is_empty() {
                debug!(
                    digests = mmr_proof.digests.len() + 1,
                    "proof over partial chunk should have exactly 1 digest"
                );
                return false;
            }
            let last_chunk_digest = hasher.digest(chunk);
            let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
            let reconstructed_root =
                partial_chunk_root::<_, N>(hasher, &last_digest, next_bit, &last_chunk_digest);
            return reconstructed_root == *root;
        };

        // Target bit is in a complete chunk: reconstruct the MMR root from
        // the proof, then fold in the partial-chunk digest.
        let mmr_root = match mmr_proof.reconstruct_root(hasher, &[chunk], loc) {
            Ok(root) => root,
            Err(error) => {
                debug!(error = ?error, "invalid proof input");
                return false;
            }
        };

        let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
        let reconstructed_root =
            partial_chunk_root::<_, N>(hasher, &mmr_root, next_bit, &last_digest);

        reconstructed_root == *root
    }
}
288
289impl<E: Context, D: Digest, const N: usize> MerkleizedBitMap<E, D, N> {
290 pub async fn init(
297 context: E,
298 partition: &str,
299 pool: Option<ThreadPool>,
300 hasher: &impl Hasher<mmr::Family, Digest = D>,
301 ) -> Result<Self, Error> {
302 let metadata_cfg = MConfig {
303 partition: partition.into(),
304 codec_config: ((0..).into(), ()),
305 };
306 let metadata =
307 Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;
308
309 let key: U64 = U64::new(PRUNED_CHUNKS_PREFIX, 0);
310 let pruned_chunks = match metadata.get(&key) {
311 Some(bytes) => u64::from_be_bytes(bytes.as_slice().try_into().map_err(|_| {
312 error!("pruned chunks value not a valid u64");
313 Error::DataCorrupted("pruned chunks value not a valid u64")
314 })?),
315 None => {
316 warn!("bitmap metadata does not contain pruned chunks, initializing as empty");
317 0
318 }
319 } as usize;
320 if pruned_chunks == 0 {
321 let mmr = Mmr::new(hasher);
322 let cached_root = *mmr.root();
323 return Ok(Self {
324 bitmap: PrunableBitMap::new(),
325 authenticated_len: 0,
326 mmr,
327 pool,
328 metadata,
329 state: Merkleized { root: cached_root },
330 });
331 }
332 let pruned_loc = Location::new(pruned_chunks as u64);
333 if !pruned_loc.is_valid() {
334 return Err(Error::DataCorrupted("pruned chunks exceeds MAX_LEAVES"));
335 }
336
337 let mut pinned_nodes = Vec::new();
338 for (index, pos) in mmr::Family::nodes_to_pin(pruned_loc).enumerate() {
339 let Some(bytes) = metadata.get(&U64::new(NODE_PREFIX, index as u64)) else {
340 error!(?pruned_loc, ?pos, "missing pinned node");
341 return Err(Error::MissingNode(pos));
342 };
343 let digest = D::decode(bytes.as_ref());
344 let Ok(digest) = digest else {
345 error!(?pruned_loc, ?pos, "could not convert node bytes to digest");
346 return Err(Error::MissingNode(pos));
347 };
348 pinned_nodes.push(digest);
349 }
350
351 let mmr = Mmr::init(
352 Config {
353 nodes: Vec::new(),
354 pruning_boundary: Location::new(pruned_chunks as u64),
355 pinned_nodes,
356 },
357 hasher,
358 )?;
359
360 let bitmap = PrunableBitMap::new_with_pruned_chunks(pruned_chunks)
361 .expect("pruned_chunks should never overflow");
362 let cached_root = *mmr.root();
363 Ok(Self {
364 bitmap,
365 authenticated_len: pruned_chunks,
367 mmr,
368 pool,
369 metadata,
370 state: Merkleized { root: cached_root },
371 })
372 }
373
374 pub fn get_node(&self, position: Position) -> Option<D> {
375 self.mmr.get_node(position)
376 }
377
378 pub async fn write_pruned(&mut self) -> Result<(), Error> {
382 self.metadata.clear();
383
384 let key = U64::new(PRUNED_CHUNKS_PREFIX, 0);
386 self.metadata
387 .put(key, self.bitmap.pruned_chunks().to_be_bytes().to_vec());
388
389 let pruned_loc = Location::new(self.bitmap.pruned_chunks() as u64);
391 assert!(
392 pruned_loc.is_valid(),
393 "expected valid location from pruned_chunks"
394 );
395 for (i, digest) in mmr::Family::nodes_to_pin(pruned_loc).enumerate() {
396 let digest = self.mmr.get_node_unchecked(digest);
397 let key = U64::new(NODE_PREFIX, i as u64);
398 self.metadata.put(key, digest.to_vec());
399 }
400
401 self.metadata.sync().await.map_err(Error::Metadata)
402 }
403
404 pub async fn destroy(self) -> Result<(), Error> {
406 self.metadata.destroy().await.map_err(Error::Metadata)
407 }
408
409 pub fn prune_to_bit(&mut self, bit: u64) -> Result<(), Error> {
417 let chunk = PrunableBitMap::<N>::to_chunk_index(bit);
418 if chunk < self.bitmap.pruned_chunks() {
419 return Ok(());
420 }
421
422 self.bitmap.prune_to_bit(bit);
424
425 self.authenticated_len = self.complete_chunks();
427
428 self.mmr.prune(Location::new(chunk as u64))?;
429 Ok(())
430 }
431
432 pub const fn root(&self) -> D {
444 self.state.root
445 }
446
447 pub async fn proof(
459 &self,
460 hasher: &impl Hasher<mmr::Family, Digest = D>,
461 bit: u64,
462 ) -> Result<(Proof<D>, [u8; N]), Error> {
463 if bit >= self.len() {
464 return Err(Error::BitOutOfBounds(bit, self.len()));
465 }
466
467 let chunk = *self.get_chunk_containing(bit);
468 let chunk_loc = Location::from(PrunableBitMap::<N>::to_chunk_index(bit));
469 let (last_chunk, next_bit) = self.bitmap.last_chunk();
470
471 if chunk_loc == self.mmr.leaves() {
472 assert!(next_bit > 0);
473 return Ok((
476 Proof {
477 leaves: Location::new(self.len()),
478 digests: vec![*self.mmr.root()],
479 },
480 chunk,
481 ));
482 }
483
484 let range = chunk_loc..chunk_loc + 1;
485 let mut proof = verification::range_proof(hasher, &self.mmr, range).await?;
486 proof.leaves = Location::new(self.len());
487 if next_bit == Self::CHUNK_SIZE_BITS {
488 return Ok((proof, chunk));
490 }
491
492 let last_chunk_digest = hasher.digest(last_chunk);
495 proof.digests.push(last_chunk_digest);
496
497 Ok((proof, chunk))
498 }
499
500 pub fn into_dirty(self) -> UnmerkleizedBitMap<E, D, N> {
502 UnmerkleizedBitMap {
503 bitmap: self.bitmap,
504 authenticated_len: self.authenticated_len,
505 mmr: self.mmr,
506 pool: self.pool,
507 state: Unmerkleized {
508 dirty_chunks: HashSet::new(),
509 },
510 metadata: self.metadata,
511 }
512 }
513}
514
impl<E: Context, D: Digest, const N: usize> UnmerkleizedBitMap<E, D, N> {
    /// Appends a single bit to the end of the bitmap.
    pub fn push(&mut self, bit: bool) {
        self.bitmap.push(bit);
    }

    /// Sets the value of bit `bit`, recording the containing chunk as dirty
    /// if it was already authenticated.
    pub fn set_bit(&mut self, bit: u64, value: bool) {
        self.bitmap.set_bit(bit, value);

        let chunk = PrunableBitMap::<N>::to_chunk_index(bit);
        // Chunks at or beyond `authenticated_len` will be hashed during the
        // next merkleize anyway; only already-authenticated chunks need
        // dirty tracking.
        if chunk < self.authenticated_len {
            self.state.dirty_chunks.insert(chunk);
        }
    }

    /// Returns the locations of all chunks whose MMR leaves will change on
    /// the next merkleization: modified authenticated chunks plus complete
    /// chunks appended since the last merkleization.
    pub fn dirty_chunks(&self) -> Vec<Location> {
        let mut chunks: Vec<Location> = self
            .state
            .dirty_chunks
            .iter()
            .map(|&chunk| Location::new(chunk as u64))
            .collect();

        // Complete chunks not yet reflected in the MMR.
        for i in self.authenticated_len..self.complete_chunks() {
            chunks.push(Location::new(i as u64));
        }

        chunks
    }

    /// Merkleizes all pending changes and returns the bitmap in the
    /// [Merkleized] state with an up-to-date cached root.
    pub fn merkleize(
        mut self,
        hasher: &impl Hasher<mmr::Family, Digest = D>,
    ) -> Result<MerkleizedBitMap<E, D, N>, Error> {
        // Append newly-completed chunks to the MMR as a batch.
        let mut batch = self.mmr.new_batch().with_pool(self.pool.clone());
        let start = self.authenticated_len;
        let end = self.complete_chunks();
        for i in start..end {
            batch = batch.add(hasher, self.bitmap.get_chunk(i));
        }
        self.authenticated_len = end;

        // Recompute leaf digests for dirty (previously authenticated)
        // chunks — in parallel when a pool is available and the workload is
        // large enough to amortize scheduling.
        let dirty: Vec<(Location, D)> = {
            let updates: Vec<(Location, &[u8; N])> = self
                .state
                .dirty_chunks
                .iter()
                .map(|&chunk| {
                    let loc = Location::new(chunk as u64);
                    (loc, self.bitmap.get_chunk(chunk))
                })
                .collect();

            match self.pool.as_ref() {
                Some(pool) if updates.len() >= MIN_TO_PARALLELIZE => pool.install(|| {
                    updates
                        .par_iter()
                        .map_init(
                            // One hasher clone per rayon worker.
                            || hasher.clone(),
                            |h, &(loc, chunk)| {
                                let pos = Position::try_from(loc).unwrap();
                                (loc, h.leaf_digest(pos, chunk.as_ref()))
                            },
                        )
                        .collect()
                }),
                _ => updates
                    .iter()
                    .map(|&(loc, chunk)| {
                        let pos = Position::try_from(loc).unwrap();
                        (loc, hasher.leaf_digest(pos, chunk.as_ref()))
                    })
                    .collect(),
            }
        };
        batch = batch.update_leaf_batched(&dirty)?;

        // Merkleize and apply all additions and leaf updates to the MMR.
        let batch = batch.merkleize(&self.mmr, hasher);
        self.mmr.apply_batch(&batch)?;

        // Cache the root, folding in the trailing partial chunk if present.
        let mmr_root = *self.mmr.root();
        let cached_root = if self.bitmap.is_chunk_aligned() {
            mmr_root
        } else {
            let (last_chunk, next_bit) = self.bitmap.last_chunk();
            let last_chunk_digest = hasher.digest(last_chunk);
            partial_chunk_root::<_, N>(hasher, &mmr_root, next_bit, &last_chunk_digest)
        };

        Ok(MerkleizedBitMap {
            bitmap: self.bitmap,
            authenticated_len: self.authenticated_len,
            mmr: self.mmr,
            pool: self.pool,
            metadata: self.metadata,
            state: Merkleized { root: cached_root },
        })
    }
}
632
633impl<E: Context, D: Digest, const N: usize> Storage<mmr::Family> for MerkleizedBitMap<E, D, N> {
634 type Digest = D;
635
636 async fn size(&self) -> Position {
637 self.size()
638 }
639
640 async fn get_node(&self, position: Position) -> Result<Option<D>, Error> {
641 Ok(self.get_node(position))
642 }
643}
644
#[cfg(test)]
mod tests {
    use super::*;
    use commonware_codec::FixedSize;
    use commonware_cryptography::{sha256, Hasher, Sha256};
    use commonware_macros::test_traced;
    use commonware_runtime::{deterministic, Metrics, Runner as _};
    use mmr::StandardHasher;

    const SHA256_SIZE: usize = sha256::Digest::SIZE;

    type TestContext = deterministic::Context;
    type TestMerkleizedBitMap<const N: usize> = MerkleizedBitMap<TestContext, sha256::Digest, N>;

    // Test-only helpers for appending whole bytes/chunks to a dirty bitmap.
    impl<E: Context, D: Digest, const N: usize> UnmerkleizedBitMap<E, D, N> {
        // Appends a full byte of bits.
        fn push_byte(&mut self, byte: u8) {
            self.bitmap.push_byte(byte);
        }

        // Appends a full chunk of bits.
        fn push_chunk(&mut self, chunk: &[u8; N]) {
            self.bitmap.push_chunk(chunk);
        }
    }

    /// Builds a deterministic N-byte test chunk from `s` by repeating
    /// SHA-256(s); N must be a multiple of 32.
    fn test_chunk<const N: usize>(s: &[u8]) -> [u8; N] {
        assert_eq!(N % 32, 0);
        let mut vec: Vec<u8> = Vec::new();
        for _ in 0..N / 32 {
            vec.extend(Sha256::hash(s).iter());
        }

        vec.try_into().unwrap()
    }

    // A proof claiming leaves but carrying no digests must fail verification
    // without panicking.
    #[test_traced]
    fn test_bitmap_verify_empty_proof() {
        let executor = deterministic::Runner::default();
        executor.start(|_context| async move {
            let hasher = StandardHasher::<Sha256>::new();
            let proof = Proof {
                leaves: Location::new(100),
                digests: Vec::new(),
            };
            assert!(
                !TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
                    &hasher,
                    &proof,
                    &[0u8; SHA256_SIZE],
                    0,
                    &Sha256::fill(0x00),
                ),
                "proof without digests shouldn't verify or panic"
            );
        });
    }

    // Exercises roots/pruning across empty, single-bit, and one-full-chunk
    // bitmaps.
    #[test_traced]
    fn test_bitmap_empty_then_one() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let hasher = StandardHasher::<Sha256>::new();
            let mut bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
                    .await
                    .unwrap();
            assert_eq!(bitmap.len(), 0);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            // Pruning an empty bitmap is a no-op.
            bitmap.prune_to_bit(0).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);

            // Adding a bit changes the root.
            let root = bitmap.root();
            let mut dirty = bitmap.into_dirty();
            dirty.push(true);
            bitmap = dirty.merkleize(&hasher).unwrap();
            let new_root = bitmap.root();
            assert_ne!(root, new_root);
            let root = new_root;
            // Pruning inside the (partial) first chunk prunes nothing.
            bitmap.prune_to_bit(1).unwrap();
            assert_eq!(bitmap.len(), 1);
            assert_ne!(bitmap.last_chunk().0, &[0u8; SHA256_SIZE]);
            assert_eq!(bitmap.last_chunk().1, 1);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            assert_eq!(root, bitmap.root());

            // Fill the rest of the first chunk with alternating bits.
            let mut dirty = bitmap.into_dirty();
            for i in 0..(TestMerkleizedBitMap::<SHA256_SIZE>::CHUNK_SIZE_BITS - 1) {
                dirty.push(i % 2 != 0);
            }
            bitmap = dirty.merkleize(&hasher).unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_ne!(root, bitmap.root());
            let root = bitmap.root();

            // Any bit within the only chunk can be proven; bits outside fail.
            let (proof, chunk) = bitmap.proof(&hasher, 0).await.unwrap();
            assert!(
                TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
                    &hasher, &proof, &chunk, 255, &root
                ),
                "failed to prove bit in only chunk"
            );
            assert!(
                !TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
                    &hasher, &proof, &chunk, 256, &root
                ),
                "should not be able to prove bit outside of chunk"
            );

            // Pruning the now-complete chunk does not change the root.
            bitmap.prune_to_bit(256).unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 1);
            assert_eq!(bitmap.bitmap.pruned_bits(), 256);
            assert_eq!(root, bitmap.root());

            // Pruning to an already-pruned bit is a no-op.
            bitmap.prune_to_bit(10).unwrap();
            assert_eq!(root, bitmap.root());
        });
    }

    // Builds the same two-chunk bitmap three different ways (bit-by-bit,
    // chunk-by-chunk, byte-by-byte) and confirms identical roots.
    #[test_traced]
    fn test_bitmap_building() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let test_chunk = test_chunk(b"test");
            let hasher: StandardHasher<Sha256> = StandardHasher::new();

            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap1"), "test1", None, &hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk);
            for b in test_chunk {
                for j in 0..8 {
                    let mask = 1 << j;
                    let bit = (b & mask) != 0;
                    dirty.push(bit);
                }
            }
            assert_eq!(dirty.len(), 256 * 2);

            let bitmap = dirty.merkleize(&hasher).unwrap();
            let root = bitmap.root();
            // A chunk-aligned bitmap's root equals the inner MMR root.
            let inner_root = *bitmap.mmr.root();
            assert_eq!(root, inner_root);

            {
                let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
                    context.with_label("bitmap2"),
                    "test2",
                    None,
                    &hasher,
                )
                .await
                .unwrap();
                let mut dirty = bitmap.into_dirty();
                dirty.push_chunk(&test_chunk);
                dirty.push_chunk(&test_chunk);
                let bitmap = dirty.merkleize(&hasher).unwrap();
                let same_root = bitmap.root();
                assert_eq!(root, same_root);
            }
            {
                let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
                    context.with_label("bitmap3"),
                    "test3",
                    None,
                    &hasher,
                )
                .await
                .unwrap();
                let mut dirty = bitmap.into_dirty();
                dirty.push_chunk(&test_chunk);
                for b in test_chunk {
                    dirty.push_byte(b);
                }
                let bitmap = dirty.merkleize(&hasher).unwrap();
                let same_root = bitmap.root();
                assert_eq!(root, same_root);
            }
        });
    }

    // Appending a chunk to a non-chunk-aligned bitmap must panic.
    #[test_traced]
    #[should_panic(expected = "cannot add chunk")]
    fn test_bitmap_build_chunked_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push(true);
            dirty.push_chunk(&test_chunk(b"panic"));
        });
    }

    // Appending a byte to a non-byte-aligned bitmap must panic.
    #[test_traced]
    #[should_panic(expected = "cannot add byte")]
    fn test_bitmap_build_byte_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push(true);
            dirty.push_byte(0x01);
        });
    }

    // Reading a bit past the end must panic.
    #[test_traced]
    #[should_panic(expected = "out of bounds")]
    fn test_bitmap_get_out_of_bounds_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.get_bit(256);
        });
    }

    // Reading a pruned bit must panic.
    #[test_traced]
    #[should_panic(expected = "pruned")]
    fn test_bitmap_get_pruned_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            let mut bitmap = dirty.merkleize(&hasher).unwrap();

            bitmap.prune_to_bit(256).unwrap();
            bitmap.get_bit(255);
        });
    }

    // Verifies root evolution across chunk boundaries and that pruning up to
    // the partial chunk preserves the root.
    #[test_traced]
    fn test_bitmap_root_boundaries() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            let mut bitmap = dirty.merkleize(&hasher).unwrap();

            let root = bitmap.root();

            // Starting a third (partial) chunk changes the root.
            let mut dirty = bitmap.into_dirty();
            dirty.push(true);
            bitmap = dirty.merkleize(&hasher).unwrap();
            let new_root = bitmap.root();
            assert_ne!(root, new_root);
            assert_eq!(bitmap.mmr.size(), 3);
            // Every additional bit yields a root distinct from new_root.
            for _ in 0..(SHA256_SIZE * 8 - 1) {
                let mut dirty = bitmap.into_dirty();
                dirty.push(false);
                bitmap = dirty.merkleize(&hasher).unwrap();
                let newer_root = bitmap.root();
                assert_ne!(new_root, newer_root);
            }
            assert_eq!(bitmap.mmr.size(), 4);
            // Start a fourth chunk.
            let mut dirty = bitmap.into_dirty();
            dirty.push(false);
            assert_eq!(dirty.len(), 256 * 3 + 1);
            bitmap = dirty.merkleize(&hasher).unwrap();
            let newer_root = bitmap.root();
            assert_ne!(new_root, newer_root);

            // Pruning up to the partial chunk doesn't affect the root.
            bitmap.prune_to_bit(bitmap.len()).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 3);
            assert_eq!(bitmap.len(), 256 * 3 + 1);
            assert_eq!(newer_root, bitmap.root());
        });
    }

    // Flipping any bit must change the root; restoring it must restore the
    // root — both before and after pruning.
    #[test_traced]
    fn test_bitmap_get_set_bits() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            dirty.push_chunk(&test_chunk(b"test3"));
            dirty.push_chunk(&test_chunk(b"test4"));
            dirty.push_byte(0xF1);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);

            let mut bitmap = dirty.merkleize(&hasher).unwrap();
            let root = bitmap.root();

            for bit_pos in (0..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, !bit);
                bitmap = dirty.merkleize(&hasher).unwrap();
                let new_root = bitmap.root();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                // Restore the bit and confirm the root returns.
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, bit);
                bitmap = dirty.merkleize(&hasher).unwrap();
                let new_root = bitmap.root();
                assert_eq!(root, new_root);
            }

            // Repeat over only the unpruned region after pruning two chunks.
            let start_bit = (SHA256_SIZE * 8 * 2) as u64;
            bitmap.prune_to_bit(start_bit).unwrap();
            for bit_pos in (start_bit..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, !bit);
                bitmap = dirty.merkleize(&hasher).unwrap();
                let new_root = bitmap.root();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                // Restore the bit and confirm the root returns.
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, bit);
                bitmap = dirty.merkleize(&hasher).unwrap();
                let new_root = bitmap.root();
                assert_eq!(root, new_root);
            }
        });
    }

    /// Returns a copy of `chunk` with bit `bit` flipped.
    fn flip_bit<const N: usize>(bit: u64, chunk: &[u8; N]) -> [u8; N] {
        let byte = PrunableBitMap::<N>::chunk_byte_offset(bit);
        let mask = PrunableBitMap::<N>::chunk_byte_bitmask(bit);
        let mut tmp = chunk.to_vec();
        tmp[byte] ^= mask;
        tmp.try_into().unwrap()
    }

    // Runs proof verification for two chunk sizes.
    #[test_traced]
    fn test_bitmap_mmr_proof_verification() {
        test_bitmap_mmr_proof_verification_n::<32>();
        test_bitmap_mmr_proof_verification_n::<64>();
    }

    // Proves and verifies every bit at several pruning boundaries, and
    // confirms corrupted chunks fail verification.
    fn test_bitmap_mmr_proof_verification_n<const N: usize>() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let hasher = StandardHasher::<Sha256>::new();
            let bitmap: MerkleizedBitMap<TestContext, sha256::Digest, N> =
                MerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            for i in 0u32..10 {
                dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            // Leave a partial chunk at the end.
            dirty.push_byte(0xA6);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);
            dirty.push(true);
            dirty.push(false);

            let mut bitmap = dirty.merkleize(&hasher).unwrap();
            let root = bitmap.root();

            for prune_to_bit in (0..bitmap.len()).step_by(251) {
                assert_eq!(bitmap.root(), root);
                bitmap.prune_to_bit(prune_to_bit).unwrap();
                for i in prune_to_bit..bitmap.len() {
                    let (proof, chunk) = bitmap.proof(&hasher, i).await.unwrap();

                    assert!(
                        MerkleizedBitMap::<TestContext, _, N>::verify_bit_inclusion(
                            &hasher, &proof, &chunk, i, &root
                        ),
                        "failed to prove bit {i}",
                    );

                    // A corrupted chunk must not verify.
                    let corrupted = flip_bit(i, &chunk);
                    assert!(
                        !MerkleizedBitMap::<TestContext, _, N>::verify_bit_inclusion(
                            &hasher, &proof, &corrupted, i, &root
                        ),
                        "proving bit {i} after flipping should have failed",
                    );
                }
            }
        })
    }

    // Repeatedly prune, persist, restore, and replay; roots must always match
    // the originals.
    #[test_traced]
    fn test_bitmap_persistence() {
        const PARTITION: &str = "bitmap-test";
        const FULL_CHUNK_COUNT: usize = 100;

        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("initial"), PARTITION, None, &hasher)
                    .await
                    .unwrap();
            assert_eq!(bitmap.len(), 0);

            // Build 100 full chunks, remembering the chunk-aligned root.
            let mut dirty = bitmap.into_dirty();
            for i in 0..FULL_CHUNK_COUNT {
                dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            let mut bitmap = dirty.merkleize(&hasher).unwrap();
            let chunk_aligned_root = bitmap.root();

            // Add a partial chunk on top.
            let mut dirty = bitmap.into_dirty();
            dirty.push_byte(0xA6);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);
            bitmap = dirty.merkleize(&hasher).unwrap();
            let root = bitmap.root();

            for i in (10..=FULL_CHUNK_COUNT).step_by(10) {
                bitmap
                    .prune_to_bit(
                        (i * TestMerkleizedBitMap::<SHA256_SIZE>::CHUNK_SIZE_BITS as usize) as u64,
                    )
                    .unwrap();
                bitmap.write_pruned().await.unwrap();
                bitmap = TestMerkleizedBitMap::init(
                    context.with_label(&format!("restore_{i}")),
                    PARTITION,
                    None,
                    &hasher,
                )
                .await
                .unwrap();
                let _ = bitmap.root();

                // Replay the missing full chunks.
                let mut dirty = bitmap.into_dirty();
                for j in i..FULL_CHUNK_COUNT {
                    dirty.push_chunk(&test_chunk(format!("test{j}").as_bytes()));
                }
                assert_eq!(dirty.bitmap.pruned_chunks(), i);
                assert_eq!(dirty.len(), FULL_CHUNK_COUNT as u64 * 256);
                bitmap = dirty.merkleize(&hasher).unwrap();
                assert_eq!(bitmap.root(), chunk_aligned_root);

                // Replay the partial chunk.
                let mut dirty = bitmap.into_dirty();
                dirty.push_byte(0xA6);
                dirty.push(true);
                dirty.push(false);
                dirty.push(true);
                bitmap = dirty.merkleize(&hasher).unwrap();
                assert_eq!(bitmap.root(), root);
            }
        });
    }

    // Proof requests past the end fail with BitOutOfBounds; in-bounds bits
    // remain provable.
    #[test_traced]
    fn test_bitmap_proof_out_of_bounds() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            let bitmap = dirty.merkleize(&hasher).unwrap();

            let result = bitmap.proof(&hasher, 256).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 256 && size == 256));

            let result = bitmap.proof(&hasher, 1000).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 1000 && size == 256));

            assert!(bitmap.proof(&hasher, 0).await.is_ok());
            assert!(bitmap.proof(&hasher, 255).await.is_ok());
        });
    }
}