1use crate::{
14 metadata::{Config as MConfig, Metadata},
15 mmr::{
16 batch::UnmerkleizedBatch,
17 hasher::Hasher as MmrHasher,
18 iterator::nodes_to_pin,
19 mem::{Config, Mmr, MIN_TO_PARALLELIZE},
20 storage::Storage,
21 verification,
22 Error::{self, *},
23 Location, Position, Proof,
24 },
25};
26use commonware_codec::DecodeExt;
27use commonware_cryptography::{Digest, Hasher};
28use commonware_parallel::ThreadPool;
29use commonware_runtime::{Clock, Metrics, Storage as RStorage};
30use commonware_utils::{
31 bitmap::{BitMap as UtilsBitMap, Prunable as PrunableBitMap},
32 sequence::prefixed_u64::U64,
33};
34use rayon::prelude::*;
35use std::collections::HashSet;
36use tracing::{debug, error, warn};
37
38pub(crate) fn partial_chunk_root<H: Hasher, const N: usize>(
41 hasher: &mut H,
42 mmr_root: &H::Digest,
43 next_bit: u64,
44 last_chunk_digest: &H::Digest,
45) -> H::Digest {
46 assert!(next_bit > 0);
47 assert!(next_bit < UtilsBitMap::<N>::CHUNK_SIZE_BITS);
48 hasher.update(mmr_root);
49 hasher.update(&next_bit.to_be_bytes());
50 hasher.update(last_chunk_digest);
51 hasher.finalize()
52}
53
/// Private module used to seal the [State] trait so it cannot be implemented
/// outside this file.
mod private {
    pub trait Sealed {}
}

/// Marker trait for the merkleization state of a [BitMap]. Sealed: only
/// [Merkleized] and [Unmerkleized] implement it.
pub trait State<D: Digest>: private::Sealed + Sized + Send + Sync {}

/// State of a bitmap whose MMR is fully up to date.
pub struct Merkleized<D: Digest> {
    /// Root digest cached at merkleization time (incorporates the partial
    /// last-chunk commitment when the bitmap is not chunk aligned).
    root: D,
}

impl<D: Digest> private::Sealed for Merkleized<D> {}
impl<D: Digest> State<D> for Merkleized<D> {}

/// State of a bitmap with pending modifications not yet reflected in the MMR.
pub struct Unmerkleized {
    /// Indices of already-authenticated chunks whose bits have been modified,
    /// and whose corresponding MMR leaves must be recomputed on merkleize.
    dirty_chunks: HashSet<usize>,
}

impl private::Sealed for Unmerkleized {}
impl<D: Digest> State<D> for Unmerkleized {}

/// A [BitMap] in the merkleized state: its root is cached and proofs can be
/// generated.
pub type MerkleizedBitMap<E, D, const N: usize> = BitMap<E, D, N, Merkleized<D>>;

/// A [BitMap] in the mutable state; convert back via
/// [UnmerkleizedBitMap::merkleize].
pub type UnmerkleizedBitMap<E, D, const N: usize> = BitMap<E, D, N, Unmerkleized>;
89
/// An authenticated, prunable bitmap backed by an in-memory MMR with one leaf
/// per complete `N`-byte chunk, generic over its merkleization [State].
pub struct BitMap<
    E: Clock + RStorage + Metrics,
    D: Digest,
    const N: usize,
    S: State<D> = Merkleized<D>,
> {
    /// The underlying prunable bitmap of `N`-byte chunks.
    bitmap: PrunableBitMap<N>,

    /// Number of chunks (counted from the start of the bitmap, pruned chunks
    /// included) that currently have corresponding leaves in `mmr`.
    authenticated_len: usize,

    /// MMR over the complete chunks of the bitmap.
    mmr: Mmr<D>,

    /// Optional thread pool used to parallelize leaf hashing during
    /// merkleization.
    pool: Option<ThreadPool>,

    /// Merkleization state: [Merkleized] caches the root, [Unmerkleized]
    /// tracks dirty chunks.
    state: S,

    /// Metadata store persisting the pruned-chunk count and the pinned MMR
    /// nodes needed to restart from a pruned state.
    metadata: Metadata<E, U64, Vec<u8>>,
}

/// Metadata key prefix under which pinned MMR nodes are stored.
const NODE_PREFIX: u8 = 0;

/// Metadata key prefix under which the pruned-chunk count is stored.
const PRUNED_CHUNKS_PREFIX: u8 = 1;
146
impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize, S: State<D>> BitMap<E, D, N, S> {
    /// Number of bits in a single chunk.
    pub const CHUNK_SIZE_BITS: u64 = PrunableBitMap::<N>::CHUNK_SIZE_BITS;

    /// Returns the size of the underlying MMR (in nodes).
    #[inline]
    pub fn size(&self) -> Position {
        self.mmr.size()
    }

    /// Returns the total number of bits in the bitmap (including pruned bits).
    #[inline]
    pub const fn len(&self) -> u64 {
        self.bitmap.len()
    }

    /// Returns true if the bitmap contains no bits.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the number of bits that have been pruned.
    #[inline]
    pub const fn pruned_bits(&self) -> u64 {
        self.bitmap.pruned_bits()
    }

    /// Returns the number of complete (fully filled) chunks, excluding any
    /// trailing partial chunk.
    #[inline]
    fn complete_chunks(&self) -> usize {
        let chunks_len = self.bitmap.chunks_len();
        if self.bitmap.is_chunk_aligned() {
            chunks_len
        } else {
            // A non-aligned bitmap always has at least one (partial) chunk, so
            // the subtraction cannot underflow.
            chunks_len.checked_sub(1).unwrap()
        }
    }

    /// Returns the last chunk and the number of bits written into it.
    #[inline]
    pub fn last_chunk(&self) -> (&[u8; N], u64) {
        self.bitmap.last_chunk()
    }

    /// Returns the chunk containing the given bit.
    #[inline]
    pub fn get_chunk_containing(&self, bit: u64) -> &[u8; N] {
        self.bitmap.get_chunk_containing(bit)
    }

    /// Returns the value of the given bit.
    #[inline]
    pub fn get_bit(&self, bit: u64) -> bool {
        self.bitmap.get_bit(bit)
    }

    /// Extracts the value of the given bit from a raw chunk.
    #[inline]
    pub const fn get_bit_from_chunk(chunk: &[u8; N], bit: u64) -> bool {
        PrunableBitMap::<N>::get_bit_from_chunk(chunk, bit)
    }

    /// Verifies that `bit` has the value contained in `chunk` within a bitmap
    /// committed to by `root`.
    ///
    /// Note: `proof.leaves` carries the bitmap's *bit* length (not a leaf
    /// count); see the matching encoding in [MerkleizedBitMap::proof].
    pub fn verify_bit_inclusion(
        hasher: &mut impl MmrHasher<Digest = D>,
        proof: &Proof<D>,
        chunk: &[u8; N],
        bit: u64,
        root: &D,
    ) -> bool {
        let bit_len = *proof.leaves;
        if bit >= bit_len {
            debug!(bit_len, bit, "tried to verify non-existent bit");
            return false;
        }

        // Rebuild an MMR-level proof whose leaf count is the number of chunks
        // covering `bit_len` complete bits.
        let chunked_leaves = Location::new(PrunableBitMap::<N>::to_chunk_index(bit_len) as u64);
        let mut mmr_proof = Proof {
            leaves: chunked_leaves,
            digests: proof.digests.clone(),
        };

        let loc = Location::new(PrunableBitMap::<N>::to_chunk_index(bit) as u64);
        if bit_len.is_multiple_of(Self::CHUNK_SIZE_BITS) {
            // Chunk-aligned bitmap: the root is the plain MMR root, so this is
            // an ordinary element-inclusion check.
            return mmr_proof.verify_element_inclusion(hasher, chunk, loc, root);
        }

        if proof.digests.is_empty() {
            debug!("proof has no digests");
            return false;
        }
        // For a non-aligned bitmap the final digest is the extra commitment
        // element appended by `proof` (either the MMR root or the last-chunk
        // digest, depending on the case below).
        let last_digest = mmr_proof.digests.pop().unwrap();

        if chunked_leaves == loc {
            // The bit lives in the trailing partial chunk, which is not an MMR
            // leaf: the proof must consist of exactly the MMR root.
            if !mmr_proof.digests.is_empty() {
                debug!(
                    digests = mmr_proof.digests.len() + 1,
                    "proof over partial chunk should have exactly 1 digest"
                );
                return false;
            }
            let last_chunk_digest = hasher.digest(chunk);
            let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
            let reconstructed_root = partial_chunk_root::<_, N>(
                hasher.inner(),
                &last_digest,
                next_bit,
                &last_chunk_digest,
            );
            return reconstructed_root == *root;
        };

        // The bit lives in a complete chunk: reconstruct the MMR root from the
        // remaining digests, then recombine it with the last-chunk digest.
        let mmr_root = match mmr_proof.reconstruct_root(hasher, &[chunk], loc) {
            Ok(root) => root,
            Err(error) => {
                debug!(error = ?error, "invalid proof input");
                return false;
            }
        };

        let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
        let reconstructed_root =
            partial_chunk_root::<_, N>(hasher.inner(), &mmr_root, next_bit, &last_digest);

        reconstructed_root == *root
    }
}
294
impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize> MerkleizedBitMap<E, D, N> {
    /// Restores a merkleized bitmap from the metadata stored in `partition`.
    ///
    /// If the metadata contains no pruned-chunk count, an empty bitmap is
    /// returned. Otherwise the bitmap is restored in its fully-pruned state:
    /// all restored chunks are pruned and the MMR is rebuilt from the pinned
    /// nodes persisted by [Self::write_pruned].
    ///
    /// # Errors
    ///
    /// Returns [Error::DataCorrupted] if the stored pruned-chunk count is not
    /// a valid `u64`, or [MissingNode] if a required pinned node is absent or
    /// undecodable.
    pub async fn init(
        context: E,
        partition: &str,
        pool: Option<ThreadPool>,
        hasher: &mut impl MmrHasher<Digest = D>,
    ) -> Result<Self, Error> {
        let metadata_cfg = MConfig {
            partition: partition.into(),
            codec_config: ((0..).into(), ()),
        };
        let metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;

        let key: U64 = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        let pruned_chunks = match metadata.get(&key) {
            Some(bytes) => u64::from_be_bytes(bytes.as_slice().try_into().map_err(|_| {
                error!("pruned chunks value not a valid u64");
                Error::DataCorrupted("pruned chunks value not a valid u64")
            })?),
            None => {
                warn!("bitmap metadata does not contain pruned chunks, initializing as empty");
                0
            }
        } as usize;
        if pruned_chunks == 0 {
            // Nothing was pruned: start from a brand-new empty bitmap/MMR.
            let mmr = Mmr::new(hasher);
            let cached_root = *mmr.root();
            return Ok(Self {
                bitmap: PrunableBitMap::new(),
                authenticated_len: 0,
                mmr,
                pool,
                metadata,
                state: Merkleized { root: cached_root },
            });
        }
        // MMR size corresponding to `pruned_chunks` leaves.
        let mmr_size = Position::try_from(Location::new(pruned_chunks as u64))?;

        // Collect the pinned nodes required to reconstruct a pruned MMR.
        let mut pinned_nodes = Vec::new();
        for (index, pos) in nodes_to_pin(mmr_size).enumerate() {
            let Some(bytes) = metadata.get(&U64::new(NODE_PREFIX, index as u64)) else {
                error!(?mmr_size, ?pos, "missing pinned node");
                return Err(MissingNode(pos));
            };
            let digest = D::decode(bytes.as_ref());
            let Ok(digest) = digest else {
                error!(?mmr_size, ?pos, "could not convert node bytes to digest");
                return Err(MissingNode(pos));
            };
            pinned_nodes.push(digest);
        }

        let mmr = Mmr::init(
            Config {
                nodes: Vec::new(),
                pruned_to: Location::new(pruned_chunks as u64),
                pinned_nodes,
            },
            hasher,
        )?;

        let bitmap = PrunableBitMap::new_with_pruned_chunks(pruned_chunks)
            .expect("pruned_chunks should never overflow");
        let cached_root = *mmr.root();
        Ok(Self {
            bitmap,
            // Every restored (pruned) chunk already has an MMR leaf.
            authenticated_len: pruned_chunks,
            mmr,
            pool,
            metadata,
            state: Merkleized { root: cached_root },
        })
    }

    /// Returns the MMR node at `position`, if it exists and is unpruned.
    pub fn get_node(&self, position: Position) -> Option<D> {
        self.mmr.get_node(position)
    }

    /// Persists the state required to restore the bitmap at its current
    /// pruning boundary: the pruned-chunk count plus the pinned MMR nodes.
    pub async fn write_pruned(&mut self) -> Result<(), Error> {
        // Rewrite the metadata from scratch; stale keys are dropped.
        self.metadata.clear();

        let key = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        self.metadata
            .put(key, self.bitmap.pruned_chunks().to_be_bytes().to_vec());

        let mmr_size = Position::try_from(Location::new(self.bitmap.pruned_chunks() as u64))?;
        for (i, digest) in nodes_to_pin(mmr_size).enumerate() {
            // NOTE(review): the loop variable `digest` is actually a node
            // *position* yielded by nodes_to_pin; the shadowing binding below
            // converts it into the node's digest.
            let digest = self.mmr.get_node_unchecked(digest);
            let key = U64::new(NODE_PREFIX, i as u64);
            self.metadata.put(key, digest.to_vec());
        }

        self.metadata.sync().await.map_err(MetadataError)
    }

    /// Destroys the bitmap's persistent metadata.
    pub async fn destroy(self) -> Result<(), Error> {
        self.metadata.destroy().await.map_err(MetadataError)
    }

    /// Prunes all bits up to (but not including) the chunk containing `bit`.
    ///
    /// Pruning to an already-pruned bit is a no-op. The root is unaffected
    /// since pruning only discards data already committed to by the MMR.
    pub fn prune_to_bit(&mut self, bit: u64) -> Result<(), Error> {
        let chunk = PrunableBitMap::<N>::to_chunk_index(bit);
        if chunk < self.bitmap.pruned_chunks() {
            return Ok(());
        }

        self.bitmap.prune_to_bit(bit);

        self.authenticated_len = self.complete_chunks();

        self.mmr.prune(Location::new(chunk as u64))?;
        Ok(())
    }

    /// Returns the cached root of the bitmap (computed at merkleization).
    pub const fn root(&self) -> D {
        self.state.root
    }

    /// Generates a proof of inclusion for `bit`, returning the proof and the
    /// chunk containing the bit.
    ///
    /// The returned `proof.leaves` encodes the bitmap's *bit* length, which
    /// [BitMap::verify_bit_inclusion] relies upon.
    ///
    /// # Errors
    ///
    /// Returns [Error::BitOutOfBounds] if `bit >= self.len()`.
    pub async fn proof(
        &self,
        hasher: &mut impl MmrHasher<Digest = D>,
        bit: u64,
    ) -> Result<(Proof<D>, [u8; N]), Error> {
        if bit >= self.len() {
            return Err(Error::BitOutOfBounds(bit, self.len()));
        }

        let chunk = *self.get_chunk_containing(bit);
        let chunk_loc = Location::from(PrunableBitMap::<N>::to_chunk_index(bit));
        let (last_chunk, next_bit) = self.bitmap.last_chunk();

        if chunk_loc == self.mmr.leaves() {
            // The bit lies in the trailing partial chunk, which has no MMR
            // leaf: the proof is just the MMR root over the complete chunks.
            assert!(next_bit > 0);
            return Ok((
                Proof {
                    leaves: Location::new(self.len()),
                    digests: vec![*self.mmr.root()],
                },
                chunk,
            ));
        }

        // Ordinary case: range-proof the single chunk leaf.
        let range = chunk_loc..chunk_loc + 1;
        let mut proof = verification::range_proof(&self.mmr, range).await?;
        proof.leaves = Location::new(self.len());
        if next_bit == Self::CHUNK_SIZE_BITS {
            // Chunk-aligned bitmap: no partial-chunk commitment is needed.
            return Ok((proof, chunk));
        }

        // Append the last-chunk digest so verifiers can recompute the
        // partial-chunk root commitment.
        let last_chunk_digest = hasher.digest(last_chunk);
        proof.digests.push(last_chunk_digest);

        Ok((proof, chunk))
    }

    /// Converts into the mutable (unmerkleized) state.
    pub fn into_dirty(self) -> UnmerkleizedBitMap<E, D, N> {
        UnmerkleizedBitMap {
            bitmap: self.bitmap,
            authenticated_len: self.authenticated_len,
            mmr: self.mmr,
            pool: self.pool,
            state: Unmerkleized {
                dirty_chunks: HashSet::new(),
            },
            metadata: self.metadata,
        }
    }
}
514
impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize> UnmerkleizedBitMap<E, D, N> {
    /// Appends a single bit to the end of the bitmap.
    pub fn push(&mut self, bit: bool) {
        self.bitmap.push(bit);
    }

    /// Sets the value of an existing bit.
    pub fn set_bit(&mut self, bit: u64, value: bool) {
        self.bitmap.set_bit(bit, value);

        // Only chunks already represented as MMR leaves need dirty tracking;
        // newer chunks will be hashed for the first time on merkleize.
        let chunk = PrunableBitMap::<N>::to_chunk_index(bit);
        if chunk < self.authenticated_len {
            self.state.dirty_chunks.insert(chunk);
        }
    }

    /// Returns the locations of all chunks whose MMR leaves will change or be
    /// added on the next merkleize: modified authenticated chunks plus any
    /// newly completed (not-yet-authenticated) chunks.
    pub fn dirty_chunks(&self) -> Vec<Location> {
        let mut chunks: Vec<Location> = self
            .state
            .dirty_chunks
            .iter()
            .map(|&chunk| Location::new(chunk as u64))
            .collect();

        for i in self.authenticated_len..self.complete_chunks() {
            chunks.push(Location::new(i as u64));
        }

        chunks
    }

    /// Applies all pending changes to the MMR, recomputes the root, and
    /// returns the bitmap in the merkleized state.
    pub fn merkleize(
        mut self,
        hasher: &mut impl MmrHasher<Digest = D>,
    ) -> Result<MerkleizedBitMap<E, D, N>, Error> {
        // Append a leaf for every complete chunk not yet in the MMR.
        let mut batch = UnmerkleizedBatch::new(&self.mmr).with_pool(self.pool.clone());
        let start = self.authenticated_len;
        let end = self.complete_chunks();
        for i in start..end {
            batch.add(hasher, self.bitmap.get_chunk(i));
        }
        self.authenticated_len = end;

        // Recompute leaf digests for modified authenticated chunks, in
        // parallel when a pool is available and the batch is large enough.
        let dirty: Vec<(Location, D)> = {
            let updates: Vec<(Location, &[u8; N])> = self
                .state
                .dirty_chunks
                .iter()
                .map(|&chunk| {
                    let loc = Location::new(chunk as u64);
                    (loc, self.bitmap.get_chunk(chunk))
                })
                .collect();

            match self.pool.as_ref() {
                Some(pool) if updates.len() >= MIN_TO_PARALLELIZE => pool.install(|| {
                    updates
                        .par_iter()
                        .map_init(
                            // Each rayon worker gets its own forked hasher.
                            || hasher.fork(),
                            |h, &(loc, chunk)| {
                                let pos = Position::try_from(loc).unwrap();
                                (loc, h.leaf_digest(pos, chunk.as_ref()))
                            },
                        )
                        .collect()
                }),
                _ => updates
                    .iter()
                    .map(|&(loc, chunk)| {
                        let pos = Position::try_from(loc).unwrap();
                        (loc, hasher.leaf_digest(pos, chunk.as_ref()))
                    })
                    .collect(),
            }
        };
        batch.update_leaf_batched(&dirty)?;

        // Fold the batched additions/updates back into the MMR.
        let changeset = batch.merkleize(hasher).finalize();
        self.mmr.apply(changeset)?;

        // The cached root commits to the partial last chunk when the bitmap is
        // not chunk aligned.
        let mmr_root = *self.mmr.root();
        let cached_root = if self.bitmap.is_chunk_aligned() {
            mmr_root
        } else {
            let (last_chunk, next_bit) = self.bitmap.last_chunk();
            let last_chunk_digest = hasher.digest(last_chunk);
            partial_chunk_root::<_, N>(hasher.inner(), &mmr_root, next_bit, &last_chunk_digest)
        };

        Ok(MerkleizedBitMap {
            bitmap: self.bitmap,
            authenticated_len: self.authenticated_len,
            mmr: self.mmr,
            pool: self.pool,
            metadata: self.metadata,
            state: Merkleized { root: cached_root },
        })
    }
}
632
633impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize> Storage<D>
634 for MerkleizedBitMap<E, D, N>
635{
636 async fn size(&self) -> Position {
637 self.size()
638 }
639
640 async fn get_node(&self, position: Position) -> Result<Option<D>, Error> {
641 Ok(self.get_node(position))
642 }
643}
644
645#[cfg(test)]
646mod tests {
647 use super::*;
648 use crate::mmr::StandardHasher;
649 use commonware_codec::FixedSize;
650 use commonware_cryptography::{sha256, Hasher, Sha256};
651 use commonware_macros::test_traced;
652 use commonware_runtime::{deterministic, Metrics, Runner as _};
653
    /// Size of a SHA-256 digest in bytes, used as the default test chunk size.
    const SHA256_SIZE: usize = sha256::Digest::SIZE;

    type TestContext = deterministic::Context;
    type TestMerkleizedBitMap<const N: usize> = MerkleizedBitMap<TestContext, sha256::Digest, N>;

    /// Test-only conveniences for pushing whole bytes/chunks onto the
    /// underlying bitmap without going through the bit-level API.
    impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize> UnmerkleizedBitMap<E, D, N> {
        /// Appends a full byte to the bitmap.
        fn push_byte(&mut self, byte: u8) {
            self.bitmap.push_byte(byte);
        }

        /// Appends a full chunk to the bitmap.
        fn push_chunk(&mut self, chunk: &[u8; N]) {
            self.bitmap.push_chunk(chunk);
        }
    }
682
683 fn test_chunk<const N: usize>(s: &[u8]) -> [u8; N] {
684 assert_eq!(N % 32, 0);
685 let mut vec: Vec<u8> = Vec::new();
686 for _ in 0..N / 32 {
687 vec.extend(Sha256::hash(s).iter());
688 }
689
690 vec.try_into().unwrap()
691 }
692
    /// A proof with no digests over a non-aligned bitmap must be rejected
    /// (not panic) by `verify_bit_inclusion`.
    #[test_traced]
    fn test_bitmap_verify_empty_proof() {
        let executor = deterministic::Runner::default();
        executor.start(|_context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            // 100 bits is not chunk aligned, so verification requires at least
            // one digest; an empty digest list must fail gracefully.
            let proof = Proof {
                leaves: Location::new(100),
                digests: Vec::new(),
            };
            assert!(
                !TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &[0u8; SHA256_SIZE],
                    0,
                    &Sha256::fill(0x00),
                ),
                "proof without digests shouldn't verify or panic"
            );
        });
    }
714
    /// Exercises the empty bitmap, a single pushed bit, a full first chunk,
    /// proof generation/verification over it, and pruning at the chunk
    /// boundary (root must be prune-invariant).
    #[test_traced]
    fn test_bitmap_empty_then_one() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let mut bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            assert_eq!(bitmap.len(), 0);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            // Pruning an empty bitmap is a no-op.
            bitmap.prune_to_bit(0).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);

            // Pushing a single bit must change the root.
            let root = bitmap.root();
            let mut dirty = bitmap.into_dirty();
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).unwrap();
            let new_root = bitmap.root();
            assert_ne!(root, new_root);
            let root = new_root;
            // Pruning within the (partial) first chunk prunes nothing.
            bitmap.prune_to_bit(1).unwrap();
            assert_eq!(bitmap.len(), 1);
            assert_ne!(bitmap.last_chunk().0, &[0u8; SHA256_SIZE]);
            assert_eq!(bitmap.last_chunk().1, 1);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            assert_eq!(root, bitmap.root());

            // Fill out the remainder of the first chunk with alternating bits.
            let mut dirty = bitmap.into_dirty();
            for i in 0..(TestMerkleizedBitMap::<SHA256_SIZE>::CHUNK_SIZE_BITS - 1) {
                dirty.push(i % 2 != 0);
            }
            bitmap = dirty.merkleize(&mut hasher).unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_ne!(root, bitmap.root());
            let root = bitmap.root();

            // A proof over the only chunk proves any bit within it...
            let (proof, chunk) = bitmap.proof(&mut hasher, 0).await.unwrap();
            assert!(
                TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &chunk,
                    255,
                    &root
                ),
                "failed to prove bit in only chunk"
            );
            // ...but not a bit beyond the bitmap's length.
            assert!(
                !TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &chunk,
                    256,
                    &root
                ),
                "should not be able to prove bit outside of chunk"
            );

            // Pruning the complete first chunk must not affect the root.
            bitmap.prune_to_bit(256).unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 1);
            assert_eq!(bitmap.bitmap.pruned_bits(), 256);
            assert_eq!(root, bitmap.root());

            // Pruning to an already-pruned bit is a no-op.
            bitmap.prune_to_bit(10).unwrap();
            assert_eq!(root, bitmap.root());
        });
    }
792
    /// Builds identical two-chunk bitmaps three different ways (bit-by-bit,
    /// chunk-by-chunk, byte-by-byte) and checks the roots agree. Also confirms
    /// that a chunk-aligned bitmap's root equals the raw MMR root.
    #[test_traced]
    fn test_bitmap_building() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let test_chunk = test_chunk(b"test");
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();

            // Build #1: one chunk pushed whole, second chunk pushed bit-by-bit
            // (LSB-first within each byte).
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
                context.with_label("bitmap1"),
                "test1",
                None,
                &mut hasher,
            )
            .await
            .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk);
            for b in test_chunk {
                for j in 0..8 {
                    let mask = 1 << j;
                    let bit = (b & mask) != 0;
                    dirty.push(bit);
                }
            }
            assert_eq!(dirty.len(), 256 * 2);

            let bitmap = dirty.merkleize(&mut hasher).unwrap();
            let root = bitmap.root();
            // Chunk aligned => cached root is exactly the MMR root.
            let inner_root = *bitmap.mmr.root();
            assert_eq!(root, inner_root);

            {
                // Build #2: both chunks pushed whole.
                let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
                    context.with_label("bitmap2"),
                    "test2",
                    None,
                    &mut hasher,
                )
                .await
                .unwrap();
                let mut dirty = bitmap.into_dirty();
                dirty.push_chunk(&test_chunk);
                dirty.push_chunk(&test_chunk);
                let bitmap = dirty.merkleize(&mut hasher).unwrap();
                let same_root = bitmap.root();
                assert_eq!(root, same_root);
            }
            {
                // Build #3: second chunk pushed byte-by-byte.
                let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
                    context.with_label("bitmap3"),
                    "test3",
                    None,
                    &mut hasher,
                )
                .await
                .unwrap();
                let mut dirty = bitmap.into_dirty();
                dirty.push_chunk(&test_chunk);
                for b in test_chunk {
                    dirty.push_byte(b);
                }
                let bitmap = dirty.merkleize(&mut hasher).unwrap();
                let same_root = bitmap.root();
                assert_eq!(root, same_root);
            }
        });
    }
866
    /// Pushing a whole chunk when the bitmap is not chunk aligned must panic.
    #[test_traced]
    #[should_panic(expected = "cannot add chunk")]
    fn test_bitmap_build_chunked_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            // One extra bit leaves the bitmap byte/chunk misaligned.
            dirty.push(true);
            dirty.push_chunk(&test_chunk(b"panic"));
        });
    }

    /// Pushing a whole byte when the bitmap is not byte aligned must panic.
    #[test_traced]
    #[should_panic(expected = "cannot add byte")]
    fn test_bitmap_build_byte_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            // One extra bit leaves the bitmap byte misaligned.
            dirty.push(true);
            dirty.push_byte(0x01);
        });
    }

    /// Reading a bit at/after the bitmap's length must panic.
    #[test_traced]
    #[should_panic(expected = "out of bounds")]
    fn test_bitmap_get_out_of_bounds_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            // Only bits 0..256 exist after one 32-byte chunk.
            dirty.get_bit(256);
        });
    }

    /// Reading a bit that has been pruned away must panic.
    #[test_traced]
    #[should_panic(expected = "pruned")]
    fn test_bitmap_get_pruned_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            let mut bitmap = dirty.merkleize(&mut hasher).unwrap();

            // Bit 255 lies in the first (now pruned) chunk.
            bitmap.prune_to_bit(256).unwrap();
            bitmap.get_bit(255);
        });
    }
936
    /// Checks that every single-bit append changes the root, and tracks the
    /// expected MMR size across the chunk boundary; finally verifies the root
    /// is stable across pruning.
    #[test_traced]
    fn test_bitmap_root_boundaries() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            let mut bitmap = dirty.merkleize(&mut hasher).unwrap();

            let root = bitmap.root();

            // Start a third (partial) chunk: root must change.
            let mut dirty = bitmap.into_dirty();
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).unwrap();
            let new_root = bitmap.root();
            assert_ne!(root, new_root);
            // Two leaves => 3 MMR nodes; the partial chunk adds no leaf.
            assert_eq!(bitmap.mmr.size(), 3);
            for _ in 0..(SHA256_SIZE * 8 - 1) {
                let mut dirty = bitmap.into_dirty();
                dirty.push(false);
                bitmap = dirty.merkleize(&mut hasher).unwrap();
                let newer_root = bitmap.root();
                assert_ne!(new_root, newer_root);
            }
            // Third chunk now complete => a third leaf => 4 MMR nodes.
            assert_eq!(bitmap.mmr.size(), 4);
            let mut dirty = bitmap.into_dirty();
            dirty.push(false);
            assert_eq!(dirty.len(), 256 * 3 + 1);
            bitmap = dirty.merkleize(&mut hasher).unwrap();
            let newer_root = bitmap.root();
            assert_ne!(new_root, newer_root);

            // Pruning all complete chunks leaves the root untouched.
            bitmap.prune_to_bit(bitmap.len()).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 3);
            assert_eq!(bitmap.len(), 256 * 3 + 1);
            assert_eq!(newer_root, bitmap.root());
        });
    }
988
    /// Flipping any bit must change the root, and flipping it back must
    /// restore it — both before and after pruning the first two chunks.
    #[test_traced]
    fn test_bitmap_get_set_bits() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            dirty.push_chunk(&test_chunk(b"test3"));
            dirty.push_chunk(&test_chunk(b"test4"));
            // Leave a partial trailing chunk (one byte + three bits).
            dirty.push_byte(0xF1);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);

            let mut bitmap = dirty.merkleize(&mut hasher).unwrap();
            let root = bitmap.root();

            // Flip/restore every bit; root must toggle with it.
            for bit_pos in (0..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, !bit);
                bitmap = dirty.merkleize(&mut hasher).unwrap();
                let new_root = bitmap.root();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, bit);
                bitmap = dirty.merkleize(&mut hasher).unwrap();
                let new_root = bitmap.root();
                assert_eq!(root, new_root);
            }

            // Repeat over the retained range after pruning two chunks.
            let start_bit = (SHA256_SIZE * 8 * 2) as u64;
            bitmap.prune_to_bit(start_bit).unwrap();
            for bit_pos in (start_bit..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, !bit);
                bitmap = dirty.merkleize(&mut hasher).unwrap();
                let new_root = bitmap.root();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, bit);
                bitmap = dirty.merkleize(&mut hasher).unwrap();
                let new_root = bitmap.root();
                assert_eq!(root, new_root);
            }
        });
    }
1049
1050 fn flip_bit<const N: usize>(bit: u64, chunk: &[u8; N]) -> [u8; N] {
1051 let byte = PrunableBitMap::<N>::chunk_byte_offset(bit);
1052 let mask = PrunableBitMap::<N>::chunk_byte_bitmask(bit);
1053 let mut tmp = chunk.to_vec();
1054 tmp[byte] ^= mask;
1055 tmp.try_into().unwrap()
1056 }
1057
    /// Runs the proof round-trip test for both supported chunk sizes.
    #[test_traced]
    fn test_bitmap_mmr_proof_verification() {
        test_bitmap_mmr_proof_verification_n::<32>();
        test_bitmap_mmr_proof_verification_n::<64>();
    }

    /// For every retained bit and several pruning boundaries: a generated
    /// proof must verify against the root, and must fail after flipping the
    /// proven bit in the chunk.
    fn test_bitmap_mmr_proof_verification_n<const N: usize>() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap: MerkleizedBitMap<TestContext, sha256::Digest, N> =
                MerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            for i in 0u32..10 {
                dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            // End on a partial chunk to exercise the partial-chunk proof path.
            dirty.push_byte(0xA6);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);
            dirty.push(true);
            dirty.push(false);

            let mut bitmap = dirty.merkleize(&mut hasher).unwrap();
            let root = bitmap.root();

            // 251 is coprime with the chunk size, so pruning boundaries land
            // at varied offsets within chunks.
            for prune_to_bit in (0..bitmap.len()).step_by(251) {
                assert_eq!(bitmap.root(), root);
                bitmap.prune_to_bit(prune_to_bit).unwrap();
                for i in prune_to_bit..bitmap.len() {
                    let (proof, chunk) = bitmap.proof(&mut hasher, i).await.unwrap();

                    assert!(
                        MerkleizedBitMap::<TestContext, _, N>::verify_bit_inclusion(
                            &mut hasher,
                            &proof,
                            &chunk,
                            i,
                            &root
                        ),
                        "failed to prove bit {i}",
                    );

                    // Corrupting the proven bit must invalidate the proof.
                    let corrupted = flip_bit(i, &chunk);
                    assert!(
                        !MerkleizedBitMap::<TestContext, _, N>::verify_bit_inclusion(
                            &mut hasher,
                            &proof,
                            &corrupted,
                            i,
                            &root
                        ),
                        "proving bit {i} after flipping should have failed",
                    );
                }
            }
        })
    }
1124
    /// Repeatedly prunes, persists via `write_pruned`, restores from the same
    /// partition, and rebuilds the pruned-away chunks — the roots must match
    /// the originals at every cycle.
    #[test_traced]
    fn test_bitmap_persistence() {
        const PARTITION: &str = "bitmap-test";
        const FULL_CHUNK_COUNT: usize = 100;

        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
                context.with_label("initial"),
                PARTITION,
                None,
                &mut hasher,
            )
            .await
            .unwrap();
            assert_eq!(bitmap.len(), 0);

            // Reference root #1: chunk-aligned bitmap of 100 chunks.
            let mut dirty = bitmap.into_dirty();
            for i in 0..FULL_CHUNK_COUNT {
                dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            let mut bitmap = dirty.merkleize(&mut hasher).unwrap();
            let chunk_aligned_root = bitmap.root();

            // Reference root #2: same bitmap plus a partial trailing chunk.
            let mut dirty = bitmap.into_dirty();
            dirty.push_byte(0xA6);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).unwrap();
            let root = bitmap.root();

            // Prune at every 10-chunk boundary, persist, restore, and rebuild.
            for i in (10..=FULL_CHUNK_COUNT).step_by(10) {
                bitmap
                    .prune_to_bit(
                        (i * TestMerkleizedBitMap::<SHA256_SIZE>::CHUNK_SIZE_BITS as usize) as u64,
                    )
                    .unwrap();
                bitmap.write_pruned().await.unwrap();
                bitmap = TestMerkleizedBitMap::init(
                    context.with_label(&format!("restore_{i}")),
                    PARTITION,
                    None,
                    &mut hasher,
                )
                .await
                .unwrap();
                let _ = bitmap.root();

                // Re-append the unpruned complete chunks and check root #1.
                let mut dirty = bitmap.into_dirty();
                for j in i..FULL_CHUNK_COUNT {
                    dirty.push_chunk(&test_chunk(format!("test{j}").as_bytes()));
                }
                assert_eq!(dirty.bitmap.pruned_chunks(), i);
                assert_eq!(dirty.len(), FULL_CHUNK_COUNT as u64 * 256);
                bitmap = dirty.merkleize(&mut hasher).unwrap();
                assert_eq!(bitmap.root(), chunk_aligned_root);

                // Re-append the partial tail and check root #2.
                let mut dirty = bitmap.into_dirty();
                dirty.push_byte(0xA6);
                dirty.push(true);
                dirty.push(false);
                dirty.push(true);
                bitmap = dirty.merkleize(&mut hasher).unwrap();
                assert_eq!(bitmap.root(), root);
            }
        });
    }
1200
    /// `proof` must return `BitOutOfBounds` (with the offending bit and the
    /// bitmap length) for bits at or beyond the length, and succeed within.
    #[test_traced]
    fn test_bitmap_proof_out_of_bounds() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            let bitmap = dirty.merkleize(&mut hasher).unwrap();

            // One 32-byte chunk => valid bits are 0..256.
            let result = bitmap.proof(&mut hasher, 256).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 256 && size == 256));

            let result = bitmap.proof(&mut hasher, 1000).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 1000 && size == 256));

            // Boundary bits inside the range must succeed.
            assert!(bitmap.proof(&mut hasher, 0).await.is_ok());
            assert!(bitmap.proof(&mut hasher, 255).await.is_ok());
        });
    }
1228}