//! A bitmap whose chunks are authenticated by a Merkle Mountain Range (MMR), with support for
//! pruning an already-authenticated prefix.

use crate::{
    metadata::{Config as MConfig, Metadata},
    mmr::{
        hasher::Hasher as MmrHasher,
        iterator::nodes_to_pin,
        mem::{Clean, CleanMmr, Config, Dirty, Mmr, State as MmrState},
        storage::Storage,
        verification,
        Error::{self, *},
        Location, Position, Proof,
    },
};
use commonware_codec::DecodeExt;
use commonware_cryptography::{Digest, Hasher};
use commonware_parallel::ThreadPool;
use commonware_runtime::{Clock, Metrics, Storage as RStorage};
use commonware_utils::{
    bitmap::{BitMap as UtilsBitMap, Prunable as PrunableBitMap},
    sequence::prefixed_u64::U64,
};
use std::collections::HashSet;
use tracing::{debug, error, warn};

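/// Computes a root digest that commits to both the MMR root over the complete chunks and a
/// partially filled last chunk, by additionally hashing in the number of bits (`next_bit`) that
/// chunk contains and its digest.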
pub(crate) fn partial_chunk_root<H: Hasher, const N: usize>(
    hasher: &mut H,
    mmr_root: &H::Digest,
    next_bit: u64,
    last_chunk_digest: &H::Digest,
) -> H::Digest {
    assert!(next_bit > 0);
    assert!(next_bit < UtilsBitMap::<N>::CHUNK_SIZE_BITS);
    hasher.update(mmr_root);
    hasher.update(&next_bit.to_be_bytes());
    hasher.update(last_chunk_digest);
    hasher.finalize()
}

mod private {
    pub trait Sealed {}
}

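/// The merkleization state of a [BitMap], either [Merkleized] or [Unmerkleized]. The trait is
/// sealed, so no states other than these two can be implemented.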
pub trait State<D: Digest>: private::Sealed + Sized + Send + Sync {
    type MmrState: MmrState<D>;
}

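/// State of a [BitMap] whose root has been computed and cached, and from which proofs can be
/// generated.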
pub struct Merkleized<D: Digest> {
    /// The cached root of the bitmap.
    root: D,
}

impl<D: Digest> private::Sealed for Merkleized<D> {}
impl<D: Digest> State<D> for Merkleized<D> {
    type MmrState = Clean<D>;
}

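/// State of a [BitMap] with pending updates that have not yet been incorporated into its MMR. No
/// root or proofs can be produced until it is merkleized again.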
pub struct Unmerkleized {
    /// Indices (relative to the pruning boundary) of already-authenticated chunks that have been
    /// modified since the last merkleization.
    dirty_chunks: HashSet<usize>,
}

impl private::Sealed for Unmerkleized {}
impl<D: Digest> State<D> for Unmerkleized {
    type MmrState = Dirty;
}

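/// A [BitMap] in the merkleized state.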
pub type MerkleizedBitMap<E, D, const N: usize> = BitMap<E, D, N, Merkleized<D>>;

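/// A [BitMap] in the unmerkleized state, which accepts updates but cannot produce roots or proofs.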
pub type UnmerkleizedBitMap<E, D, const N: usize> = BitMap<E, D, N, Unmerkleized>;

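/// A bitmap whose complete chunks are authenticated by a Merkle Mountain Range, with support for
/// pruning an already-authenticated prefix.
///
/// The type parameter `S` tracks whether the bitmap is [Merkleized] or [Unmerkleized]: updates
/// are only accepted in the unmerkleized state, and roots and proofs are only available in the
/// merkleized state. A typical lifecycle (mirroring the tests in this module):
///
/// ```ignore
/// let bitmap = MerkleizedBitMap::init(context, "partition", None, &mut hasher).await?;
/// let mut dirty = bitmap.into_dirty();
/// dirty.push(true);
/// let bitmap = dirty.merkleize(&mut hasher).await?;
/// let root = bitmap.root();
/// let (proof, chunk) = bitmap.proof(&mut hasher, 0).await?;
/// ```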
pub struct BitMap<
    E: Clock + RStorage + Metrics,
    D: Digest,
    const N: usize,
    S: State<D> = Merkleized<D>,
> {
    /// The underlying prunable bitmap.
    bitmap: PrunableBitMap<N>,

    /// The number of complete chunks (beyond the pruning boundary) whose digests have been added
    /// to the MMR.
    authenticated_len: usize,

    /// The MMR over the bitmap's complete chunks.
    mmr: Mmr<D, S::MmrState>,

    /// An optional thread pool used to parallelize merkleization.
    pool: Option<ThreadPool>,

    /// State-specific data: the cached root when merkleized, or the set of dirty chunks when
    /// unmerkleized.
    state: S,

    /// Metadata store used to persist the pruned chunk count and the MMR nodes pinned at the
    /// pruning boundary.
    metadata: Metadata<E, U64, Vec<u8>>,
}

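/// Metadata key prefix under which pinned MMR nodes are stored.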
const NODE_PREFIX: u8 = 0;

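/// Metadata key prefix under which the pruned chunk count is stored.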
const PRUNED_CHUNKS_PREFIX: u8 = 1;

impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize, S: State<D>> BitMap<E, D, N, S> {
    /// The number of bits in each chunk of the bitmap.
    pub const CHUNK_SIZE_BITS: u64 = PrunableBitMap::<N>::CHUNK_SIZE_BITS;

    /// The size of the MMR over the bitmap's complete chunks.
    #[inline]
    pub fn size(&self) -> Position {
        self.mmr.size()
    }

    /// The number of bits in the bitmap, including those that have been pruned.
    #[inline]
    pub const fn len(&self) -> u64 {
        self.bitmap.len()
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// The number of bits that have been pruned.
    #[inline]
    pub const fn pruned_bits(&self) -> u64 {
        self.bitmap.pruned_bits()
    }

    /// The number of retained (unpruned) chunks that are completely full.
    #[inline]
    fn complete_chunks(&self) -> usize {
        let chunks_len = self.bitmap.chunks_len();
        if self.bitmap.is_chunk_aligned() {
            chunks_len
        } else {
            chunks_len.checked_sub(1).unwrap()
        }
    }

    /// The last chunk of the bitmap and the number of bits it contains.
    #[inline]
    pub fn last_chunk(&self) -> (&[u8; N], u64) {
        self.bitmap.last_chunk()
    }

    /// The chunk containing the given bit.
    #[inline]
    pub fn get_chunk_containing(&self, bit: u64) -> &[u8; N] {
        self.bitmap.get_chunk_containing(bit)
    }

    /// The value of the given bit. Panics if the bit is out of bounds or has been pruned.
    #[inline]
    pub fn get_bit(&self, bit: u64) -> bool {
        self.bitmap.get_bit(bit)
    }

    #[inline]
    pub const fn get_bit_from_chunk(chunk: &[u8; N], bit: u64) -> bool {
        PrunableBitMap::<N>::get_bit_from_chunk(chunk, bit)
    }

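    /// Verifies that `chunk` is the chunk containing bit `bit` of the bitmap committed to by
    /// `root`, using `proof`. Returns false (without panicking) if the proof is malformed or does
    /// not match.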
    pub fn verify_bit_inclusion(
        hasher: &mut impl MmrHasher<Digest = D>,
        proof: &Proof<D>,
        chunk: &[u8; N],
        bit: u64,
        root: &D,
    ) -> bool {
        let bit_len = *proof.leaves;
        if bit >= bit_len {
            debug!(bit_len, bit, "tried to verify non-existent bit");
            return false;
        }

        // Convert the proof's bit count into the number of chunks it covers.
        let chunked_leaves =
            Location::new_unchecked(PrunableBitMap::<N>::unpruned_chunk(bit_len) as u64);
        let mut mmr_proof = Proof {
            leaves: chunked_leaves,
            digests: proof.digests.clone(),
        };

        let loc = Location::new_unchecked(PrunableBitMap::<N>::unpruned_chunk(bit) as u64);
        if bit_len.is_multiple_of(Self::CHUNK_SIZE_BITS) {
            // The bitmap is chunk aligned, so the root is just the MMR root.
            return mmr_proof.verify_element_inclusion(hasher, chunk, loc, root);
        }

        if proof.digests.is_empty() {
            debug!("proof has no digests");
            return false;
        }
        // The last digest in the proof is the digest of the partial last chunk (or, when the bit
        // lies in the partial chunk itself, the MMR root).
        let last_digest = mmr_proof.digests.pop().unwrap();

        if chunked_leaves == loc {
            // The bit lies in the partial last chunk, which is not part of the MMR.
            if !mmr_proof.digests.is_empty() {
                debug!(
                    digests = mmr_proof.digests.len() + 1,
                    "proof over partial chunk should have exactly 1 digest"
                );
                return false;
            }
            let last_chunk_digest = hasher.digest(chunk);
            let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
            let reconstructed_root = partial_chunk_root::<_, N>(
                hasher.inner(),
                &last_digest,
                next_bit,
                &last_chunk_digest,
            );
            return reconstructed_root == *root;
        }

        // The bit lies in a complete chunk: reconstruct the MMR root from the proof, then combine
        // it with the digest of the partial last chunk.
        let mmr_root = match mmr_proof.reconstruct_root(hasher, &[chunk], loc) {
            Ok(root) => root,
            Err(error) => {
                debug!(error = ?error, "invalid proof input");
                return false;
            }
        };

        let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
        let reconstructed_root =
            partial_chunk_root::<_, N>(hasher.inner(), &mmr_root, next_bit, &last_digest);

        reconstructed_root == *root
    }
}

impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize> MerkleizedBitMap<E, D, N> {
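    /// Initializes a merkleized bitmap from the pruning state persisted in `partition`, or an
    /// empty bitmap if no state has been persisted. Only the pruned prefix is restored; any bits
    /// beyond the pruning boundary must be replayed by the caller.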
    pub async fn init(
        context: E,
        partition: &str,
        pool: Option<ThreadPool>,
        hasher: &mut impl MmrHasher<Digest = D>,
    ) -> Result<Self, Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;

        let key: U64 = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        let pruned_chunks = match metadata.get(&key) {
            Some(bytes) => u64::from_be_bytes(bytes.as_slice().try_into().map_err(|_| {
                error!("pruned chunks value not a valid u64");
                Error::DataCorrupted("pruned chunks value not a valid u64")
            })?),
            None => {
                warn!("bitmap metadata does not contain pruned chunks, initializing as empty");
                0
            }
        } as usize;
        if pruned_chunks == 0 {
            let mmr = CleanMmr::new(hasher);
            let cached_root = *mmr.root();
            return Ok(Self {
                bitmap: PrunableBitMap::new(),
                authenticated_len: 0,
                mmr,
                pool,
                metadata,
                state: Merkleized { root: cached_root },
            });
        }
        let mmr_size = Position::try_from(Location::new_unchecked(pruned_chunks as u64))?;

        // Recover the MMR nodes pinned at the pruning boundary from metadata.
        let mut pinned_nodes = Vec::new();
        for (index, pos) in nodes_to_pin(mmr_size).enumerate() {
            let Some(bytes) = metadata.get(&U64::new(NODE_PREFIX, index as u64)) else {
                error!(?mmr_size, ?pos, "missing pinned node");
                return Err(MissingNode(pos));
            };
            let digest = D::decode(bytes.as_ref());
            let Ok(digest) = digest else {
                error!(?mmr_size, ?pos, "could not convert node bytes to digest");
                return Err(MissingNode(pos));
            };
            pinned_nodes.push(digest);
        }

        let mmr = CleanMmr::init(
            Config {
                nodes: Vec::new(),
                pruned_to_pos: mmr_size,
                pinned_nodes,
            },
            hasher,
        )?;

        let bitmap = PrunableBitMap::new_with_pruned_chunks(pruned_chunks)
            .expect("pruned_chunks should never overflow");
        let cached_root = *mmr.root();
        Ok(Self {
            bitmap,
            authenticated_len: 0,
            mmr,
            pool,
            metadata,
            state: Merkleized { root: cached_root },
        })
    }

    pub fn get_node(&self, position: Position) -> Option<D> {
        self.mmr.get_node(position)
    }

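    /// Persists the information needed to recover the pruned portion of the bitmap (the pruned
    /// chunk count and the MMR nodes pinned at the pruning boundary) to metadata.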
    pub async fn write_pruned(&mut self) -> Result<(), Error> {
        self.metadata.clear();

        // Record the number of pruned chunks.
        let key = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        self.metadata
            .put(key, self.bitmap.pruned_chunks().to_be_bytes().to_vec());

        // Record the MMR nodes pinned at the pruning boundary.
        let mmr_size =
            Position::try_from(Location::new_unchecked(self.bitmap.pruned_chunks() as u64))?;
        for (i, pos) in nodes_to_pin(mmr_size).enumerate() {
            let digest = self.mmr.get_node_unchecked(pos);
            let key = U64::new(NODE_PREFIX, i as u64);
            self.metadata.put(key, digest.to_vec());
        }

        self.metadata.sync().await.map_err(MetadataError)
    }

    /// Destroys the bitmap's underlying metadata store.
    pub async fn destroy(self) -> Result<(), Error> {
        self.metadata.destroy().await.map_err(MetadataError)
    }

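    /// Prunes the bitmap up to (but not including) the chunk containing the given bit. No-op if
    /// that chunk has already been pruned. Pruning does not affect the root.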
    pub fn prune_to_bit(&mut self, bit: u64) -> Result<(), Error> {
        let chunk = PrunableBitMap::<N>::unpruned_chunk(bit);
        if chunk < self.bitmap.pruned_chunks() {
            return Ok(());
        }

        self.bitmap.prune_to_bit(bit);

        self.authenticated_len = self.complete_chunks();

        // Prune the MMR to the leaf position of the first retained chunk.
        let mmr_pos = Position::try_from(Location::new_unchecked(chunk as u64)).unwrap();
        self.mmr.prune_to_pos(mmr_pos);
        Ok(())
    }

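    /// Returns the cached root of the bitmap. When the bitmap is not chunk aligned, this root
    /// additionally commits to the contents of the partial last chunk (see [partial_chunk_root]).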
    pub const fn root(&self) -> D {
        self.state.root
    }

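    /// Returns a proof that the chunk containing the given bit belongs to the bitmap, along with
    /// that chunk. The proof can be checked with [Self::verify_bit_inclusion]. Returns
    /// [Error::BitOutOfBounds] if `bit` is beyond the end of the bitmap.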
    pub async fn proof(
        &self,
        hasher: &mut impl MmrHasher<Digest = D>,
        bit: u64,
    ) -> Result<(Proof<D>, [u8; N]), Error> {
        if bit >= self.len() {
            return Err(Error::BitOutOfBounds(bit, self.len()));
        }

        let chunk = *self.get_chunk_containing(bit);
        let chunk_loc = Location::from(PrunableBitMap::<N>::unpruned_chunk(bit));
        let (last_chunk, next_bit) = self.bitmap.last_chunk();

        if chunk_loc == self.mmr.leaves() {
            // The bit lies in the partial last chunk, which is not part of the MMR. The only
            // digest needed is the MMR root over all complete chunks.
            assert!(next_bit > 0);
            return Ok((
                Proof {
                    leaves: Location::new_unchecked(self.len()),
                    digests: vec![*self.mmr.root()],
                },
                chunk,
            ));
        }

        let range = chunk_loc..chunk_loc + 1;
        let mut proof = verification::range_proof(&self.mmr, range).await?;
        proof.leaves = Location::new_unchecked(self.len());
        if next_bit == Self::CHUNK_SIZE_BITS {
            // The last chunk is full (the bitmap is chunk aligned), so the MMR proof suffices.
            return Ok((proof, chunk));
        }

        // Append the digest of the partial last chunk so the verifier can recompute the root.
        let last_chunk_digest = hasher.digest(last_chunk);
        proof.digests.push(last_chunk_digest);

        Ok((proof, chunk))
    }

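    /// Converts the bitmap into its unmerkleized state so it can accept updates.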
    pub fn into_dirty(self) -> UnmerkleizedBitMap<E, D, N> {
        UnmerkleizedBitMap {
            bitmap: self.bitmap,
            authenticated_len: self.authenticated_len,
            mmr: self.mmr.into_dirty(),
            pool: self.pool,
            state: Unmerkleized {
                dirty_chunks: HashSet::new(),
            },
            metadata: self.metadata,
        }
    }
}

impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize> UnmerkleizedBitMap<E, D, N> {
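    /// Appends a bit to the end of the bitmap.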
    pub fn push(&mut self, bit: bool) {
        self.bitmap.push(bit);
    }

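    /// Sets the value of the given bit, marking its chunk dirty if it was already authenticated.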
    pub fn set_bit(&mut self, bit: u64, value: bool) {
        self.bitmap.set_bit(bit, value);

        // Chunks that haven't been authenticated yet don't need to be tracked; they will be added
        // to the MMR wholesale on the next merkleization.
        let chunk = self.bitmap.pruned_chunk(bit);
        if chunk < self.authenticated_len {
            self.state.dirty_chunks.insert(chunk);
        }
    }

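    /// Returns the locations of all chunks that will be (re)hashed on the next merkleization:
    /// already-authenticated chunks that were modified, plus complete chunks not yet added to the
    /// MMR.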
    pub fn dirty_chunks(&self) -> Vec<Location> {
        let pruned_chunks = self.bitmap.pruned_chunks();
        let mut chunks: Vec<Location> = self
            .state
            .dirty_chunks
            .iter()
            .map(|&chunk| Location::new_unchecked((chunk + pruned_chunks) as u64))
            .collect();

        // Complete chunks that have not yet been added to the MMR are also considered dirty.
        for i in self.authenticated_len..self.complete_chunks() {
            chunks.push(Location::new_unchecked((i + pruned_chunks) as u64));
        }

        chunks
    }

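    /// Incorporates all pending updates into the MMR and returns the bitmap in its merkleized
    /// state, with a freshly computed root.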
    pub async fn merkleize(
        mut self,
        hasher: &mut impl MmrHasher<Digest = D>,
    ) -> Result<MerkleizedBitMap<E, D, N>, Error> {
        // Add any newly completed chunks to the MMR.
        let start = self.authenticated_len;
        let end = self.complete_chunks();
        for i in start..end {
            self.mmr.add(hasher, self.bitmap.get_chunk(i));
        }
        self.authenticated_len = end;

        // Update the leaves of previously authenticated chunks that were modified.
        let pruned_chunks = self.bitmap.pruned_chunks();
        let updates = self
            .state
            .dirty_chunks
            .iter()
            .map(|chunk| {
                let loc = Location::new_unchecked((*chunk + pruned_chunks) as u64);
                (loc, self.bitmap.get_chunk(*chunk))
            })
            .collect::<Vec<_>>();
        self.mmr
            .update_leaf_batched(hasher, self.pool.clone(), &updates)?;
        let mmr = self.mmr.merkleize(hasher, self.pool.clone());

        // If the bitmap isn't chunk aligned, fold the partial last chunk into the root.
        let mmr_root = *mmr.root();
        let cached_root = if self.bitmap.is_chunk_aligned() {
            mmr_root
        } else {
            let (last_chunk, next_bit) = self.bitmap.last_chunk();
            let last_chunk_digest = hasher.digest(last_chunk);
            partial_chunk_root::<_, N>(hasher.inner(), &mmr_root, next_bit, &last_chunk_digest)
        };

        Ok(MerkleizedBitMap {
            bitmap: self.bitmap,
            authenticated_len: self.authenticated_len,
            mmr,
            pool: self.pool,
            metadata: self.metadata,
            state: Merkleized { root: cached_root },
        })
    }
}

impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize> Storage<D>
    for MerkleizedBitMap<E, D, N>
{
    fn size(&self) -> Position {
        self.size()
    }

    async fn get_node(&self, position: Position) -> Result<Option<D>, Error> {
        Ok(self.get_node(position))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::mmr::StandardHasher;
    use commonware_codec::FixedSize;
    use commonware_cryptography::{sha256, Hasher, Sha256};
    use commonware_macros::test_traced;
    use commonware_runtime::{deterministic, Metrics, Runner as _};

    const SHA256_SIZE: usize = sha256::Digest::SIZE;

    type TestContext = deterministic::Context;
    type TestMerkleizedBitMap<const N: usize> = MerkleizedBitMap<TestContext, sha256::Digest, N>;

    impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize, S: State<D>> BitMap<E, D, N, S> {
        /// Returns the MMR leaf position of the chunk containing the given bit.
        pub(crate) fn leaf_pos(bit: u64) -> Position {
            let chunk = PrunableBitMap::<N>::unpruned_chunk(bit);
            let chunk = Location::new_unchecked(chunk as u64);
            Position::try_from(chunk).expect("chunk should never overflow MAX_LOCATION")
        }
    }

    impl<E: Clock + RStorage + Metrics, D: Digest, const N: usize> UnmerkleizedBitMap<E, D, N> {
        /// Appends a whole byte (8 bits) to the bitmap.
        fn push_byte(&mut self, byte: u8) {
            self.bitmap.push_byte(byte);
        }

        /// Appends a whole chunk to the bitmap.
        fn push_chunk(&mut self, chunk: &[u8; N]) {
            self.bitmap.push_chunk(chunk);
        }
    }

    /// Builds a deterministic test chunk of size N by repeating the SHA-256 hash of `s`.
    fn test_chunk<const N: usize>(s: &[u8]) -> [u8; N] {
        assert_eq!(N % 32, 0);
        let mut vec: Vec<u8> = Vec::new();
        for _ in 0..N / 32 {
            vec.extend(Sha256::hash(s).iter());
        }

        vec.try_into().unwrap()
    }

    #[test_traced]
    fn test_bitmap_verify_empty_proof() {
        let executor = deterministic::Runner::default();
        executor.start(|_context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let proof = Proof {
                leaves: Location::new_unchecked(100),
                digests: Vec::new(),
            };
            assert!(
                !TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &[0u8; SHA256_SIZE],
                    0,
                    &Sha256::fill(0x00),
                ),
                "proof without digests shouldn't verify or panic"
            );
        });
    }

    #[test_traced]
    fn test_bitmap_empty_then_one() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let mut bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            assert_eq!(bitmap.len(), 0);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            bitmap.prune_to_bit(0).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);

            // Add a single bit; the root should change.
            let root = bitmap.root();
            let mut dirty = bitmap.into_dirty();
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let new_root = bitmap.root();
            assert_ne!(root, new_root);
            let root = new_root;
            bitmap.prune_to_bit(1).unwrap();
            assert_eq!(bitmap.len(), 1);
            assert_ne!(bitmap.last_chunk().0, &[0u8; SHA256_SIZE]);
            assert_eq!(bitmap.last_chunk().1, 1);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            assert_eq!(root, bitmap.root());

            // Fill up the rest of the first chunk.
            let mut dirty = bitmap.into_dirty();
            for i in 0..(TestMerkleizedBitMap::<SHA256_SIZE>::CHUNK_SIZE_BITS - 1) {
                dirty.push(i % 2 != 0);
            }
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_ne!(root, bitmap.root());
            let root = bitmap.root();

            // A proof over the only chunk should verify bits in that chunk and nothing beyond it.
            let (proof, chunk) = bitmap.proof(&mut hasher, 0).await.unwrap();
            assert!(
                TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &chunk,
                    255,
                    &root
                ),
                "failed to prove bit in only chunk"
            );
            assert!(
                !TestMerkleizedBitMap::<SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &chunk,
                    256,
                    &root
                ),
                "should not be able to prove bit outside of chunk"
            );

            // Prune the now-complete chunk; the root should not change.
            bitmap.prune_to_bit(256).unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 1);
            assert_eq!(bitmap.bitmap.pruned_bits(), 256);
            assert_eq!(root, bitmap.root());

            // Pruning to an earlier point should be a no-op.
            bitmap.prune_to_bit(10).unwrap();
            assert_eq!(root, bitmap.root());
        });
    }

    #[test_traced]
    fn test_bitmap_building() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let test_chunk = test_chunk(b"test");
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();

            // Build the same two-chunk bitmap three different ways and confirm the roots match.
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
                context.with_label("bitmap1"),
                "test1",
                None,
                &mut hasher,
            )
            .await
            .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk);
            for b in test_chunk {
                for j in 0..8 {
                    let mask = 1 << j;
                    let bit = (b & mask) != 0;
                    dirty.push(bit);
                }
            }
            assert_eq!(dirty.len(), 256 * 2);

            let bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();
            let inner_root = *bitmap.mmr.root();
            assert_eq!(root, inner_root);

            {
                // Repeat using push_chunk only.
                let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
                    context.with_label("bitmap2"),
                    "test2",
                    None,
                    &mut hasher,
                )
                .await
                .unwrap();
                let mut dirty = bitmap.into_dirty();
                dirty.push_chunk(&test_chunk);
                dirty.push_chunk(&test_chunk);
                let bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let same_root = bitmap.root();
                assert_eq!(root, same_root);
            }
            {
                // Repeat using push_chunk followed by push_byte.
                let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
                    context.with_label("bitmap3"),
                    "test3",
                    None,
                    &mut hasher,
                )
                .await
                .unwrap();
                let mut dirty = bitmap.into_dirty();
                dirty.push_chunk(&test_chunk);
                for b in test_chunk {
                    dirty.push_byte(b);
                }
                let bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let same_root = bitmap.root();
                assert_eq!(root, same_root);
            }
        });
    }

    #[test_traced]
    #[should_panic(expected = "cannot add chunk")]
    fn test_bitmap_build_chunked_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push(true);
            dirty.push_chunk(&test_chunk(b"panic"));
        });
    }

    #[test_traced]
    #[should_panic(expected = "cannot add byte")]
    fn test_bitmap_build_byte_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push(true);
            dirty.push_byte(0x01);
        });
    }

    #[test_traced]
    #[should_panic(expected = "out of bounds")]
    fn test_bitmap_get_out_of_bounds_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.get_bit(256);
        });
    }

    #[test_traced]
    #[should_panic(expected = "pruned")]
    fn test_bitmap_get_pruned_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();

            bitmap.prune_to_bit(256).unwrap();
            bitmap.get_bit(255);
        });
    }

    #[test_traced]
    fn test_bitmap_root_boundaries() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            // Build a starting bitmap with two chunks worth of bits.
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();

            let root = bitmap.root();

            // Add a bit: the root should change even though the MMR itself doesn't.
            let mut dirty = bitmap.into_dirty();
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let new_root = bitmap.root();
            assert_ne!(root, new_root);
            assert_eq!(bitmap.mmr.size(), 3); // only the two complete chunks are in the MMR

            // Fill out the rest of the third chunk; the root should differ from the pre-fill root
            // at every step.
            for _ in 0..(SHA256_SIZE * 8 - 1) {
                let mut dirty = bitmap.into_dirty();
                dirty.push(false);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let newer_root = bitmap.root();
                assert_ne!(new_root, newer_root);
            }
            assert_eq!(bitmap.mmr.size(), 4); // the third chunk is now complete

            // Start a fourth chunk.
            let mut dirty = bitmap.into_dirty();
            dirty.push(false);
            assert_eq!(dirty.len(), 256 * 3 + 1);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let newer_root = bitmap.root();
            assert_ne!(new_root, newer_root);

            // Pruning shouldn't affect the root.
            bitmap.prune_to_bit(bitmap.len()).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 3);
            assert_eq!(bitmap.len(), 256 * 3 + 1);
            assert_eq!(newer_root, bitmap.root());
        });
    }

    #[test_traced]
    fn test_bitmap_get_set_bits() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            // Build a bitmap with four full chunks plus some trailing bits.
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            dirty.push_chunk(&test_chunk(b"test3"));
            dirty.push_chunk(&test_chunk(b"test4"));
            dirty.push_byte(0xF1);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);

            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();

            // Flip each bit and confirm the root changes, then flip it back and confirm the root
            // is restored.
            for bit_pos in (0..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, !bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_eq!(root, new_root);
            }

            // Repeat the exercise after pruning the first two chunks.
            let start_bit = (SHA256_SIZE * 8 * 2) as u64;
            bitmap.prune_to_bit(start_bit).unwrap();
            for bit_pos in (start_bit..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, !bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_eq!(root, new_root);
            }
        });
    }

    /// Flips the value of the given bit within a copy of the chunk.
    fn flip_bit<const N: usize>(bit: u64, chunk: &[u8; N]) -> [u8; N] {
        let byte = PrunableBitMap::<N>::chunk_byte_offset(bit);
        let mask = PrunableBitMap::<N>::chunk_byte_bitmask(bit);
        let mut tmp = chunk.to_vec();
        tmp[byte] ^= mask;
        tmp.try_into().unwrap()
    }

    #[test_traced]
    fn test_bitmap_mmr_proof_verification() {
        test_bitmap_mmr_proof_verification_n::<32>();
        test_bitmap_mmr_proof_verification_n::<64>();
    }

    fn test_bitmap_mmr_proof_verification_n<const N: usize>() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            // Build a bitmap with 10 chunks plus some trailing bits.
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap: MerkleizedBitMap<TestContext, sha256::Digest, N> =
                MerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            for i in 0u32..10 {
                dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            dirty.push_byte(0xA6);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);
            dirty.push(true);
            dirty.push(false);

            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();

            // Verify proofs for every bit across several pruning boundaries.
            for prune_to_bit in (0..bitmap.len()).step_by(251) {
                assert_eq!(bitmap.root(), root);
                bitmap.prune_to_bit(prune_to_bit).unwrap();
                for i in prune_to_bit..bitmap.len() {
                    let (proof, chunk) = bitmap.proof(&mut hasher, i).await.unwrap();

                    // Proving the bit should succeed with the correct chunk...
                    assert!(
                        MerkleizedBitMap::<TestContext, _, N>::verify_bit_inclusion(
                            &mut hasher,
                            &proof,
                            &chunk,
                            i,
                            &root
                        ),
                        "failed to prove bit {i}",
                    );

                    // ...and fail once that bit is flipped in the chunk.
                    let corrupted = flip_bit(i, &chunk);
                    assert!(
                        !MerkleizedBitMap::<TestContext, _, N>::verify_bit_inclusion(
                            &mut hasher,
                            &proof,
                            &corrupted,
                            i,
                            &root
                        ),
                        "proving bit {i} after flipping should have failed",
                    );
                }
            }
        })
    }

    #[test_traced]
    fn test_bitmap_persistence() {
        const PARTITION: &str = "bitmap_test";
        const FULL_CHUNK_COUNT: usize = 100;

        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            // Initializing from an empty partition should yield an empty bitmap.
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> = TestMerkleizedBitMap::init(
                context.with_label("initial"),
                PARTITION,
                None,
                &mut hasher,
            )
            .await
            .unwrap();
            assert_eq!(bitmap.len(), 0);

            // Add FULL_CHUNK_COUNT full chunks.
            let mut dirty = bitmap.into_dirty();
            for i in 0..FULL_CHUNK_COUNT {
                dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let chunk_aligned_root = bitmap.root();

            // Add a few trailing bits so the bitmap is no longer chunk aligned.
            let mut dirty = bitmap.into_dirty();
            dirty.push_byte(0xA6);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();

            // Repeatedly prune, persist, restore, and rebuild, confirming the roots are unchanged.
            for i in (10..=FULL_CHUNK_COUNT).step_by(10) {
                bitmap
                    .prune_to_bit(
                        (i * TestMerkleizedBitMap::<SHA256_SIZE>::CHUNK_SIZE_BITS as usize) as u64,
                    )
                    .unwrap();
                bitmap.write_pruned().await.unwrap();
                bitmap = TestMerkleizedBitMap::init(
                    context.with_label(&format!("restore_{i}")),
                    PARTITION,
                    None,
                    &mut hasher,
                )
                .await
                .unwrap();
                let _ = bitmap.root();

                // Replay the unpruned chunks on top of the restored pruned prefix.
                let mut dirty = bitmap.into_dirty();
                for j in i..FULL_CHUNK_COUNT {
                    dirty.push_chunk(&test_chunk(format!("test{j}").as_bytes()));
                }
                assert_eq!(dirty.bitmap.pruned_chunks(), i);
                assert_eq!(dirty.len(), FULL_CHUNK_COUNT as u64 * 256);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                assert_eq!(bitmap.root(), chunk_aligned_root);

                // Re-add the trailing bits and confirm the non-aligned root also matches.
                let mut dirty = bitmap.into_dirty();
                dirty.push_byte(0xA6);
                dirty.push(true);
                dirty.push(false);
                dirty.push(true);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                assert_eq!(bitmap.root(), root);
            }
        });
    }

    #[test_traced]
    fn test_bitmap_proof_out_of_bounds() {
        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap: TestMerkleizedBitMap<SHA256_SIZE> =
                TestMerkleizedBitMap::init(context.with_label("bitmap"), "test", None, &mut hasher)
                    .await
                    .unwrap();
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            let bitmap = dirty.merkleize(&mut hasher).await.unwrap();

            // Requesting a proof for a bit at or beyond the length should fail.
            let result = bitmap.proof(&mut hasher, 256).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 256 && size == 256));

            let result = bitmap.proof(&mut hasher, 1000).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 1000 && size == 256));

            // Bits within bounds should succeed.
            assert!(bitmap.proof(&mut hasher, 0).await.is_ok());
            assert!(bitmap.proof(&mut hasher, 255).await.is_ok());
        });
    }
}