use crate::{
    metadata::{Config as MConfig, Metadata},
    mmr::{
        hasher::Hasher,
        iterator::nodes_to_pin,
        mem::{Config, Mmr},
        storage::Storage,
        verification, Error,
        Error::*,
        Location, Position, Proof,
    },
};
use commonware_codec::DecodeExt;
use commonware_cryptography::Hasher as CHasher;
use commonware_runtime::{Clock, Metrics, Storage as RStorage, ThreadPool};
use commonware_utils::{bitmap::Prunable as PrunableBitMap, sequence::prefixed_u64::U64};
use std::collections::HashSet;
use tracing::{debug, error, warn};

/// A bitmap whose chunks are authenticated as the leaves of a Merkle mountain range (MMR),
/// supporting inclusion proofs over individual bits and pruning of already-authenticated
/// prefixes.
pub struct BitMap<H: CHasher, const N: usize> {
    /// The underlying bitmap, stored as N-byte chunks with a prunable prefix.
    bitmap: PrunableBitMap<N>,

    /// The number of complete, unpruned chunks that have been added to the MMR.
    authenticated_len: usize,

    /// An MMR over the bitmap's complete chunks, with one chunk per leaf.
    mmr: Mmr<H>,

    /// Indices (relative to the pruning boundary) of already-authenticated chunks that have been
    /// modified since the last merkleization.
    dirty_chunks: HashSet<usize>,
}

impl<H: CHasher, const N: usize> Default for BitMap<H, N> {
    fn default() -> Self {
        Self::new()
    }
}

/// Metadata key prefix for pinned MMR node digests.
const NODE_PREFIX: u8 = 0;

/// Metadata key prefix for the pruned chunk count.
const PRUNED_CHUNKS_PREFIX: u8 = 1;

impl<H: CHasher, const N: usize> BitMap<H, N> {
    /// The number of bits in each chunk of the bitmap.
    pub const CHUNK_SIZE_BITS: u64 = PrunableBitMap::<N>::CHUNK_SIZE_BITS;

    /// Returns a new empty bitmap.
    pub fn new() -> Self {
        BitMap {
            bitmap: PrunableBitMap::new(),
            authenticated_len: 0,
            mmr: Mmr::new(),
            dirty_chunks: HashSet::new(),
        }
    }

    /// Returns the size (in nodes) of the underlying MMR.
    pub fn size(&self) -> Position {
        self.mmr.size()
    }

    /// Returns the MMR node at `position`, if any.
    pub fn get_node(&self, position: Position) -> Option<H::Digest> {
        self.mmr.get_node(position)
    }

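    /// Restores a bitmap in its fully pruned state from the metadata previously written to
    /// `partition` by [`Self::write_pruned`], or returns an empty bitmap if no such state exists.
    /// Only the pruning boundary is recovered; the caller must replay any retained bits.
    ///
    /// Illustrative sketch (hypothetical context and partition name, error handling elided):
    ///
    /// ```ignore
    /// let mut bitmap =
    ///     BitMap::<Sha256, 32>::restore_pruned(context.clone(), "bitmap_partition", None).await?;
    /// // Re-push the bits beyond the pruning boundary before computing a root.
    /// ```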
    pub async fn restore_pruned<C: RStorage + Metrics + Clock>(
        context: C,
        partition: &str,
        pool: Option<ThreadPool>,
    ) -> Result<Self, Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;

        // Read the number of pruned chunks, defaulting to 0 if no state was persisted.
        let key: U64 = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        let pruned_chunks = match metadata.get(&key) {
            Some(bytes) => u64::from_be_bytes(bytes.as_slice().try_into().map_err(|_| {
                error!("pruned chunks value not a valid u64");
                Error::DataCorrupted("pruned chunks value not a valid u64")
            })?),
            None => {
                warn!("bitmap metadata does not contain pruned chunks, initializing as empty");
                0
            }
        } as usize;
        if pruned_chunks == 0 {
            return Ok(Self::new());
        }
        let mmr_size = Position::try_from(Location::new_unchecked(pruned_chunks as u64))?;

        // Restore the digests of the MMR nodes that must be pinned at the pruning boundary.
        let mut pinned_nodes = Vec::new();
        for (index, pos) in nodes_to_pin(mmr_size).enumerate() {
            let Some(bytes) = metadata.get(&U64::new(NODE_PREFIX, index as u64)) else {
                error!(?mmr_size, ?pos, "missing pinned node");
                return Err(MissingNode(pos));
            };
            let digest = H::Digest::decode(bytes.as_ref());
            let Ok(digest) = digest else {
                error!(?mmr_size, ?pos, "could not convert node bytes to digest");
                return Err(MissingNode(pos));
            };
            pinned_nodes.push(digest);
        }

        metadata.close().await?;

        let mmr = Mmr::init(Config {
            nodes: Vec::new(),
            pruned_to_pos: mmr_size,
            pinned_nodes,
            pool,
        })?;

        let bitmap = PrunableBitMap::new_with_pruned_chunks(pruned_chunks)
            .expect("pruned_chunks should never overflow");
        Ok(Self {
            bitmap,
            authenticated_len: 0,
            mmr,
            dirty_chunks: HashSet::new(),
        })
    }

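    /// Writes the state needed to restore this bitmap in its fully pruned form to `partition`:
    /// the pruned chunk count and the digests of the MMR nodes pinned at the pruning boundary.
    ///
    /// Illustrative round-trip sketch (hypothetical partition name, error handling elided):
    ///
    /// ```ignore
    /// bitmap.write_pruned(context.clone(), "bitmap_partition").await?;
    /// let restored =
    ///     BitMap::<Sha256, 32>::restore_pruned(context, "bitmap_partition", None).await?;
    /// assert_eq!(restored.pruned_bits(), bitmap.pruned_bits());
    /// ```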
    pub async fn write_pruned<C: RStorage + Metrics + Clock>(
        &self,
        context: C,
        partition: &str,
    ) -> Result<(), Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let mut metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;
        metadata.clear();

        // Write the number of pruned chunks (as a u64 so the encoding is platform-independent).
        let key = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        metadata.put(key, (self.bitmap.pruned_chunks() as u64).to_be_bytes().to_vec());

        // Write the digests of the MMR nodes that must be pinned at the pruning boundary.
        let mmr_size =
            Position::try_from(Location::new_unchecked(self.bitmap.pruned_chunks() as u64))?;
        for (i, pos) in nodes_to_pin(mmr_size).enumerate() {
            let digest = self.mmr.get_node_unchecked(pos);
            let key = U64::new(NODE_PREFIX, i as u64);
            metadata.put(key, digest.to_vec());
        }

        metadata.close().await.map_err(MetadataError)
    }

    /// Returns the number of bits in the bitmap, including those that have been pruned.
    #[inline]
    pub fn len(&self) -> u64 {
        self.bitmap.len()
    }

    /// Returns true if the bitmap is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the number of bits that have been pruned.
    #[inline]
    pub fn pruned_bits(&self) -> u64 {
        self.bitmap.pruned_bits()
    }

    /// Returns the number of retained chunks that are complete (a trailing partial chunk, if
    /// any, is excluded).
    #[inline]
    fn complete_chunks(&self) -> usize {
        let chunks_len = self.bitmap.chunks_len();
        if self.bitmap.is_chunk_aligned() {
            chunks_len
        } else {
            chunks_len.checked_sub(1).unwrap()
        }
    }

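    /// Prunes the bitmap (and the corresponding MMR nodes) up to the chunk containing the given
    /// bit. Returns [`Error::DirtyState`] if there are un-merkleized updates. Pruning does not
    /// change the root.
    ///
    /// Illustrative sketch (assumes `bitmap` already holds at least 256 merkleized bits):
    ///
    /// ```ignore
    /// let root = bitmap.root(&mut hasher).await?;
    /// bitmap.prune_to_bit(256)?;
    /// assert_eq!(root, bitmap.root(&mut hasher).await?); // unchanged by pruning
    /// ```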
    pub fn prune_to_bit(&mut self, bit: u64) -> Result<(), Error> {
        if self.is_dirty() {
            return Err(Error::DirtyState);
        }
        let chunk = PrunableBitMap::<N>::unpruned_chunk(bit);
        if chunk < self.bitmap.pruned_chunks() {
            // Already pruned past this point.
            return Ok(());
        }

        self.bitmap.prune_to_bit(bit);

        // Rebase the authenticated length relative to the new pruning boundary.
        self.authenticated_len = self.complete_chunks();

        // Prune the MMR to the position of the leaf corresponding to the pruned chunk.
        let mmr_pos = Position::try_from(Location::new_unchecked(chunk as u64)).unwrap();
        self.mmr.prune_to_pos(mmr_pos);
        Ok(())
    }

    /// Returns the last chunk of the bitmap and the number of bits it currently contains.
    #[inline]
    pub fn last_chunk(&self) -> (&[u8; N], u64) {
        self.bitmap.last_chunk()
    }

    /// Returns the chunk containing the given bit.
    ///
    /// # Panics
    ///
    /// Panics if the bit is out of bounds or pruned.
    #[inline]
    pub fn get_chunk_containing(&self, bit: u64) -> &[u8; N] {
        self.bitmap.get_chunk_containing(bit)
    }

    /// Appends a single bit to the end of the bitmap.
    pub fn push(&mut self, bit: bool) {
        self.bitmap.push(bit);
    }

    /// Returns the value of the given bit.
    #[inline]
    pub fn get_bit(&self, bit: u64) -> bool {
        self.bitmap.get_bit(bit)
    }

    /// Returns the value of the given bit from the provided chunk.
    #[inline]
    pub fn get_bit_from_chunk(chunk: &[u8; N], bit: u64) -> bool {
        PrunableBitMap::<N>::get_bit_from_chunk(chunk, bit)
    }

    /// Sets the value of the given bit, marking its chunk dirty if it was already authenticated.
    pub fn set_bit(&mut self, bit: u64, value: bool) {
        self.bitmap.set_bit(bit, value);

        // Only chunks already added to the MMR need to be tracked for re-hashing.
        let chunk = self.bitmap.pruned_chunk(bit);
        if chunk < self.authenticated_len {
            self.dirty_chunks.insert(chunk);
        }
    }

    /// Returns true if there are updates that have not yet been merkleized.
    pub fn is_dirty(&self) -> bool {
        !self.dirty_chunks.is_empty() || self.authenticated_len < self.complete_chunks()
    }

    /// Returns the locations of the chunks that have been modified or completed since the last
    /// merkleization.
    pub fn dirty_chunks(&self) -> Vec<Location> {
        let pruned_chunks = self.bitmap.pruned_chunks();
        let mut chunks: Vec<Location> = self
            .dirty_chunks
            .iter()
            .map(|&chunk| Location::new_unchecked((chunk + pruned_chunks) as u64))
            .collect();

        // Complete chunks not yet added to the MMR are also dirty.
        for i in self.authenticated_len..self.complete_chunks() {
            chunks.push(Location::new_unchecked((i + pruned_chunks) as u64));
        }

        chunks
    }

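    /// Processes all pending updates: newly completed chunks are appended to the MMR as leaves,
    /// the leaves of modified chunks are re-hashed, and the MMR is merkleized.
    ///
    /// Illustrative update cycle (assumes bit 42 lies in an already-merkleized chunk, and a
    /// `StandardHasher<Sha256>` named `hasher`):
    ///
    /// ```ignore
    /// bitmap.set_bit(42, true);
    /// assert!(bitmap.is_dirty());
    /// bitmap.merkleize(&mut hasher).await?;
    /// assert!(!bitmap.is_dirty()); // root and proofs are available again
    /// ```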
    pub async fn merkleize(&mut self, hasher: &mut impl Hasher<H>) -> Result<(), Error> {
        // Add newly complete chunks to the MMR as leaves.
        let start = self.authenticated_len;
        let end = self.complete_chunks();
        for i in start..end {
            self.mmr.add_batched(hasher, self.bitmap.get_chunk(i));
        }
        self.authenticated_len = end;

        // Re-hash the leaves of any modified chunks.
        let pruned_chunks = self.bitmap.pruned_chunks();
        let updates = self
            .dirty_chunks
            .iter()
            .map(|chunk| {
                let loc = Location::new_unchecked((*chunk + pruned_chunks) as u64);
                let pos = Position::try_from(loc).expect("invalid location");
                (pos, self.bitmap.get_chunk(*chunk))
            })
            .collect::<Vec<_>>();
        self.mmr.update_leaf_batched(hasher, &updates)?;
        self.dirty_chunks.clear();
        self.mmr.merkleize(hasher);

        Ok(())
    }

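    /// Returns the root digest against which bit inclusion proofs can be verified. When the bit
    /// count is chunk-aligned this is simply the MMR root; otherwise the trailing partial chunk
    /// is folded in via [`Self::partial_chunk_root`] so its bits are covered as well.
    ///
    /// # Panics
    ///
    /// Panics if there are un-merkleized updates. Illustrative sketch:
    ///
    /// ```ignore
    /// bitmap.merkleize(&mut hasher).await?;
    /// let root = bitmap.root(&mut hasher).await?;
    /// ```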
    pub async fn root(&self, hasher: &mut impl Hasher<H>) -> Result<H::Digest, Error> {
        assert!(
            !self.is_dirty(),
            "cannot compute root with unmerkleized updates",
        );
        let mmr_root = self.mmr.root(hasher);

        // If the bit count is chunk-aligned, the MMR root already covers every bit.
        if self.bitmap.is_chunk_aligned() {
            return Ok(mmr_root);
        }

        let (last_chunk, next_bit) = self.bitmap.last_chunk();

        // Fold the digest of the partial chunk into the MMR root.
        let last_chunk_digest = hasher.digest(last_chunk);
        Ok(Self::partial_chunk_root(
            hasher.inner(),
            &mmr_root,
            next_bit,
            &last_chunk_digest,
        ))
    }

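    /// Computes the bitmap root when the last chunk is partial, binding together the MMR root,
    /// the number of bits in the partial chunk, and the partial chunk's digest.
    ///
    /// Verifier-side sketch (hypothetical `mmr_root`, `next_bit`, `last_chunk_digest`, and
    /// `expected_root` values):
    ///
    /// ```ignore
    /// let recomputed = BitMap::<Sha256, 32>::partial_chunk_root(
    ///     hasher.inner(),
    ///     &mmr_root,
    ///     next_bit,
    ///     &last_chunk_digest,
    /// );
    /// assert_eq!(recomputed, expected_root);
    /// ```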
    pub fn partial_chunk_root(
        hasher: &mut H,
        mmr_root: &H::Digest,
        next_bit: u64,
        last_chunk_digest: &H::Digest,
    ) -> H::Digest {
        assert!(next_bit > 0);
        assert!(next_bit < Self::CHUNK_SIZE_BITS);
        hasher.update(mmr_root);
        hasher.update(&next_bit.to_be_bytes());
        hasher.update(last_chunk_digest);
        hasher.finalize()
    }

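    /// Returns an inclusion proof for the chunk containing the given bit, along with the chunk
    /// itself. For a bit within the trailing partial chunk the proof carries only the MMR root;
    /// otherwise it is a single-chunk MMR range proof, extended with the partial chunk's digest
    /// when the bitmap is not chunk-aligned.
    ///
    /// Illustrative prove/verify sketch (assumes a merkleized bitmap and a hypothetical `bit`):
    ///
    /// ```ignore
    /// let root = bitmap.root(&mut hasher).await?;
    /// let (proof, chunk) = bitmap.proof(&mut hasher, bit).await?;
    /// assert!(BitMap::<Sha256, 32>::verify_bit_inclusion(
    ///     &mut hasher,
    ///     &proof,
    ///     &chunk,
    ///     bit,
    ///     &root
    /// ));
    /// ```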
    pub async fn proof(
        &self,
        hasher: &mut impl Hasher<H>,
        bit: u64,
    ) -> Result<(Proof<H::Digest>, [u8; N]), Error> {
        if bit >= self.len() {
            return Err(Error::BitOutOfBounds(bit, self.len()));
        }
        if self.is_dirty() {
            return Err(Error::DirtyState);
        }

        let chunk = *self.get_chunk_containing(bit);
        let chunk_loc = Location::from(PrunableBitMap::<N>::unpruned_chunk(bit));
        let (last_chunk, next_bit) = self.bitmap.last_chunk();

        // The bit lies in the trailing partial chunk, which has no leaf in the MMR. The MMR root
        // alone suffices to verify it.
        if chunk_loc == self.mmr.leaves() {
            assert!(next_bit > 0);
            return Ok((
                Proof {
                    size: Position::new(self.len()),
                    digests: vec![self.mmr.root(hasher)],
                },
                chunk,
            ));
        }

        let range = chunk_loc..chunk_loc + 1;
        let mut proof = verification::range_proof(&self.mmr, range).await?;
        // Encode the bitmap's bit length (rather than the MMR size) in the proof.
        proof.size = Position::new(self.len());
        if next_bit == Self::CHUNK_SIZE_BITS {
            // The bitmap is chunk-aligned, so there is no partial chunk to fold in.
            return Ok((proof, chunk));
        }

        // Append the digest of the partial chunk, which the verifier needs to reconstruct the
        // root.
        let last_chunk_digest = hasher.digest(last_chunk);
        proof.digests.push(last_chunk_digest);

        Ok((proof, chunk))
    }

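    /// Returns true if `proof` proves that the chunk containing the given bit is included in
    /// the bitmap committed to by `root`. `proof.size` encodes the bitmap's bit length, from
    /// which the verifier derives the underlying MMR size and whether a partial chunk digest is
    /// expected at the end of the proof.
    ///
    /// Verifier-side sketch (hypothetical `proof`, `chunk`, `bit`, and `root` received from a
    /// prover):
    ///
    /// ```ignore
    /// let mut hasher = StandardHasher::<Sha256>::new();
    /// if BitMap::<Sha256, 32>::verify_bit_inclusion(&mut hasher, &proof, &chunk, bit, &root) {
    ///     let value = BitMap::<Sha256, 32>::get_bit_from_chunk(&chunk, bit);
    ///     // `value` is now an authenticated bit of the bitmap.
    /// }
    /// ```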
    pub fn verify_bit_inclusion(
        hasher: &mut impl Hasher<H>,
        proof: &Proof<H::Digest>,
        chunk: &[u8; N],
        bit: u64,
        root: &H::Digest,
    ) -> bool {
        let bit_len = *proof.size;
        if bit >= bit_len {
            debug!(bit_len, bit, "tried to verify non-existent bit");
            return false;
        }

        // Reconstruct the size of the underlying MMR from the bitmap's bit length.
        let leaves = PrunableBitMap::<N>::unpruned_chunk(bit_len);
        let size = Position::try_from(Location::new_unchecked(leaves as u64))
            .expect("leaf count should be a valid location");
        let mut mmr_proof = Proof::<H::Digest> {
            size,
            digests: proof.digests.clone(),
        };

        let loc = PrunableBitMap::<N>::unpruned_chunk(bit);
        if bit_len % Self::CHUNK_SIZE_BITS == 0 {
            // The bitmap is chunk-aligned, so the root is just the MMR root.
            return mmr_proof.verify_element_inclusion(
                hasher,
                chunk,
                Location::new_unchecked(loc as u64),
                root,
            );
        }

        if proof.digests.is_empty() {
            debug!("proof has no digests");
            return false;
        }
        // The last digest is the partial chunk's digest or, for a bit within the partial chunk,
        // the MMR root.
        let last_digest = mmr_proof.digests.pop().unwrap();

        // The bit lies in the trailing partial chunk: the proof must contain exactly the MMR
        // root, and the chunk's digest is recomputed from the chunk itself.
        if leaves == loc {
            if !mmr_proof.digests.is_empty() {
                debug!(
                    digests = mmr_proof.digests.len() + 1,
                    "proof over partial chunk should have exactly 1 digest"
                );
                return false;
            }
            let last_chunk_digest = hasher.digest(chunk);
            let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
            let reconstructed_root = Self::partial_chunk_root(
                hasher.inner(),
                &last_digest,
                next_bit,
                &last_chunk_digest,
            );
            return reconstructed_root == *root;
        }

        // The bit lies in a complete chunk: reconstruct the MMR root from the proof, then fold
        // in the partial chunk digest carried by the proof.
        let mmr_root = match mmr_proof.reconstruct_root(
            hasher,
            &[chunk],
            Location::new_unchecked(loc as u64),
        ) {
            Ok(root) => root,
            Err(error) => {
                debug!(error = ?error, "invalid proof input");
                return false;
            }
        };

        let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
        let reconstructed_root =
            Self::partial_chunk_root(hasher.inner(), &mmr_root, next_bit, &last_digest);

        reconstructed_root == *root
    }

    /// Destroys the bitmap metadata in the given partition.
    pub async fn destroy<C: RStorage + Metrics + Clock>(
        context: C,
        partition: &str,
    ) -> Result<(), Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;

        metadata.destroy().await.map_err(MetadataError)
    }
}

impl<H: CHasher, const N: usize> Storage<H::Digest> for BitMap<H, N> {
    fn size(&self) -> Position {
        self.size()
    }

    async fn get_node(&self, position: Position) -> Result<Option<H::Digest>, Error> {
        Ok(self.get_node(position))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::mmr::StandardHasher;
    use commonware_codec::FixedSize;
    use commonware_cryptography::Sha256;
    use commonware_macros::test_traced;
    use commonware_runtime::{deterministic, Runner as _};

    const SHA256_SIZE: usize = <Sha256 as CHasher>::Digest::SIZE;

    impl<H: CHasher, const N: usize> BitMap<H, N> {
        /// Appends a byte's worth of bits to the bitmap.
        fn push_byte(&mut self, byte: u8) {
            self.bitmap.push_byte(byte);
        }

        /// Appends a whole chunk's worth of bits to the bitmap.
        fn push_chunk(&mut self, chunk: &[u8; N]) {
            self.bitmap.push_chunk(chunk);
        }

        /// Returns the position of the MMR leaf for the chunk containing the given bit.
        pub(crate) fn leaf_pos(bit: u64) -> Position {
            let chunk = PrunableBitMap::<N>::unpruned_chunk(bit);
            let chunk = Location::new_unchecked(chunk as u64);
            Position::try_from(chunk).expect("chunk should never overflow MAX_LOCATION")
        }
    }

    /// Builds a deterministic N-byte test chunk from a seed string.
    fn test_chunk<const N: usize>(s: &[u8]) -> [u8; N] {
        assert_eq!(N % 32, 0);
        let mut vec: Vec<u8> = Vec::new();
        for _ in 0..N / 32 {
            vec.extend(Sha256::hash(s).iter());
        }

        vec.try_into().unwrap()
    }

    #[test_traced]
    fn test_bitmap_verify_empty_proof() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::new();
            let proof = Proof {
                size: Position::new(100),
                digests: Vec::new(),
            };
            assert!(
                !BitMap::<Sha256, SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &[0u8; SHA256_SIZE],
                    0,
                    &Sha256::fill(0x00),
                ),
                "proof without digests shouldn't verify or panic"
            );
        });
    }

    #[test_traced]
    fn test_bitmap_empty_then_one() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap: BitMap<Sha256, SHA256_SIZE> = BitMap::new();
            assert_eq!(bitmap.len(), 0);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            bitmap.prune_to_bit(0).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);

            // Add a single bit: the root should change.
            let mut hasher = StandardHasher::new();
            let root = bitmap.root(&mut hasher).await.unwrap();
            bitmap.push(true);
            bitmap.merkleize(&mut hasher).await.unwrap();
            let new_root = bitmap.root(&mut hasher).await.unwrap();
            assert_ne!(root, new_root);
            let root = new_root;
            // Pruning within the partial chunk prunes nothing and leaves the root unchanged.
            bitmap.prune_to_bit(1).unwrap();
            assert_eq!(bitmap.len(), 1);
            assert_ne!(bitmap.last_chunk().0, &[0u8; SHA256_SIZE]);
            assert_eq!(bitmap.last_chunk().1, 1);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            assert_eq!(root, bitmap.root(&mut hasher).await.unwrap());

            // Fill up the rest of the chunk.
            for i in 0..(BitMap::<Sha256, SHA256_SIZE>::CHUNK_SIZE_BITS - 1) {
                bitmap.push(i % 2 != 0);
            }
            bitmap.merkleize(&mut hasher).await.unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_ne!(root, bitmap.root(&mut hasher).await.unwrap());
            let root = bitmap.root(&mut hasher).await.unwrap();

            // Any bit in the (only) chunk should be provable; bits beyond it should not.
            let (proof, chunk) = bitmap.proof(&mut hasher, 0).await.unwrap();
            assert!(
                BitMap::verify_bit_inclusion(&mut hasher, &proof, &chunk, 255, &root),
                "failed to prove bit in only chunk"
            );
            assert!(
                !BitMap::verify_bit_inclusion(&mut hasher, &proof, &chunk, 256, &root),
                "should not be able to prove bit outside of chunk"
            );

            // Pruning the now-complete chunk doesn't affect the bit count or the root.
            bitmap.prune_to_bit(256).unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 1);
            assert_eq!(bitmap.bitmap.pruned_bits(), 256);
            assert_eq!(root, bitmap.root(&mut hasher).await.unwrap());

            // Pruning to an already-pruned point is a no-op.
            bitmap.prune_to_bit(10).unwrap();
            assert_eq!(root, bitmap.root(&mut hasher).await.unwrap());
        });
    }

    #[test_traced]
    fn test_bitmap_building() {
        // Build the same bitmap in multiple ways and confirm the roots always match.
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let test_chunk = test_chunk(b"test");
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();

            // Build a bitmap from two chunks of bits: one pushed whole, one pushed bit by bit.
            let mut bitmap = BitMap::<_, SHA256_SIZE>::new();
            bitmap.push_chunk(&test_chunk);
            for b in test_chunk {
                for j in 0..8 {
                    let mask = 1 << j;
                    let bit = (b & mask) != 0;
                    bitmap.push(bit);
                }
            }
            assert_eq!(bitmap.len(), 256 * 2);

            bitmap.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root(&mut hasher).await.unwrap();
            let inner_root = bitmap.mmr.root(&mut hasher);
            assert_eq!(root, inner_root);

            {
                // The same bits pushed chunk by chunk should produce the same root.
                let mut bitmap = BitMap::<_, SHA256_SIZE>::default();
                bitmap.push_chunk(&test_chunk);
                bitmap.push_chunk(&test_chunk);
                bitmap.merkleize(&mut hasher).await.unwrap();
                let same_root = bitmap.root(&mut hasher).await.unwrap();
                assert_eq!(root, same_root);
            }
            {
                // The same bits pushed byte by byte should produce the same root.
                let mut bitmap = BitMap::<_, SHA256_SIZE>::default();
                bitmap.push_chunk(&test_chunk);
                for b in test_chunk {
                    bitmap.push_byte(b);
                }
                bitmap.merkleize(&mut hasher).await.unwrap();
                let same_root = bitmap.root(&mut hasher).await.unwrap();
                assert_eq!(root, same_root);
            }
        });
    }

    #[test_traced]
    #[should_panic(expected = "cannot add chunk")]
    fn test_bitmap_build_chunked_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = BitMap::<Sha256, SHA256_SIZE>::new();
            bitmap.push_chunk(&test_chunk(b"test"));
            bitmap.push(true);
            bitmap.push_chunk(&test_chunk(b"panic"));
        });
    }

    #[test_traced]
    #[should_panic(expected = "cannot add byte")]
    fn test_bitmap_build_byte_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = BitMap::<Sha256, SHA256_SIZE>::new();
            bitmap.push_chunk(&test_chunk(b"test"));
            bitmap.push(true);
            bitmap.push_byte(0x01);
        });
    }

    #[test_traced]
    #[should_panic(expected = "out of bounds")]
    fn test_bitmap_get_out_of_bounds_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = BitMap::<Sha256, SHA256_SIZE>::new();
            bitmap.push_chunk(&test_chunk(b"test"));
            bitmap.get_bit(256);
        });
    }

    #[test_traced]
    #[should_panic(expected = "pruned")]
    fn test_bitmap_get_pruned_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = BitMap::<Sha256, SHA256_SIZE>::new();
            bitmap.push_chunk(&test_chunk(b"test"));
            bitmap.push_chunk(&test_chunk(b"test2"));
            let mut hasher = StandardHasher::new();
            bitmap.merkleize(&mut hasher).await.unwrap();

            bitmap.prune_to_bit(256).unwrap();
            bitmap.get_bit(255);
        });
    }

    #[test_traced]
    fn test_bitmap_root_boundaries() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            // Build a starting bitmap with two chunks worth of bits.
            let mut bitmap = BitMap::<Sha256, SHA256_SIZE>::default();
            let mut hasher = StandardHasher::new();
            bitmap.push_chunk(&test_chunk(b"test"));
            bitmap.push_chunk(&test_chunk(b"test2"));
            bitmap.merkleize(&mut hasher).await.unwrap();

            let root = bitmap.root(&mut hasher).await.unwrap();

            // Add a single bit, starting a new partial chunk.
            bitmap.push(true);
            bitmap.merkleize(&mut hasher).await.unwrap();
            let new_root = bitmap.root(&mut hasher).await.unwrap();
            assert_ne!(root, new_root);
            // The partial chunk has no leaf in the MMR yet.
            assert_eq!(bitmap.mmr.size(), 3);

            // Fill up the partial chunk; each added bit should change the root.
            for _ in 0..(SHA256_SIZE * 8 - 1) {
                bitmap.push(false);
                bitmap.merkleize(&mut hasher).await.unwrap();
                let newer_root = bitmap.root(&mut hasher).await.unwrap();
                assert_ne!(new_root, newer_root);
            }
            // Completing the chunk should have added a new leaf to the MMR.
            assert_eq!(bitmap.mmr.size(), 4);

            bitmap.push(false);
            assert_eq!(bitmap.len(), 256 * 3 + 1);
            bitmap.merkleize(&mut hasher).await.unwrap();
            let newer_root = bitmap.root(&mut hasher).await.unwrap();
            assert_ne!(new_root, newer_root);

            // Pruning to the current length prunes all complete chunks without changing the root.
            bitmap.prune_to_bit(bitmap.len()).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 3);
            assert_eq!(bitmap.len(), 256 * 3 + 1);
            assert_eq!(newer_root, bitmap.root(&mut hasher).await.unwrap());
        });
    }

    #[test_traced]
    fn test_bitmap_get_set_bits() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            // Build a bitmap with four full chunks plus 11 extra bits.
            let mut bitmap = BitMap::<Sha256, SHA256_SIZE>::default();
            let mut hasher = StandardHasher::new();
            bitmap.push_chunk(&test_chunk(b"test"));
            bitmap.push_chunk(&test_chunk(b"test2"));
            bitmap.push_chunk(&test_chunk(b"test3"));
            bitmap.push_chunk(&test_chunk(b"test4"));
            bitmap.push_byte(0xF1);
            bitmap.push(true);
            bitmap.push(false);
            bitmap.push(true);

            bitmap.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root(&mut hasher).await.unwrap();

            // Flip each bit and confirm the root changes, then flip it back and confirm the
            // original root is restored.
            for bit_pos in (0..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                bitmap.set_bit(bit_pos, !bit);
                bitmap.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root(&mut hasher).await.unwrap();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                bitmap.set_bit(bit_pos, bit);
                bitmap.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root(&mut hasher).await.unwrap();
                assert_eq!(root, new_root);
            }

            // Repeat the exercise after pruning the first two chunks.
            let start_bit = (SHA256_SIZE * 8 * 2) as u64;
            bitmap.prune_to_bit(start_bit).unwrap();
            for bit_pos in (start_bit..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                bitmap.set_bit(bit_pos, !bit);
                bitmap.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root(&mut hasher).await.unwrap();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                bitmap.set_bit(bit_pos, bit);
                bitmap.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root(&mut hasher).await.unwrap();
                assert_eq!(root, new_root);
            }
        });
    }

    /// Returns a copy of `chunk` with the given bit flipped.
    fn flip_bit<const N: usize>(bit: u64, chunk: &[u8; N]) -> [u8; N] {
        let byte = PrunableBitMap::<N>::chunk_byte_offset(bit);
        let mask = PrunableBitMap::<N>::chunk_byte_bitmask(bit);
        let mut tmp = chunk.to_vec();
        tmp[byte] ^= mask;
        tmp.try_into().unwrap()
    }

    #[test_traced]
    fn test_bitmap_mmr_proof_verification() {
        test_bitmap_mmr_proof_verification_n::<32>();
        test_bitmap_mmr_proof_verification_n::<64>();
    }

    fn test_bitmap_mmr_proof_verification_n<const N: usize>() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            // Build a bitmap with 10 chunks plus 13 extra bits.
            let mut hasher = StandardHasher::new();
            let mut bitmap = BitMap::<Sha256, N>::new();
            for i in 0u32..10 {
                bitmap.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            bitmap.push_byte(0xA6);
            bitmap.push(true);
            bitmap.push(false);
            bitmap.push(true);
            bitmap.push(true);
            bitmap.push(false);

            bitmap.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root(&mut hasher).await.unwrap();

            // Prune in several steps, confirming every unpruned bit remains provable against
            // the original root, and that corrupted chunks fail verification.
            for prune_to_bit in (0..bitmap.len()).step_by(251) {
                assert_eq!(bitmap.root(&mut hasher).await.unwrap(), root);
                bitmap.prune_to_bit(prune_to_bit).unwrap();
                for i in prune_to_bit..bitmap.len() {
                    let (proof, chunk) = bitmap.proof(&mut hasher, i).await.unwrap();

                    assert!(
                        BitMap::<_, N>::verify_bit_inclusion(&mut hasher, &proof, &chunk, i, &root),
                        "failed to prove bit {i}",
                    );

                    // Flipping the bit in the chunk should invalidate the proof.
                    let corrupted = flip_bit(i, &chunk);
                    assert!(
                        !BitMap::<_, N>::verify_bit_inclusion(
                            &mut hasher,
                            &proof,
                            &corrupted,
                            i,
                            &root
                        ),
                        "proving bit {i} after flipping should have failed",
                    );
                }
            }
        })
    }

    #[test_traced]
    fn test_bitmap_persistence() {
        const PARTITION: &str = "bitmap_test";
        const FULL_CHUNK_COUNT: usize = 100;

        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            // Restoring from an empty partition should yield an empty bitmap.
            let mut bitmap =
                BitMap::<Sha256, SHA256_SIZE>::restore_pruned(context.clone(), PARTITION, None)
                    .await
                    .unwrap();
            assert_eq!(bitmap.len(), 0);

            // Add a full set of chunks and capture the chunk-aligned root.
            let mut hasher = StandardHasher::new();
            for i in 0..FULL_CHUNK_COUNT {
                bitmap.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            bitmap.merkleize(&mut hasher).await.unwrap();
            let chunk_aligned_root = bitmap.root(&mut hasher).await.unwrap();

            // Add a few extra bits beyond the last full chunk.
            bitmap.push_byte(0xA6);
            bitmap.push(true);
            bitmap.push(false);
            bitmap.push(true);
            bitmap.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root(&mut hasher).await.unwrap();

            // Repeatedly prune, persist, restore, and replay, confirming the roots match.
            for i in (10..=FULL_CHUNK_COUNT).step_by(10) {
                bitmap
                    .prune_to_bit(
                        (i * BitMap::<Sha256, SHA256_SIZE>::CHUNK_SIZE_BITS as usize) as u64,
                    )
                    .unwrap();
                bitmap
                    .write_pruned(context.clone(), PARTITION)
                    .await
                    .unwrap();
                bitmap = BitMap::<_, SHA256_SIZE>::restore_pruned(context.clone(), PARTITION, None)
                    .await
                    .unwrap();
                let _ = bitmap.root(&mut hasher).await.unwrap();

                // Replay the unpruned chunks.
                for j in i..FULL_CHUNK_COUNT {
                    bitmap.push_chunk(&test_chunk(format!("test{j}").as_bytes()));
                }
                assert_eq!(bitmap.bitmap.pruned_chunks(), i);
                assert_eq!(bitmap.len(), FULL_CHUNK_COUNT as u64 * 256);
                bitmap.merkleize(&mut hasher).await.unwrap();
                assert_eq!(bitmap.root(&mut hasher).await.unwrap(), chunk_aligned_root);

                // Replay the trailing partial-chunk bits.
                bitmap.push_byte(0xA6);
                bitmap.push(true);
                bitmap.push(false);
                bitmap.push(true);
                assert_eq!(bitmap.root(&mut hasher).await.unwrap(), root);
            }
        });
    }

    #[test_traced]
    fn test_bitmap_prune_to_bit_dirty_state() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = BitMap::<Sha256, SHA256_SIZE>::new();
            bitmap.push_chunk(&test_chunk(b"test"));
            bitmap.push_chunk(&test_chunk(b"test2"));
            let mut hasher = StandardHasher::new();
            bitmap.merkleize(&mut hasher).await.unwrap();

            // Modify a bit to make the bitmap dirty.
            bitmap.set_bit(0, !bitmap.get_bit(0));

            // Pruning in the dirty state should fail.
            assert!(bitmap.is_dirty(), "Bitmap should be dirty after set_bit");
            let result = bitmap.prune_to_bit(256);
            assert!(matches!(result, Err(crate::mmr::Error::DirtyState)));

            // After merkleizing, pruning should succeed.
            bitmap.merkleize(&mut hasher).await.unwrap();
            assert!(bitmap.prune_to_bit(256).is_ok());
        });
    }

    #[test_traced]
    fn test_bitmap_proof_out_of_bounds() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = BitMap::<Sha256, SHA256_SIZE>::new();
            bitmap.push_chunk(&test_chunk(b"test"));
            let mut hasher = StandardHasher::new();
            bitmap.merkleize(&mut hasher).await.unwrap();

            // Proofs for bits at or beyond the bitmap's length should fail.
            let result = bitmap.proof(&mut hasher, 256).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 256 && size == 256));

            let result = bitmap.proof(&mut hasher, 1000).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 1000 && size == 256));

            // In-bounds bits should be provable.
            assert!(bitmap.proof(&mut hasher, 0).await.is_ok());
            assert!(bitmap.proof(&mut hasher, 255).await.is_ok());
        });
    }

    #[test_traced]
    fn test_bitmap_proof_dirty_state() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = BitMap::<Sha256, SHA256_SIZE>::new();
            bitmap.push_chunk(&test_chunk(b"test"));
            let mut hasher = StandardHasher::new();
            bitmap.merkleize(&mut hasher).await.unwrap();

            // Modify a bit to make the bitmap dirty.
            bitmap.set_bit(0, !bitmap.get_bit(0));

            // Proof generation in the dirty state should fail.
            assert!(bitmap.is_dirty(), "Bitmap should be dirty after set_bit");
            let result = bitmap.proof(&mut hasher, 0).await;
            assert!(matches!(result, Err(crate::mmr::Error::DirtyState)));

            // After merkleizing, proof generation should succeed.
            bitmap.merkleize(&mut hasher).await.unwrap();
            assert!(bitmap.proof(&mut hasher, 0).await.is_ok());
        });
    }
}
1124}