use crate::{
    metadata::{Config as MConfig, Metadata},
    mmr::{
        hasher::Hasher,
        iterator::nodes_to_pin,
        mem::{Clean, CleanMmr, Config, Dirty, Mmr, State},
        storage::Storage,
        verification,
        Error::{self, *},
        Location, Position, Proof,
    },
};
use commonware_codec::DecodeExt;
use commonware_cryptography::Digest;
use commonware_parallel::ThreadPool;
use commonware_runtime::{Clock, Metrics, Storage as RStorage};
use commonware_utils::{bitmap::Prunable as PrunableBitMap, sequence::prefixed_u64::U64};
use std::collections::HashSet;
use tracing::{debug, error, warn};

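/// A [BitMap] in the [Clean] state: all updates have been merkleized and the root is cached.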
pub type CleanBitMap<D, const N: usize> = BitMap<D, N, Clean<D>>;

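/// A [BitMap] in the [Dirty] state: it may contain updates that have not yet been merkleized.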
pub type DirtyBitMap<D, const N: usize> = BitMap<D, N, Dirty>;

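/// An authenticated bitmap whose bits are grouped into N-byte chunks, with each complete chunk
/// stored as a leaf of an MMR. Bits in a trailing partial chunk are folded into the root
/// separately (see [Self::partial_chunk_root]).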
pub struct BitMap<D: Digest, const N: usize, S: State<D> = Clean<D>> {
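    /// The underlying prunable bitmap, stored as N-byte chunks.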
    bitmap: PrunableBitMap<N>,

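    /// The number of retained (un-pruned) complete chunks that have been added to the MMR.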
    authenticated_len: usize,

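    /// An MMR over the bitmap's complete chunks, one chunk per leaf.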
    mmr: Mmr<D, S>,

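    /// Indices (relative to the pruning boundary) of already-authenticated chunks whose bits
    /// have been modified since the last merkleization.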
    dirty_chunks: HashSet<usize>,

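    /// An optional thread pool for parallelizing merkleization.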
    pool: Option<ThreadPool>,

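    /// The cached root digest, always present in the [Clean] state.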
    cached_root: Option<D>,
}

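/// Prefix byte for metadata keys that store pinned MMR nodes.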
const NODE_PREFIX: u8 = 0;

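/// Prefix byte for the metadata key that stores the number of pruned chunks.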
const PRUNED_CHUNKS_PREFIX: u8 = 1;

impl<D: Digest, const N: usize, S: State<D>> BitMap<D, N, S> {
    pub const CHUNK_SIZE_BITS: u64 = PrunableBitMap::<N>::CHUNK_SIZE_BITS;

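    /// Returns the size of the underlying MMR (in nodes), not the number of bits.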
    #[inline]
    pub fn size(&self) -> Position {
        self.mmr.size()
    }

    #[inline]
    pub const fn len(&self) -> u64 {
        self.bitmap.len()
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub const fn pruned_bits(&self) -> u64 {
        self.bitmap.pruned_bits()
    }

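    /// Returns the number of complete (fully filled) chunks, excluding any trailing partial chunk.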
    #[inline]
    fn complete_chunks(&self) -> usize {
        let chunks_len = self.bitmap.chunks_len();
        if self.bitmap.is_chunk_aligned() {
            chunks_len
        } else {
            chunks_len.checked_sub(1).unwrap()
        }
    }

    #[inline]
    pub fn last_chunk(&self) -> (&[u8; N], u64) {
        self.bitmap.last_chunk()
    }

    #[inline]
    pub fn get_chunk_containing(&self, bit: u64) -> &[u8; N] {
        self.bitmap.get_chunk_containing(bit)
    }

    #[inline]
    pub fn get_bit(&self, bit: u64) -> bool {
        self.bitmap.get_bit(bit)
    }

    #[inline]
    pub const fn get_bit_from_chunk(chunk: &[u8; N], bit: u64) -> bool {
        PrunableBitMap::<N>::get_bit_from_chunk(chunk, bit)
    }

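    /// Computes the bitmap root when the last chunk is only partially full, by hashing the MMR
    /// root together with the number of bits in the partial chunk and that chunk's digest.
    ///
    /// # Panics
    ///
    /// Panics if `next_bit` is 0 or not less than [Self::CHUNK_SIZE_BITS].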
    pub fn partial_chunk_root(
        hasher: &mut impl commonware_cryptography::Hasher<Digest = D>,
        mmr_root: &D,
        next_bit: u64,
        last_chunk_digest: &D,
    ) -> D {
        assert!(next_bit > 0);
        assert!(next_bit < Self::CHUNK_SIZE_BITS);
        hasher.update(mmr_root);
        hasher.update(&next_bit.to_be_bytes());
        hasher.update(last_chunk_digest);
        hasher.finalize()
    }

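    /// Verifies that `chunk` is the chunk containing `bit` within the bitmap committed to by
    /// `root`. The proof's `size` field is interpreted as the bitmap's length in bits.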
    pub fn verify_bit_inclusion(
        hasher: &mut impl Hasher<D>,
        proof: &Proof<D>,
        chunk: &[u8; N],
        bit: u64,
        root: &D,
    ) -> bool {
        let bit_len = *proof.size;
        if bit >= bit_len {
            debug!(bit_len, bit, "tried to verify non-existent bit");
            return false;
        }

        let leaves = PrunableBitMap::<N>::unpruned_chunk(bit_len);
        let size = Position::try_from(Location::new_unchecked(leaves as u64))
            .expect("chunk_loc returned invalid location");
        let mut mmr_proof = Proof {
            size,
            digests: proof.digests.clone(),
        };

        let loc = PrunableBitMap::<N>::unpruned_chunk(bit);
        if bit_len.is_multiple_of(Self::CHUNK_SIZE_BITS) {
            return mmr_proof.verify_element_inclusion(
                hasher,
                chunk,
                Location::new_unchecked(loc as u64),
                root,
            );
        }

        if proof.digests.is_empty() {
            debug!("proof has no digests");
            return false;
        }
        let last_digest = mmr_proof.digests.pop().unwrap();

        if leaves == loc {
            if !mmr_proof.digests.is_empty() {
                debug!(
                    digests = mmr_proof.digests.len() + 1,
                    "proof over partial chunk should have exactly 1 digest"
                );
                return false;
            }
            let last_chunk_digest = hasher.digest(chunk);
            let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
            let reconstructed_root = Self::partial_chunk_root(
                hasher.inner(),
                &last_digest,
                next_bit,
                &last_chunk_digest,
            );
            return reconstructed_root == *root;
        }

        let mmr_root =
            match mmr_proof.reconstruct_root(hasher, &[chunk], Location::new_unchecked(loc as u64))
            {
                Ok(root) => root,
                Err(error) => {
                    debug!(error = ?error, "invalid proof input");
                    return false;
                }
            };

        let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
        let reconstructed_root =
            Self::partial_chunk_root(hasher.inner(), &mmr_root, next_bit, &last_digest);

        reconstructed_root == *root
    }

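    /// Destroys the bitmap metadata stored in `partition`.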
    pub async fn destroy<C: RStorage + Metrics + Clock>(
        context: C,
        partition: &str,
    ) -> Result<(), Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;

        metadata.destroy().await.map_err(MetadataError)
    }
}

impl<D: Digest, const N: usize> CleanBitMap<D, N> {
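    /// Returns a new, empty bitmap, using `pool` (if provided) to parallelize merkleization.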
    pub fn new(hasher: &mut impl Hasher<D>, pool: Option<ThreadPool>) -> Self {
        let mmr = CleanMmr::new(hasher);
        let cached_root = *mmr.root();
        Self {
            bitmap: PrunableBitMap::new(),
            authenticated_len: 0,
            mmr,
            dirty_chunks: HashSet::new(),
            pool,
            cached_root: Some(cached_root),
        }
    }

    pub fn get_node(&self, position: Position) -> Option<D> {
        self.mmr.get_node(position)
    }

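    /// Restores a bitmap previously persisted with [Self::write_pruned]. Only the pruned state
    /// (pruned chunk count and pinned MMR nodes) is recovered, so any retained bits must be
    /// re-added by the caller.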
    pub async fn restore_pruned<C: RStorage + Metrics + Clock>(
        context: C,
        partition: &str,
        pool: Option<ThreadPool>,
        hasher: &mut impl Hasher<D>,
    ) -> Result<Self, Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let mut metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;

        let key: U64 = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        let pruned_chunks = match metadata.get(&key) {
            Some(bytes) => u64::from_be_bytes(bytes.as_slice().try_into().map_err(|_| {
                error!("pruned chunks value not a valid u64");
                Error::DataCorrupted("pruned chunks value not a valid u64")
            })?),
            None => {
                warn!("bitmap metadata does not contain pruned chunks, initializing as empty");
                0
            }
        } as usize;
        if pruned_chunks == 0 {
            return Ok(Self::new(hasher, pool));
        }
        let mmr_size = Position::try_from(Location::new_unchecked(pruned_chunks as u64))?;

        let mut pinned_nodes = Vec::new();
        for (index, pos) in nodes_to_pin(mmr_size).enumerate() {
            let Some(bytes) = metadata.get(&U64::new(NODE_PREFIX, index as u64)) else {
                error!(?mmr_size, ?pos, "missing pinned node");
                return Err(MissingNode(pos));
            };
            let Ok(digest) = D::decode(bytes.as_ref()) else {
                error!(?mmr_size, ?pos, "could not convert node bytes to digest");
                return Err(MissingNode(pos));
            };
            pinned_nodes.push(digest);
        }

        metadata.sync().await?;
        drop(metadata);

        let mmr = CleanMmr::init(
            Config {
                nodes: Vec::new(),
                pruned_to_pos: mmr_size,
                pinned_nodes,
            },
            hasher,
        )?;

        let bitmap = PrunableBitMap::new_with_pruned_chunks(pruned_chunks)
            .expect("pruned_chunks should never overflow");
        let cached_root = *mmr.root();
        Ok(Self {
            bitmap,
            authenticated_len: 0,
            mmr,
            dirty_chunks: HashSet::new(),
            pool,
            cached_root: Some(cached_root),
        })
    }

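    /// Persists the information necessary to restore the pruned state of this bitmap: the number
    /// of pruned chunks and the MMR nodes pinned at the pruning boundary.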
    pub async fn write_pruned<C: RStorage + Metrics + Clock>(
        &self,
        context: C,
        partition: &str,
    ) -> Result<(), Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let mut metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;
        metadata.clear();

        let key = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        metadata.put(key, self.bitmap.pruned_chunks().to_be_bytes().to_vec());

        let mmr_size =
            Position::try_from(Location::new_unchecked(self.bitmap.pruned_chunks() as u64))?;
        for (i, pos) in nodes_to_pin(mmr_size).enumerate() {
            let digest = self.mmr.get_node_unchecked(pos);
            let key = U64::new(NODE_PREFIX, i as u64);
            metadata.put(key, digest.to_vec());
        }

        metadata.sync().await.map_err(MetadataError)
    }

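    /// Prunes all complete chunks preceding the chunk containing `bit`, along with the
    /// corresponding MMR nodes. Has no effect if that chunk is already pruned.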
    pub fn prune_to_bit(&mut self, bit: u64) -> Result<(), Error> {
        let chunk = PrunableBitMap::<N>::unpruned_chunk(bit);
        if chunk < self.bitmap.pruned_chunks() {
            return Ok(());
        }

        self.bitmap.prune_to_bit(bit);

        self.authenticated_len = self.complete_chunks();

        let mmr_pos = Position::try_from(Location::new_unchecked(chunk as u64)).unwrap();
        self.mmr.prune_to_pos(mmr_pos);
        Ok(())
    }

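    /// Returns the root digest of the authenticated bitmap. The root commits to all bits in the
    /// bitmap, including pruned ones.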
    pub const fn root(&self) -> D {
        self.cached_root
            .expect("CleanBitMap should always have a cached root")
    }

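    /// Returns an inclusion proof for the value of `bit`, along with the chunk of the bitmap
    /// containing that bit. The proof's `size` field is set to the bitmap's length in bits
    /// rather than an MMR size.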
    pub async fn proof(
        &self,
        hasher: &mut impl Hasher<D>,
        bit: u64,
    ) -> Result<(Proof<D>, [u8; N]), Error> {
        if bit >= self.len() {
            return Err(Error::BitOutOfBounds(bit, self.len()));
        }

        let chunk = *self.get_chunk_containing(bit);
        let chunk_loc = Location::from(PrunableBitMap::<N>::unpruned_chunk(bit));
        let (last_chunk, next_bit) = self.bitmap.last_chunk();

        if chunk_loc == self.mmr.leaves() {
            assert!(next_bit > 0);
            return Ok((
                Proof {
                    size: Position::new(self.len()),
                    digests: vec![*self.mmr.root()],
                },
                chunk,
            ));
        }

        let range = chunk_loc..chunk_loc + 1;
        let mut proof = verification::range_proof(&self.mmr, range).await?;
        proof.size = Position::new(self.len());
        if next_bit == Self::CHUNK_SIZE_BITS {
            return Ok((proof, chunk));
        }

        let last_chunk_digest = hasher.digest(last_chunk);
        proof.digests.push(last_chunk_digest);

        Ok((proof, chunk))
    }

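    /// Converts this bitmap into the [Dirty] state so its bits can be modified.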
    pub fn into_dirty(self) -> DirtyBitMap<D, N> {
        DirtyBitMap {
            bitmap: self.bitmap,
            authenticated_len: self.authenticated_len,
            mmr: self.mmr.into_dirty(),
            dirty_chunks: self.dirty_chunks,
            pool: self.pool,
            cached_root: self.cached_root,
        }
    }
}

impl<D: Digest, const N: usize> DirtyBitMap<D, N> {
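    /// Appends a single bit to the end of the bitmap.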
    pub fn push(&mut self, bit: bool) {
        self.bitmap.push(bit);
    }

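    /// Sets the value of the bit at the given position, marking its chunk dirty if that chunk
    /// has already been authenticated.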
    pub fn set_bit(&mut self, bit: u64, value: bool) {
        self.bitmap.set_bit(bit, value);

        let chunk = self.bitmap.pruned_chunk(bit);
        if chunk < self.authenticated_len {
            self.dirty_chunks.insert(chunk);
        }
    }

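    /// Returns the locations of all chunks whose MMR leaves will be added or updated by the next
    /// call to [Self::merkleize]: modified chunks plus complete chunks not yet authenticated.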
    pub fn dirty_chunks(&self) -> Vec<Location> {
        let pruned_chunks = self.bitmap.pruned_chunks();
        let mut chunks: Vec<Location> = self
            .dirty_chunks
            .iter()
            .map(|&chunk| Location::new_unchecked((chunk + pruned_chunks) as u64))
            .collect();

        for i in self.authenticated_len..self.complete_chunks() {
            chunks.push(Location::new_unchecked((i + pruned_chunks) as u64));
        }

        chunks
    }

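    /// Merkleizes all outstanding changes: new complete chunks are appended to the MMR, modified
    /// chunks have their leaves updated, and the bitmap root is recomputed (folding in the
    /// trailing partial chunk if the bitmap is not chunk-aligned). Returns the clean bitmap.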
    pub async fn merkleize(
        mut self,
        hasher: &mut impl Hasher<D>,
    ) -> Result<CleanBitMap<D, N>, Error> {
        let start = self.authenticated_len;
        let end = self.complete_chunks();
        for i in start..end {
            self.mmr.add(hasher, self.bitmap.get_chunk(i));
        }
        self.authenticated_len = end;

        let pruned_chunks = self.bitmap.pruned_chunks();
        let updates = self
            .dirty_chunks
            .iter()
            .map(|chunk| {
                let loc = Location::new_unchecked((*chunk + pruned_chunks) as u64);
                (loc, self.bitmap.get_chunk(*chunk))
            })
            .collect::<Vec<_>>();
        self.mmr
            .update_leaf_batched(hasher, self.pool.clone(), &updates)?;
        self.dirty_chunks.clear();
        let mmr = self.mmr.merkleize(hasher, self.pool.clone());

        let mmr_root = *mmr.root();
        let cached_root = if self.bitmap.is_chunk_aligned() {
            mmr_root
        } else {
            let (last_chunk, next_bit) = self.bitmap.last_chunk();
            let last_chunk_digest = hasher.digest(last_chunk);
            CleanBitMap::<D, N>::partial_chunk_root(
                hasher.inner(),
                &mmr_root,
                next_bit,
                &last_chunk_digest,
            )
        };

        Ok(CleanBitMap {
            bitmap: self.bitmap,
            authenticated_len: self.authenticated_len,
            mmr,
            dirty_chunks: self.dirty_chunks,
            pool: self.pool,
            cached_root: Some(cached_root),
        })
    }
}

impl<D: Digest, const N: usize> Storage<D> for CleanBitMap<D, N> {
    fn size(&self) -> Position {
        self.size()
    }

    async fn get_node(&self, position: Position) -> Result<Option<D>, Error> {
        Ok(self.get_node(position))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::mmr::StandardHasher;
    use commonware_codec::FixedSize;
    use commonware_cryptography::{sha256, Hasher, Sha256};
    use commonware_macros::test_traced;
    use commonware_runtime::{deterministic, Runner as _};

    const SHA256_SIZE: usize = sha256::Digest::SIZE;

    impl<D: Digest, const N: usize, S: State<D>> BitMap<D, N, S> {
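        /// Returns the MMR position of the leaf for the chunk containing `bit`.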
        pub(crate) fn leaf_pos(bit: u64) -> Position {
            let chunk = PrunableBitMap::<N>::unpruned_chunk(bit);
            let chunk = Location::new_unchecked(chunk as u64);
            Position::try_from(chunk).expect("chunk should never overflow MAX_LOCATION")
        }
    }

    impl<D: Digest, const N: usize> DirtyBitMap<D, N> {
        fn push_byte(&mut self, byte: u8) {
            self.bitmap.push_byte(byte);
        }

        fn push_chunk(&mut self, chunk: &[u8; N]) {
            self.bitmap.push_chunk(chunk);
        }
    }

    fn test_chunk<const N: usize>(s: &[u8]) -> [u8; N] {
        assert_eq!(N % 32, 0);
        let mut vec: Vec<u8> = Vec::new();
        for _ in 0..N / 32 {
            vec.extend(Sha256::hash(s).iter());
        }

        vec.try_into().unwrap()
    }

    #[test_traced]
    fn test_bitmap_verify_empty_proof() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let proof = Proof {
                size: Position::new(100),
                digests: Vec::new(),
            };
            assert!(
                !CleanBitMap::<_, SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &[0u8; SHA256_SIZE],
                    0,
                    &Sha256::fill(0x00),
                ),
                "proof without digests shouldn't verify or panic"
            );
        });
    }

    #[test_traced]
    fn test_bitmap_empty_then_one() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let mut bitmap: CleanBitMap<_, SHA256_SIZE> = CleanBitMap::new(&mut hasher, None);
            assert_eq!(bitmap.len(), 0);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            bitmap.prune_to_bit(0).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);

            let root = bitmap.root();
            let mut dirty = bitmap.into_dirty();
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let new_root = bitmap.root();
            assert_ne!(root, new_root);
            let root = new_root;
            bitmap.prune_to_bit(1).unwrap();
            assert_eq!(bitmap.len(), 1);
            assert_ne!(bitmap.last_chunk().0, &[0u8; SHA256_SIZE]);
            assert_eq!(bitmap.last_chunk().1, 1);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            assert_eq!(root, bitmap.root());

            let mut dirty = bitmap.into_dirty();
            for i in 0..(CleanBitMap::<sha256::Digest, SHA256_SIZE>::CHUNK_SIZE_BITS - 1) {
                dirty.push(i % 2 != 0);
            }
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_ne!(root, bitmap.root());
            let root = bitmap.root();

            let (proof, chunk) = bitmap.proof(&mut hasher, 0).await.unwrap();
            assert!(
                CleanBitMap::<_, SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &chunk,
                    255,
                    &root
                ),
                "failed to prove bit in only chunk"
            );
            assert!(
                !CleanBitMap::<_, SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &chunk,
                    256,
                    &root
                ),
                "should not be able to prove bit outside of chunk"
            );

            bitmap.prune_to_bit(256).unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 1);
            assert_eq!(bitmap.bitmap.pruned_bits(), 256);
            assert_eq!(root, bitmap.root());

            bitmap.prune_to_bit(10).unwrap();
            assert_eq!(root, bitmap.root());
        });
    }

    #[test_traced]
    fn test_bitmap_building() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let test_chunk = test_chunk(b"test");
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();

            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk);
            for b in test_chunk {
                for j in 0..8 {
                    let mask = 1 << j;
                    let bit = (b & mask) != 0;
                    dirty.push(bit);
                }
            }
            assert_eq!(dirty.len(), 256 * 2);

            let bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();
            let inner_root = *bitmap.mmr.root();
            assert_eq!(root, inner_root);

            {
                let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
                let mut dirty = bitmap.into_dirty();
                dirty.push_chunk(&test_chunk);
                dirty.push_chunk(&test_chunk);
                let bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let same_root = bitmap.root();
                assert_eq!(root, same_root);
            }
            {
                let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
                let mut dirty = bitmap.into_dirty();
                dirty.push_chunk(&test_chunk);
                for b in test_chunk {
                    dirty.push_byte(b);
                }
                let bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let same_root = bitmap.root();
                assert_eq!(root, same_root);
            }
        });
    }

    #[test_traced]
    #[should_panic(expected = "cannot add chunk")]
    fn test_bitmap_build_chunked_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push(true);
            dirty.push_chunk(&test_chunk(b"panic"));
        });
    }

    #[test_traced]
    #[should_panic(expected = "cannot add byte")]
    fn test_bitmap_build_byte_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push(true);
            dirty.push_byte(0x01);
        });
    }

    #[test_traced]
    #[should_panic(expected = "out of bounds")]
    fn test_bitmap_get_out_of_bounds_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.get_bit(256);
        });
    }

    #[test_traced]
    #[should_panic(expected = "pruned")]
    fn test_bitmap_get_pruned_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();

            bitmap.prune_to_bit(256).unwrap();
            bitmap.get_bit(255);
        });
    }

    #[test_traced]
    fn test_bitmap_root_boundaries() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();

            let root = bitmap.root();

            let mut dirty = bitmap.into_dirty();
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let new_root = bitmap.root();
            assert_ne!(root, new_root);
            // Two complete chunk leaves plus their parent; the trailing partial chunk adds no leaf.
            assert_eq!(bitmap.mmr.size(), 3);

            // Complete the third chunk one bit at a time; each intermediate root should differ
            // from `new_root`.
            for _ in 0..(SHA256_SIZE * 8 - 1) {
                let mut dirty = bitmap.into_dirty();
                dirty.push(false);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let newer_root = bitmap.root();
                assert_ne!(new_root, newer_root);
            }
            // The third chunk is now complete, so it has been added to the MMR as a leaf.
            assert_eq!(bitmap.mmr.size(), 4);

            let mut dirty = bitmap.into_dirty();
            dirty.push(false);
            assert_eq!(dirty.len(), 256 * 3 + 1);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let newer_root = bitmap.root();
            assert_ne!(new_root, newer_root);

            bitmap.prune_to_bit(bitmap.len()).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 3);
            assert_eq!(bitmap.len(), 256 * 3 + 1);
            assert_eq!(newer_root, bitmap.root());
        });
    }

    #[test_traced]
    fn test_bitmap_get_set_bits() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            dirty.push_chunk(&test_chunk(b"test3"));
            dirty.push_chunk(&test_chunk(b"test4"));
            dirty.push_byte(0xF1);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);

            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();

            for bit_pos in (0..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, !bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_eq!(root, new_root);
            }

            let start_bit = (SHA256_SIZE * 8 * 2) as u64;
            bitmap.prune_to_bit(start_bit).unwrap();
            for bit_pos in (start_bit..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, !bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_eq!(root, new_root);
            }
        });
    }

    fn flip_bit<const N: usize>(bit: u64, chunk: &[u8; N]) -> [u8; N] {
        let byte = PrunableBitMap::<N>::chunk_byte_offset(bit);
        let mask = PrunableBitMap::<N>::chunk_byte_bitmask(bit);
        let mut tmp = chunk.to_vec();
        tmp[byte] ^= mask;
        tmp.try_into().unwrap()
    }

    #[test_traced]
    fn test_bitmap_mmr_proof_verification() {
        test_bitmap_mmr_proof_verification_n::<32>();
        test_bitmap_mmr_proof_verification_n::<64>();
    }

    fn test_bitmap_mmr_proof_verification_n<const N: usize>() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap = CleanBitMap::<_, N>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            for i in 0u32..10 {
                dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            dirty.push_byte(0xA6);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);
            dirty.push(true);
            dirty.push(false);

            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();

            for prune_to_bit in (0..bitmap.len()).step_by(251) {
                assert_eq!(bitmap.root(), root);
                bitmap.prune_to_bit(prune_to_bit).unwrap();
                for i in prune_to_bit..bitmap.len() {
                    let (proof, chunk) = bitmap.proof(&mut hasher, i).await.unwrap();

                    assert!(
                        CleanBitMap::<_, N>::verify_bit_inclusion(
                            &mut hasher,
                            &proof,
                            &chunk,
                            i,
                            &root
                        ),
                        "failed to prove bit {i}",
                    );

                    let corrupted = flip_bit(i, &chunk);
                    assert!(
                        !CleanBitMap::<_, N>::verify_bit_inclusion(
                            &mut hasher,
                            &proof,
                            &corrupted,
                            i,
                            &root
                        ),
                        "proving bit {i} after flipping should have failed",
                    );
                }
            }
        })
    }

    #[test_traced]
    fn test_bitmap_persistence() {
        const PARTITION: &str = "bitmap_test";
        const FULL_CHUNK_COUNT: usize = 100;

        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::restore_pruned(
                context.clone(),
                PARTITION,
                None,
                &mut hasher,
            )
            .await
            .unwrap();
            assert_eq!(bitmap.len(), 0);

            let mut dirty = bitmap.into_dirty();
            for i in 0..FULL_CHUNK_COUNT {
                dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let chunk_aligned_root = bitmap.root();

            let mut dirty = bitmap.into_dirty();
            dirty.push_byte(0xA6);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();

            for i in (10..=FULL_CHUNK_COUNT).step_by(10) {
                bitmap
                    .prune_to_bit(
                        (i * CleanBitMap::<sha256::Digest, SHA256_SIZE>::CHUNK_SIZE_BITS as usize)
                            as u64,
                    )
                    .unwrap();
                bitmap
                    .write_pruned(context.clone(), PARTITION)
                    .await
                    .unwrap();
                bitmap = CleanBitMap::<_, SHA256_SIZE>::restore_pruned(
                    context.clone(),
                    PARTITION,
                    None,
                    &mut hasher,
                )
                .await
                .unwrap();
                let _ = bitmap.root();

                let mut dirty = bitmap.into_dirty();
                for j in i..FULL_CHUNK_COUNT {
                    dirty.push_chunk(&test_chunk(format!("test{j}").as_bytes()));
                }
                assert_eq!(dirty.bitmap.pruned_chunks(), i);
                assert_eq!(dirty.len(), FULL_CHUNK_COUNT as u64 * 256);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                assert_eq!(bitmap.root(), chunk_aligned_root);

                let mut dirty = bitmap.into_dirty();
                dirty.push_byte(0xA6);
                dirty.push(true);
                dirty.push(false);
                dirty.push(true);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                assert_eq!(bitmap.root(), root);
            }
        });
    }

    #[test_traced]
    fn test_bitmap_proof_out_of_bounds() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            let bitmap = dirty.merkleize(&mut hasher).await.unwrap();

            let result = bitmap.proof(&mut hasher, 256).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 256 && size == 256));

            let result = bitmap.proof(&mut hasher, 1000).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 1000 && size == 256));

            assert!(bitmap.proof(&mut hasher, 0).await.is_ok());
            assert!(bitmap.proof(&mut hasher, 255).await.is_ok());
        });
    }
}