use crate::{
    metadata::{Config as MConfig, Metadata},
    mmr::{
        hasher::Hasher,
        iterator::nodes_to_pin,
        mem::{Clean, CleanMmr, Config, Dirty, Mmr, State},
        storage::Storage,
        verification,
        Error::{self, *},
        Location, Position, Proof,
    },
};
use commonware_codec::DecodeExt;
use commonware_cryptography::Digest;
use commonware_runtime::{Clock, Metrics, Storage as RStorage, ThreadPool};
use commonware_utils::{bitmap::Prunable as PrunableBitMap, sequence::prefixed_u64::U64};
use std::collections::HashSet;
use tracing::{debug, error, warn};

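/// A [BitMap] whose MMR is in the [Clean] state: fully merkleized, with a cached root available.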
pub type CleanBitMap<D, const N: usize> = BitMap<D, N, Clean<D>>;

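/// A [BitMap] whose MMR is in the [Dirty] state: it may hold un-merkleized updates and must be
/// converted back into a [CleanBitMap] via [DirtyBitMap::merkleize] before a root can be produced.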
pub type DirtyBitMap<D, const N: usize> = BitMap<D, N, Dirty>;

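/// A bitmap whose complete `N`-byte chunks are authenticated by an in-memory MMR (one leaf per
/// chunk), supporting bit-level inclusion proofs, pruning, and persistence of the pruned state.
///
/// A minimal usage sketch (assumptions: a SHA-256 digest type and the `StandardHasher` wrapper
/// used in the tests below; error handling elided):
///
/// ```ignore
/// let mut hasher = StandardHasher::<Sha256>::new();
/// let clean = CleanBitMap::<sha256::Digest, 32>::new(&mut hasher, None);
/// let mut dirty = clean.into_dirty();
/// dirty.push(true); // append a bit
/// let clean = dirty.merkleize(&mut hasher).await?; // fold pending changes into the MMR
/// let root = clean.root(); // covers all bits, including a partially filled last chunk
/// ```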
pub struct BitMap<D: Digest, const N: usize, S: State<D> = Clean<D>> {
    /// The underlying (prunable) bitmap of `N`-byte chunks.
    bitmap: PrunableBitMap<N>,

    /// The number of complete chunks that have been added to the MMR as leaves.
    authenticated_len: usize,

    /// The MMR over the bitmap's complete chunks (one leaf per chunk).
    mmr: Mmr<D, S>,

    /// Indices (relative to the pruning boundary) of authenticated chunks modified since the last
    /// merkleization.
    dirty_chunks: HashSet<usize>,

    /// Optional thread pool used when merkleizing updates.
    pool: Option<ThreadPool>,

    /// The most recently computed root; always `Some` for a [Clean] bitmap.
    cached_root: Option<D>,
}

/// Prefix used for metadata keys that store pinned MMR nodes.
const NODE_PREFIX: u8 = 0;

/// Prefix used for the metadata key that stores the pruned chunk count.
const PRUNED_CHUNKS_PREFIX: u8 = 1;

impl<D: Digest, const N: usize, S: State<D>> BitMap<D, N, S> {
    pub const CHUNK_SIZE_BITS: u64 = PrunableBitMap::<N>::CHUNK_SIZE_BITS;

    #[inline]
    pub fn size(&self) -> Position {
        self.mmr.size()
    }

    #[inline]
    pub const fn len(&self) -> u64 {
        self.bitmap.len()
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub const fn pruned_bits(&self) -> u64 {
        self.bitmap.pruned_bits()
    }

    #[inline]
    fn complete_chunks(&self) -> usize {
        let chunks_len = self.bitmap.chunks_len();
        if self.bitmap.is_chunk_aligned() {
            chunks_len
        } else {
            chunks_len.checked_sub(1).unwrap()
        }
    }

    #[inline]
    pub fn last_chunk(&self) -> (&[u8; N], u64) {
        self.bitmap.last_chunk()
    }

    #[inline]
    pub fn get_chunk_containing(&self, bit: u64) -> &[u8; N] {
        self.bitmap.get_chunk_containing(bit)
    }

    #[inline]
    pub fn get_bit(&self, bit: u64) -> bool {
        self.bitmap.get_bit(bit)
    }

    #[inline]
    pub const fn get_bit_from_chunk(chunk: &[u8; N], bit: u64) -> bool {
        PrunableBitMap::<N>::get_bit_from_chunk(chunk, bit)
    }

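    /// Computes the root of a bitmap whose last chunk is only partially filled, by hashing the
    /// MMR root over the complete chunks together with `next_bit` (the number of bits in the
    /// partial chunk) and the digest of that last chunk.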
    pub fn partial_chunk_root(
        hasher: &mut impl commonware_cryptography::Hasher<Digest = D>,
        mmr_root: &D,
        next_bit: u64,
        last_chunk_digest: &D,
    ) -> D {
        assert!(next_bit > 0);
        assert!(next_bit < Self::CHUNK_SIZE_BITS);
        hasher.update(mmr_root);
        hasher.update(&next_bit.to_be_bytes());
        hasher.update(last_chunk_digest);
        hasher.finalize()
    }

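    /// Verifies that `chunk` is the chunk containing `bit` within the bitmap committed to by
    /// `root`, where `proof.size` carries the bitmap's length in bits. For a non-chunk-aligned
    /// length, the final proof digest is expected to be the digest of the partial last chunk, or
    /// the MMR root over the complete chunks when the target bit lies in that partial chunk.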
    pub fn verify_bit_inclusion(
        hasher: &mut impl Hasher<D>,
        proof: &Proof<D>,
        chunk: &[u8; N],
        bit: u64,
        root: &D,
    ) -> bool {
        let bit_len = *proof.size;
        if bit >= bit_len {
            debug!(bit_len, bit, "tried to verify non-existent bit");
            return false;
        }

        let leaves = PrunableBitMap::<N>::unpruned_chunk(bit_len);
        let size = Position::try_from(Location::new_unchecked(leaves as u64))
            .expect("chunk_loc returned invalid location");
        let mut mmr_proof = Proof {
            size,
            digests: proof.digests.clone(),
        };

        let loc = PrunableBitMap::<N>::unpruned_chunk(bit);
        if bit_len.is_multiple_of(Self::CHUNK_SIZE_BITS) {
            return mmr_proof.verify_element_inclusion(
                hasher,
                chunk,
                Location::new_unchecked(loc as u64),
                root,
            );
        }

        if proof.digests.is_empty() {
            debug!("proof has no digests");
            return false;
        }
        let last_digest = mmr_proof.digests.pop().unwrap();

        if leaves == loc {
            if !mmr_proof.digests.is_empty() {
                debug!(
                    digests = mmr_proof.digests.len() + 1,
                    "proof over partial chunk should have exactly 1 digest"
                );
                return false;
            }
            let last_chunk_digest = hasher.digest(chunk);
            let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
            let reconstructed_root = Self::partial_chunk_root(
                hasher.inner(),
                &last_digest,
                next_bit,
                &last_chunk_digest,
            );
            return reconstructed_root == *root;
        };

        let mmr_root =
            match mmr_proof.reconstruct_root(hasher, &[chunk], Location::new_unchecked(loc as u64))
            {
                Ok(root) => root,
                Err(error) => {
                    debug!(error = ?error, "invalid proof input");
                    return false;
                }
            };

        let next_bit = bit_len % Self::CHUNK_SIZE_BITS;
        let reconstructed_root =
            Self::partial_chunk_root(hasher.inner(), &mmr_root, next_bit, &last_digest);

        reconstructed_root == *root
    }

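    /// Destroys the metadata partition used to persist this bitmap's pruned state.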
    pub async fn destroy<C: RStorage + Metrics + Clock>(
        context: C,
        partition: &str,
    ) -> Result<(), Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;

        metadata.destroy().await.map_err(MetadataError)
    }
}

impl<D: Digest, const N: usize> CleanBitMap<D, N> {
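    /// Returns a new, empty bitmap. `pool` is an optional thread pool handed to the MMR when
    /// merkleizing updates.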
    pub fn new(hasher: &mut impl Hasher<D>, pool: Option<ThreadPool>) -> Self {
        let mmr = CleanMmr::new(hasher);
        let cached_root = *mmr.root();
        Self {
            bitmap: PrunableBitMap::new(),
            authenticated_len: 0,
            mmr,
            dirty_chunks: HashSet::new(),
            pool,
            cached_root: Some(cached_root),
        }
    }

    pub fn get_node(&self, position: Position) -> Option<D> {
        self.mmr.get_node(position)
    }

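    /// Restores a bitmap from the pruned state persisted in `partition`, or returns an empty
    /// bitmap if none was written. Only the pruned prefix is recovered; bits appended after the
    /// last [Self::write_pruned] must be re-added by the caller.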
    pub async fn restore_pruned<C: RStorage + Metrics + Clock>(
        context: C,
        partition: &str,
        pool: Option<ThreadPool>,
        hasher: &mut impl Hasher<D>,
    ) -> Result<Self, Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;

        let key: U64 = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        let pruned_chunks = match metadata.get(&key) {
            Some(bytes) => u64::from_be_bytes(bytes.as_slice().try_into().map_err(|_| {
                error!("pruned chunks value not a valid u64");
                Error::DataCorrupted("pruned chunks value not a valid u64")
            })?),
            None => {
                warn!("bitmap metadata does not contain pruned chunks, initializing as empty");
                0
            }
        } as usize;
        if pruned_chunks == 0 {
            return Ok(Self::new(hasher, pool));
        }
        let mmr_size = Position::try_from(Location::new_unchecked(pruned_chunks as u64))?;

        let mut pinned_nodes = Vec::new();
        for (index, pos) in nodes_to_pin(mmr_size).enumerate() {
            let Some(bytes) = metadata.get(&U64::new(NODE_PREFIX, index as u64)) else {
                error!(?mmr_size, ?pos, "missing pinned node");
                return Err(MissingNode(pos));
            };
            let digest = D::decode(bytes.as_ref());
            let Ok(digest) = digest else {
                error!(?mmr_size, ?pos, "could not convert node bytes to digest");
                return Err(MissingNode(pos));
            };
            pinned_nodes.push(digest);
        }

        metadata.close().await?;

        let mmr = CleanMmr::init(
            Config {
                nodes: Vec::new(),
                pruned_to_pos: mmr_size,
                pinned_nodes,
            },
            hasher,
        )?;

        let bitmap = PrunableBitMap::new_with_pruned_chunks(pruned_chunks)
            .expect("pruned_chunks should never overflow");
        let cached_root = *mmr.root();
        Ok(Self {
            bitmap,
            authenticated_len: 0,
            mmr,
            dirty_chunks: HashSet::new(),
            pool,
            cached_root: Some(cached_root),
        })
    }

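    /// Persists the state needed to later restore this bitmap's pruned prefix: the pruned chunk
    /// count and the MMR nodes pinned at that pruning boundary.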
    pub async fn write_pruned<C: RStorage + Metrics + Clock>(
        &self,
        context: C,
        partition: &str,
    ) -> Result<(), Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let mut metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;
        metadata.clear();

        let key = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        metadata.put(key, self.bitmap.pruned_chunks().to_be_bytes().to_vec());

        let mmr_size =
            Position::try_from(Location::new_unchecked(self.bitmap.pruned_chunks() as u64))?;
        for (i, digest) in nodes_to_pin(mmr_size).enumerate() {
            let digest = self.mmr.get_node_unchecked(digest);
            let key = U64::new(NODE_PREFIX, i as u64);
            metadata.put(key, digest.to_vec());
        }

        metadata.close().await.map_err(MetadataError)
    }

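    /// Prunes all chunks preceding the chunk containing `bit` and prunes the MMR to the matching
    /// leaf position. A no-op if that chunk has already been pruned.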
    pub fn prune_to_bit(&mut self, bit: u64) -> Result<(), Error> {
        let chunk = PrunableBitMap::<N>::unpruned_chunk(bit);
        if chunk < self.bitmap.pruned_chunks() {
            return Ok(());
        }

        self.bitmap.prune_to_bit(bit);

        self.authenticated_len = self.complete_chunks();

        let mmr_pos = Position::try_from(Location::new_unchecked(chunk as u64)).unwrap();
        self.mmr.prune_to_pos(mmr_pos);
        Ok(())
    }

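    /// Returns the root digest covering every bit in the bitmap, including those in a partially
    /// filled last chunk.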
    pub const fn root(&self) -> D {
        self.cached_root
            .expect("CleanBitMap should always have a cached root")
    }

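    /// Returns an inclusion proof for the chunk containing `bit`, along with that chunk. The
    /// returned proof's `size` is set to the bitmap's length in bits so it can be checked with
    /// [Self::verify_bit_inclusion].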
    pub async fn proof(
        &self,
        hasher: &mut impl Hasher<D>,
        bit: u64,
    ) -> Result<(Proof<D>, [u8; N]), Error> {
        if bit >= self.len() {
            return Err(Error::BitOutOfBounds(bit, self.len()));
        }

        let chunk = *self.get_chunk_containing(bit);
        let chunk_loc = Location::from(PrunableBitMap::<N>::unpruned_chunk(bit));
        let (last_chunk, next_bit) = self.bitmap.last_chunk();

        if chunk_loc == self.mmr.leaves() {
            assert!(next_bit > 0);
            return Ok((
                Proof {
                    size: Position::new(self.len()),
                    digests: vec![*self.mmr.root()],
                },
                chunk,
            ));
        }

        let range = chunk_loc..chunk_loc + 1;
        let mut proof = verification::range_proof(&self.mmr, range).await?;
        proof.size = Position::new(self.len());
        if next_bit == Self::CHUNK_SIZE_BITS {
            return Ok((proof, chunk));
        }

        let last_chunk_digest = hasher.digest(last_chunk);
        proof.digests.push(last_chunk_digest);

        Ok((proof, chunk))
    }

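    /// Converts this bitmap into its [Dirty] counterpart so bits can be appended or modified.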
    pub fn into_dirty(self) -> DirtyBitMap<D, N> {
        DirtyBitMap {
            bitmap: self.bitmap,
            authenticated_len: self.authenticated_len,
            mmr: self.mmr.into_dirty(),
            dirty_chunks: self.dirty_chunks,
            pool: self.pool,
            cached_root: self.cached_root,
        }
    }
}

impl<D: Digest, const N: usize> DirtyBitMap<D, N> {
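    /// Appends a single bit to the end of the bitmap.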
    pub fn push(&mut self, bit: bool) {
        self.bitmap.push(bit);
    }

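    /// Sets the value of the given bit, marking its chunk dirty if that chunk was already added
    /// to the MMR.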
    pub fn set_bit(&mut self, bit: u64, value: bool) {
        self.bitmap.set_bit(bit, value);

        let chunk = self.bitmap.pruned_chunk(bit);
        if chunk < self.authenticated_len {
            self.dirty_chunks.insert(chunk);
        }
    }

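    /// Returns the locations of all chunks whose MMR leaves are stale: chunks modified via
    /// [Self::set_bit] plus complete chunks not yet added to the MMR.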
    pub fn dirty_chunks(&self) -> Vec<Location> {
        let pruned_chunks = self.bitmap.pruned_chunks();
        let mut chunks: Vec<Location> = self
            .dirty_chunks
            .iter()
            .map(|&chunk| Location::new_unchecked((chunk + pruned_chunks) as u64))
            .collect();

        for i in self.authenticated_len..self.complete_chunks() {
            chunks.push(Location::new_unchecked((i + pruned_chunks) as u64));
        }

        chunks
    }

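    /// Folds all pending changes into the MMR (appending newly completed chunks and updating
    /// modified leaves), recomputes the cached root, and returns the resulting [CleanBitMap].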
    pub async fn merkleize(
        mut self,
        hasher: &mut impl Hasher<D>,
    ) -> Result<CleanBitMap<D, N>, Error> {
        let start = self.authenticated_len;
        let end = self.complete_chunks();
        for i in start..end {
            self.mmr.add(hasher, self.bitmap.get_chunk(i));
        }
        self.authenticated_len = end;

        let pruned_chunks = self.bitmap.pruned_chunks();
        let updates = self
            .dirty_chunks
            .iter()
            .map(|chunk| {
                let loc = Location::new_unchecked((*chunk + pruned_chunks) as u64);
                (loc, self.bitmap.get_chunk(*chunk))
            })
            .collect::<Vec<_>>();
        self.mmr
            .update_leaf_batched(hasher, self.pool.clone(), &updates)?;
        self.dirty_chunks.clear();
        let mmr = self.mmr.merkleize(hasher, self.pool.clone());

        let mmr_root = *mmr.root();
        let cached_root = if self.bitmap.is_chunk_aligned() {
            mmr_root
        } else {
            let (last_chunk, next_bit) = self.bitmap.last_chunk();
            let last_chunk_digest = hasher.digest(last_chunk);
            CleanBitMap::<D, N>::partial_chunk_root(
                hasher.inner(),
                &mmr_root,
                next_bit,
                &last_chunk_digest,
            )
        };

        Ok(CleanBitMap {
            bitmap: self.bitmap,
            authenticated_len: self.authenticated_len,
            mmr,
            dirty_chunks: self.dirty_chunks,
            pool: self.pool,
            cached_root: Some(cached_root),
        })
    }
}

impl<D: Digest, const N: usize> Storage<D> for CleanBitMap<D, N> {
    fn size(&self) -> Position {
        self.size()
    }

    async fn get_node(&self, position: Position) -> Result<Option<D>, Error> {
        Ok(self.get_node(position))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::mmr::StandardHasher;
    use commonware_codec::FixedSize;
    use commonware_cryptography::{sha256, Hasher, Sha256};
    use commonware_macros::test_traced;
    use commonware_runtime::{deterministic, Runner as _};

    const SHA256_SIZE: usize = sha256::Digest::SIZE;

    impl<D: Digest, const N: usize, S: State<D>> BitMap<D, N, S> {
        pub(crate) fn leaf_pos(bit: u64) -> Position {
            let chunk = PrunableBitMap::<N>::unpruned_chunk(bit);
            let chunk = Location::new_unchecked(chunk as u64);
            Position::try_from(chunk).expect("chunk should never overflow MAX_LOCATION")
        }
    }

    impl<D: Digest, const N: usize> DirtyBitMap<D, N> {
        fn push_byte(&mut self, byte: u8) {
            self.bitmap.push_byte(byte);
        }

        fn push_chunk(&mut self, chunk: &[u8; N]) {
            self.bitmap.push_chunk(chunk);
        }
    }

    fn test_chunk<const N: usize>(s: &[u8]) -> [u8; N] {
        assert_eq!(N % 32, 0);
        let mut vec: Vec<u8> = Vec::new();
        for _ in 0..N / 32 {
            vec.extend(Sha256::hash(s).iter());
        }

        vec.try_into().unwrap()
    }

    #[test_traced]
    fn test_bitmap_verify_empty_proof() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let proof = Proof {
                size: Position::new(100),
                digests: Vec::new(),
            };
            assert!(
                !CleanBitMap::<_, SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &[0u8; SHA256_SIZE],
                    0,
                    &Sha256::fill(0x00),
                ),
                "proof without digests shouldn't verify or panic"
            );
        });
    }

    #[test_traced]
    fn test_bitmap_empty_then_one() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let mut bitmap: CleanBitMap<_, SHA256_SIZE> = CleanBitMap::new(&mut hasher, None);
            assert_eq!(bitmap.len(), 0);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            bitmap.prune_to_bit(0).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);

            let root = bitmap.root();
            let mut dirty = bitmap.into_dirty();
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let new_root = bitmap.root();
            assert_ne!(root, new_root);
            let root = new_root;
            bitmap.prune_to_bit(1).unwrap();
            assert_eq!(bitmap.len(), 1);
            assert_ne!(bitmap.last_chunk().0, &[0u8; SHA256_SIZE]);
            assert_eq!(bitmap.last_chunk().1, 1);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 0);
            assert_eq!(root, bitmap.root());

            let mut dirty = bitmap.into_dirty();
            for i in 0..(CleanBitMap::<sha256::Digest, SHA256_SIZE>::CHUNK_SIZE_BITS - 1) {
                dirty.push(i % 2 != 0);
            }
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_ne!(root, bitmap.root());
            let root = bitmap.root();

            let (proof, chunk) = bitmap.proof(&mut hasher, 0).await.unwrap();
            assert!(
                CleanBitMap::<_, SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &chunk,
                    255,
                    &root
                ),
                "failed to prove bit in only chunk"
            );
            assert!(
                !CleanBitMap::<_, SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &chunk,
                    256,
                    &root
                ),
                "should not be able to prove bit outside of chunk"
            );

            bitmap.prune_to_bit(256).unwrap();
            assert_eq!(bitmap.len(), 256);
            assert_eq!(bitmap.bitmap.pruned_chunks(), 1);
            assert_eq!(bitmap.bitmap.pruned_bits(), 256);
            assert_eq!(root, bitmap.root());

            bitmap.prune_to_bit(10).unwrap();
            assert_eq!(root, bitmap.root());
        });
    }

    #[test_traced]
    fn test_bitmap_building() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let test_chunk = test_chunk(b"test");
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();

            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk);
            for b in test_chunk {
                for j in 0..8 {
                    let mask = 1 << j;
                    let bit = (b & mask) != 0;
                    dirty.push(bit);
                }
            }
            assert_eq!(dirty.len(), 256 * 2);

            let bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();
            let inner_root = *bitmap.mmr.root();
            assert_eq!(root, inner_root);

            {
                let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
                let mut dirty = bitmap.into_dirty();
                dirty.push_chunk(&test_chunk);
                dirty.push_chunk(&test_chunk);
                let bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let same_root = bitmap.root();
                assert_eq!(root, same_root);
            }
            {
                let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
                let mut dirty = bitmap.into_dirty();
                dirty.push_chunk(&test_chunk);
                for b in test_chunk {
                    dirty.push_byte(b);
                }
                let bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let same_root = bitmap.root();
                assert_eq!(root, same_root);
            }
        });
    }

    #[test_traced]
    #[should_panic(expected = "cannot add chunk")]
    fn test_bitmap_build_chunked_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push(true);
            dirty.push_chunk(&test_chunk(b"panic"));
        });
    }

    #[test_traced]
    #[should_panic(expected = "cannot add byte")]
    fn test_bitmap_build_byte_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push(true);
            dirty.push_byte(0x01);
        });
    }

    #[test_traced]
    #[should_panic(expected = "out of bounds")]
    fn test_bitmap_get_out_of_bounds_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.get_bit(256);
        });
    }

    #[test_traced]
    #[should_panic(expected = "pruned")]
    fn test_bitmap_get_pruned_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher: StandardHasher<Sha256> = StandardHasher::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();

            bitmap.prune_to_bit(256).unwrap();
            bitmap.get_bit(255);
        });
    }

    #[test_traced]
    fn test_bitmap_root_boundaries() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();

            let root = bitmap.root();

            let mut dirty = bitmap.into_dirty();
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let new_root = bitmap.root();
            assert_ne!(root, new_root);
            assert_eq!(bitmap.mmr.size(), 3);

            for _ in 0..(SHA256_SIZE * 8 - 1) {
                let mut dirty = bitmap.into_dirty();
                dirty.push(false);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let newer_root = bitmap.root();
                assert_ne!(new_root, newer_root);
            }
            assert_eq!(bitmap.mmr.size(), 4);

            let mut dirty = bitmap.into_dirty();
            dirty.push(false);
            assert_eq!(dirty.len(), 256 * 3 + 1);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let newer_root = bitmap.root();
            assert_ne!(new_root, newer_root);

            bitmap.prune_to_bit(bitmap.len()).unwrap();
            assert_eq!(bitmap.bitmap.pruned_chunks(), 3);
            assert_eq!(bitmap.len(), 256 * 3 + 1);
            assert_eq!(newer_root, bitmap.root());
        });
    }

    #[test_traced]
    fn test_bitmap_get_set_bits() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            dirty.push_chunk(&test_chunk(b"test2"));
            dirty.push_chunk(&test_chunk(b"test3"));
            dirty.push_chunk(&test_chunk(b"test4"));
            dirty.push_byte(0xF1);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);

            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();

            for bit_pos in (0..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, !bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_eq!(root, new_root);
            }

            let start_bit = (SHA256_SIZE * 8 * 2) as u64;
            bitmap.prune_to_bit(start_bit).unwrap();
            for bit_pos in (start_bit..bitmap.len()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, !bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                let mut dirty = bitmap.into_dirty();
                dirty.set_bit(bit_pos, bit);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                let new_root = bitmap.root();
                assert_eq!(root, new_root);
            }
        });
    }

    fn flip_bit<const N: usize>(bit: u64, chunk: &[u8; N]) -> [u8; N] {
        let byte = PrunableBitMap::<N>::chunk_byte_offset(bit);
        let mask = PrunableBitMap::<N>::chunk_byte_bitmask(bit);
        let mut tmp = chunk.to_vec();
        tmp[byte] ^= mask;
        tmp.try_into().unwrap()
    }

    #[test_traced]
    fn test_bitmap_mmr_proof_verification() {
        test_bitmap_mmr_proof_verification_n::<32>();
        test_bitmap_mmr_proof_verification_n::<64>();
    }

    fn test_bitmap_mmr_proof_verification_n<const N: usize>() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap = CleanBitMap::<_, N>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            for i in 0u32..10 {
                dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            dirty.push_byte(0xA6);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);
            dirty.push(true);
            dirty.push(false);

            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();

            for prune_to_bit in (0..bitmap.len()).step_by(251) {
                assert_eq!(bitmap.root(), root);
                bitmap.prune_to_bit(prune_to_bit).unwrap();
                for i in prune_to_bit..bitmap.len() {
                    let (proof, chunk) = bitmap.proof(&mut hasher, i).await.unwrap();

                    assert!(
                        CleanBitMap::<_, N>::verify_bit_inclusion(
                            &mut hasher,
                            &proof,
                            &chunk,
                            i,
                            &root
                        ),
                        "failed to prove bit {i}",
                    );

                    let corrupted = flip_bit(i, &chunk);
                    assert!(
                        !CleanBitMap::<_, N>::verify_bit_inclusion(
                            &mut hasher,
                            &proof,
                            &corrupted,
                            i,
                            &root
                        ),
                        "proving bit {i} after flipping should have failed",
                    );
                }
            }
        })
    }

    #[test_traced]
    fn test_bitmap_persistence() {
        const PARTITION: &str = "bitmap_test";
        const FULL_CHUNK_COUNT: usize = 100;

        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::restore_pruned(
                context.clone(),
                PARTITION,
                None,
                &mut hasher,
            )
            .await
            .unwrap();
            assert_eq!(bitmap.len(), 0);

            let mut dirty = bitmap.into_dirty();
            for i in 0..FULL_CHUNK_COUNT {
                dirty.push_chunk(&test_chunk(format!("test{i}").as_bytes()));
            }
            let mut bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let chunk_aligned_root = bitmap.root();

            let mut dirty = bitmap.into_dirty();
            dirty.push_byte(0xA6);
            dirty.push(true);
            dirty.push(false);
            dirty.push(true);
            bitmap = dirty.merkleize(&mut hasher).await.unwrap();
            let root = bitmap.root();

            for i in (10..=FULL_CHUNK_COUNT).step_by(10) {
                bitmap
                    .prune_to_bit(
                        (i * CleanBitMap::<sha256::Digest, SHA256_SIZE>::CHUNK_SIZE_BITS as usize)
                            as u64,
                    )
                    .unwrap();
                bitmap
                    .write_pruned(context.clone(), PARTITION)
                    .await
                    .unwrap();
                bitmap = CleanBitMap::<_, SHA256_SIZE>::restore_pruned(
                    context.clone(),
                    PARTITION,
                    None,
                    &mut hasher,
                )
                .await
                .unwrap();
                let _ = bitmap.root();

                let mut dirty = bitmap.into_dirty();
                for j in i..FULL_CHUNK_COUNT {
                    dirty.push_chunk(&test_chunk(format!("test{j}").as_bytes()));
                }
                assert_eq!(dirty.bitmap.pruned_chunks(), i);
                assert_eq!(dirty.len(), FULL_CHUNK_COUNT as u64 * 256);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                assert_eq!(bitmap.root(), chunk_aligned_root);

                let mut dirty = bitmap.into_dirty();
                dirty.push_byte(0xA6);
                dirty.push(true);
                dirty.push(false);
                dirty.push(true);
                bitmap = dirty.merkleize(&mut hasher).await.unwrap();
                assert_eq!(bitmap.root(), root);
            }
        });
    }

    #[test_traced]
    fn test_bitmap_proof_out_of_bounds() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = StandardHasher::<Sha256>::new();
            let bitmap = CleanBitMap::<_, SHA256_SIZE>::new(&mut hasher, None);
            let mut dirty = bitmap.into_dirty();
            dirty.push_chunk(&test_chunk(b"test"));
            let bitmap = dirty.merkleize(&mut hasher).await.unwrap();

            let result = bitmap.proof(&mut hasher, 256).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 256 && size == 256));

            let result = bitmap.proof(&mut hasher, 1000).await;
            assert!(matches!(result, Err(Error::BitOutOfBounds(offset, size))
                if offset == 1000 && size == 256));

            assert!(bitmap.proof(&mut hasher, 0).await.is_ok());
            assert!(bitmap.proof(&mut hasher, 255).await.is_ok());
        });
    }
}