use crate::{
    metadata::{Config as MConfig, Metadata},
    mmr::{
        iterator::leaf_num_to_pos,
        mem::{Config as MemConfig, Mmr},
        verification::Proof,
        Error,
        Error::*,
        Hasher,
    },
};
use commonware_codec::DecodeExt;
use commonware_cryptography::Hasher as CHasher;
use commonware_runtime::{Clock, Metrics, Storage as RStorage, ThreadPool};
use commonware_utils::sequence::prefixed_u64::U64;
use std::collections::{HashSet, VecDeque};
use tracing::{debug, error, warn};

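/// A bitmap whose chunks are authenticated by a Merkle Mountain Range (MMR), supporting
/// inclusion proofs for individual bits against a root digest, pruning of old chunks, and
/// restoration of the pruned state from a metadata partition.
///
/// Bits are grouped into fixed-size chunks of `N` bytes; each completed chunk becomes one MMR
/// leaf. The trailing (possibly partial) chunk is kept outside the MMR and folded into the root
/// separately.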
pub struct Bitmap<H: CHasher, const N: usize> {
    /// The retained (unpruned) chunks of the bitmap. The last chunk is always present and may be
    /// only partially filled.
    bitmap: VecDeque<[u8; N]>,

    /// The number of chunks in `bitmap` that have already been added to the MMR.
    authenticated_len: usize,

    /// The offset of the next bit to be appended within the last chunk.
    next_bit: u64,

    /// The MMR over all completed chunks.
    mmr: Mmr<H>,

    /// The number of chunks that have been pruned from the front of the bitmap.
    pruned_chunks: usize,

    /// Indices (into `bitmap`) of authenticated chunks that have been modified since the last
    /// `sync`.
    dirty_chunks: HashSet<usize>,
}

impl<H: CHasher, const N: usize> Default for Bitmap<H, N> {
    fn default() -> Self {
        Self::new()
    }
}

/// Prefix byte used for metadata keys that store pinned MMR nodes.
const NODE_PREFIX: u8 = 0;

/// Prefix byte used for the metadata key that stores the pruned chunk count.
const PRUNED_CHUNKS_PREFIX: u8 = 1;

impl<H: CHasher, const N: usize> Bitmap<H, N> {
    /// The size of a chunk in bytes.
    pub const CHUNK_SIZE: usize = N;

    /// The size of a chunk in bits.
    pub const CHUNK_SIZE_BITS: u64 = N as u64 * 8;

    /// Returns a new empty bitmap.
    pub fn new() -> Self {
        let bitmap = VecDeque::from([[0u8; N]]);

        Bitmap {
            bitmap,
            authenticated_len: 0,
            next_bit: 0,
            mmr: Mmr::new(),
            pruned_chunks: 0,
            dirty_chunks: HashSet::new(),
        }
    }

    /// Returns the size of the underlying MMR.
    pub fn size(&self) -> u64 {
        self.mmr.size()
    }

    /// Returns the digest of the MMR node at `position`, if it exists.
    pub fn get_node(&self, position: u64) -> Option<H::Digest> {
        self.mmr.get_node(position)
    }

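    /// Restores a bitmap in its fully pruned state from the metadata previously written to
    /// `partition` by [`Bitmap::write_pruned`]. Only the pruned chunk count and the MMR's pinned
    /// nodes are recovered; any bits beyond the pruning boundary must be re-appended by the
    /// caller.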
    pub async fn restore_pruned<C: RStorage + Metrics + Clock>(
        context: C,
        partition: &str,
        pool: Option<ThreadPool>,
    ) -> Result<Self, Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;

        let key: U64 = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        let pruned_chunks = match metadata.get(&key) {
            Some(bytes) => u64::from_be_bytes(
                bytes
                    .as_slice()
                    .try_into()
                    .expect("pruned_chunks bytes could not be converted to u64"),
            ),
            None => {
                warn!("bitmap metadata does not contain pruned chunks, initializing as empty");
                0
            }
        } as usize;
        if pruned_chunks == 0 {
            return Ok(Self::new());
        }
        let mmr_size = leaf_num_to_pos(pruned_chunks as u64);

        let mut pinned_nodes = Vec::new();
        for (index, pos) in Proof::<H::Digest>::nodes_to_pin(mmr_size).enumerate() {
            let Some(bytes) = metadata.get(&U64::new(NODE_PREFIX, index as u64)) else {
                error!(size = mmr_size, pos, "missing pinned node");
                return Err(MissingNode(pos));
            };
            let digest = H::Digest::decode(bytes.as_ref());
            let Ok(digest) = digest else {
                error!(
                    size = mmr_size,
                    pos, "could not convert node bytes to digest"
                );
                return Err(MissingNode(pos));
            };
            pinned_nodes.push(digest);
        }

        metadata.close().await?;

        let mmr = Mmr::init(MemConfig {
            nodes: Vec::new(),
            pruned_to_pos: mmr_size,
            pinned_nodes,
            pool,
        });

        Ok(Self {
            bitmap: VecDeque::from([[0u8; N]]),
            authenticated_len: 0,
            next_bit: 0,
            mmr,
            pruned_chunks,
            dirty_chunks: HashSet::new(),
        })
    }

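    /// Writes the state required to restore the bitmap's pruned prefix to the metadata store in
    /// `partition`: the pruned chunk count and the MMR nodes pinned at that pruning boundary.
    /// Any previous contents of the partition are cleared.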
    pub async fn write_pruned<C: RStorage + Metrics + Clock>(
        &self,
        context: C,
        partition: &str,
    ) -> Result<(), Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let mut metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;
        metadata.clear();

        let key = U64::new(PRUNED_CHUNKS_PREFIX, 0);
        metadata.put(key, (self.pruned_chunks as u64).to_be_bytes().to_vec());

        let mmr_size = leaf_num_to_pos(self.pruned_chunks as u64);
        for (i, pos) in Proof::<H::Digest>::nodes_to_pin(mmr_size).enumerate() {
            let digest = self.mmr.get_node_unchecked(pos);
            let key = U64::new(NODE_PREFIX, i as u64);
            metadata.put(key, digest.to_vec());
        }

        metadata.close().await.map_err(MetadataError)
    }

    /// Returns the total number of bits in the bitmap, including those that have been pruned.
    #[inline]
    pub fn bit_count(&self) -> u64 {
        (self.pruned_chunks + self.bitmap.len()) as u64 * Self::CHUNK_SIZE_BITS
            - Self::CHUNK_SIZE_BITS
            + self.next_bit
    }

    /// Returns the number of bits that have been pruned from the bitmap.
    pub fn pruned_bits(&self) -> u64 {
        self.pruned_chunks as u64 * Self::CHUNK_SIZE_BITS
    }

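    /// Prunes all chunks preceding the chunk containing `bit_offset`, and prunes the underlying
    /// MMR to the corresponding leaf. A no-op if that chunk has already been pruned.
    ///
    /// # Panics
    ///
    /// Panics if there are unprocessed updates (see [`Bitmap::is_dirty`]).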
    pub fn prune_to_bit(&mut self, bit_offset: u64) {
        let chunk_num = Self::chunk_num(bit_offset);
        if chunk_num < self.pruned_chunks {
            return;
        }
        assert!(!self.is_dirty(), "cannot prune with unprocessed updates");

        let chunk_index = chunk_num - self.pruned_chunks;
        self.bitmap.drain(0..chunk_index);
        self.pruned_chunks = chunk_num;
        self.authenticated_len = self.bitmap.len() - 1;

        let mmr_pos = leaf_num_to_pos(chunk_num as u64);
        self.mmr.prune_to_pos(mmr_pos);
    }

    /// Returns the last chunk and the number of bits it currently contains.
    #[inline]
    pub fn last_chunk(&self) -> (&[u8; N], u64) {
        (self.bitmap.back().unwrap(), self.next_bit)
    }

    /// Returns a mutable reference to the last chunk.
    #[inline]
    fn last_chunk_mut(&mut self) -> &mut [u8] {
        self.bitmap.back_mut().unwrap()
    }

    /// Returns the chunk containing `bit_offset`.
    ///
    /// # Panics
    ///
    /// Panics if `bit_offset` is out of bounds or the chunk has been pruned.
    #[inline]
    pub fn get_chunk(&self, bit_offset: u64) -> &[u8; N] {
        &self.bitmap[self.chunk_index(bit_offset)]
    }

    /// Starts a new empty chunk at the end of the bitmap.
    fn prepare_next_chunk(&mut self) {
        self.next_bit = 0;
        self.bitmap.push_back([0u8; N]);
    }

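    /// Appends a full chunk of bits. The chunk is not added to the underlying MMR until the next
    /// call to `sync`.
    ///
    /// # Panics
    ///
    /// Panics if the bitmap is not chunk aligned (the last chunk is partially filled).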
    pub fn append_chunk_unchecked(&mut self, chunk: &[u8; N]) {
        assert!(
            self.next_bit == 0,
            "cannot add chunk when not chunk aligned"
        );

        self.last_chunk_mut().copy_from_slice(chunk.as_ref());
        self.prepare_next_chunk();
    }

    /// Appends a full byte of bits.
    ///
    /// # Panics
    ///
    /// Panics if the bitmap is not byte aligned.
    pub fn append_byte_unchecked(&mut self, byte: u8) {
        assert!(
            self.next_bit.is_multiple_of(8),
            "cannot add byte when not byte aligned"
        );

        let chunk_byte = (self.next_bit / 8) as usize;
        self.last_chunk_mut()[chunk_byte] = byte;
        self.next_bit += 8;
        assert!(self.next_bit <= Self::CHUNK_SIZE_BITS);

        if self.next_bit == Self::CHUNK_SIZE_BITS {
            self.prepare_next_chunk();
        }
    }

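    /// Appends a single bit. If this fills the last chunk, a new empty chunk is started and the
    /// completed chunk will be added to the MMR on the next `sync`.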
    pub fn append(&mut self, bit: bool) {
        if bit {
            let chunk_byte = (self.next_bit / 8) as usize;
            self.last_chunk_mut()[chunk_byte] |= Self::chunk_byte_bitmask(self.next_bit);
        }
        self.next_bit += 1;
        assert!(self.next_bit <= Self::CHUNK_SIZE_BITS);

        if self.next_bit == Self::CHUNK_SIZE_BITS {
            self.prepare_next_chunk();
        }
    }

    /// Returns the bitmask selecting the bit at `bit_offset` within its byte.
    #[inline]
    pub(crate) fn chunk_byte_bitmask(bit_offset: u64) -> u8 {
        1 << (bit_offset % 8)
    }

    /// Returns the byte offset of the bit at `bit_offset` within its chunk.
    #[inline]
    pub(crate) fn chunk_byte_offset(bit_offset: u64) -> usize {
        (bit_offset / 8) as usize % Self::CHUNK_SIZE
    }

    /// Returns the MMR position of the leaf for the chunk containing `bit_offset`.
    #[inline]
    pub(crate) fn leaf_pos(bit_offset: u64) -> u64 {
        leaf_num_to_pos(Self::chunk_num(bit_offset) as u64)
    }

    /// Returns the index into `self.bitmap` of the chunk containing `bit_offset`.
    ///
    /// # Panics
    ///
    /// Panics if `bit_offset` is out of bounds or the chunk has been pruned.
    #[inline]
    fn chunk_index(&self, bit_offset: u64) -> usize {
        assert!(bit_offset < self.bit_count(), "out of bounds: {bit_offset}");
        let chunk_num = Self::chunk_num(bit_offset);
        assert!(chunk_num >= self.pruned_chunks, "bit pruned: {bit_offset}");

        chunk_num - self.pruned_chunks
    }

    /// Returns the absolute chunk number (counting pruned chunks) of the chunk containing
    /// `bit_offset`.
    #[inline]
    fn chunk_num(bit_offset: u64) -> usize {
        (bit_offset / Self::CHUNK_SIZE_BITS) as usize
    }

    /// Returns the value of the bit at `bit_offset`.
    ///
    /// # Panics
    ///
    /// Panics if `bit_offset` is out of bounds or the bit has been pruned.
    #[inline]
    pub fn get_bit(&self, bit_offset: u64) -> bool {
        Self::get_bit_from_chunk(self.get_chunk(bit_offset), bit_offset)
    }

    /// Returns the value of the bit at `bit_offset` within the chunk that contains it.
    #[inline]
    pub fn get_bit_from_chunk(chunk: &[u8; N], bit_offset: u64) -> bool {
        let byte_offset = Self::chunk_byte_offset(bit_offset);
        let byte = chunk[byte_offset];
        let mask = Self::chunk_byte_bitmask(bit_offset);

        (byte & mask) != 0
    }

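    /// Sets the value of the bit at `bit_offset`. Updates to chunks that are already in the MMR
    /// are batched; call `sync` to process them before computing the root or generating proofs.
    ///
    /// # Panics
    ///
    /// Panics if `bit_offset` is out of bounds or the bit has been pruned.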
    pub fn set_bit(&mut self, bit_offset: u64, bit: bool) {
        let chunk_index = self.chunk_index(bit_offset);
        let chunk = &mut self.bitmap[chunk_index];

        let byte_offset = Self::chunk_byte_offset(bit_offset);
        let mask = Self::chunk_byte_bitmask(bit_offset);

        if bit {
            chunk[byte_offset] |= mask;
        } else {
            chunk[byte_offset] &= !mask;
        }

        if chunk_index < self.authenticated_len {
            self.dirty_chunks.insert(chunk_index);
        }
    }

    /// Returns true if there are updates that have not yet been processed by `sync`.
    pub fn is_dirty(&self) -> bool {
        !self.dirty_chunks.is_empty() || self.authenticated_len < self.bitmap.len() - 1
    }

    /// Returns the (absolute) numbers of all chunks that will be added to or updated in the MMR
    /// by the next call to `sync`.
    pub fn dirty_chunks(&self) -> Vec<u64> {
        let mut chunks: Vec<u64> = self
            .dirty_chunks
            .iter()
            .map(|&chunk_index| (chunk_index + self.pruned_chunks) as u64)
            .collect();
        for i in self.authenticated_len..self.bitmap.len() - 1 {
            chunks.push((i + self.pruned_chunks) as u64);
        }

        chunks
    }

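    /// Processes all pending updates: newly completed chunks are appended to the MMR, batched
    /// updates to previously authenticated chunks are applied, and the MMR itself is synced.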
    pub async fn sync(&mut self, hasher: &mut impl Hasher<H>) -> Result<(), Error> {
        // Add any newly completed chunks to the MMR.
        let start = self.authenticated_len;
        assert!(!self.bitmap.is_empty());
        let end = self.bitmap.len() - 1;
        for i in start..end {
            self.mmr.add_batched(hasher, &self.bitmap[i]);
        }
        self.authenticated_len = end;

        // Apply batched updates to chunks that were modified after being authenticated.
        let updates = self
            .dirty_chunks
            .iter()
            .map(|chunk_index| {
                let pos = leaf_num_to_pos((*chunk_index + self.pruned_chunks) as u64);
                (pos, &self.bitmap[*chunk_index])
            })
            .collect::<Vec<_>>();
        self.mmr.update_leaf_batched(hasher, &updates);
        self.dirty_chunks.clear();
        self.mmr.sync(hasher);

        Ok(())
    }

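    /// Returns the root digest of the bitmap.
    ///
    /// When the last chunk is partially filled, its digest and bit count are folded into the MMR
    /// root so that the result commits to every appended bit.
    ///
    /// # Panics
    ///
    /// Panics if there are unprocessed updates (see [`Bitmap::is_dirty`]).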
    pub async fn root(&self, hasher: &mut impl Hasher<H>) -> Result<H::Digest, Error> {
        assert!(
            !self.is_dirty(),
            "cannot compute root with unprocessed updates",
        );
        let mmr_root = self.mmr.root(hasher);
        if self.next_bit == 0 {
            return Ok(mmr_root);
        }

        // Fold the digest of the partial last chunk into the MMR root.
        let last_chunk_digest = hasher.digest(self.last_chunk().0);
        Ok(Self::partial_chunk_root(
            hasher.inner(),
            &mmr_root,
            self.next_bit,
            &last_chunk_digest,
        ))
    }

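    /// Computes the bitmap root from the MMR root, the digest of the partial last chunk, and the
    /// number of bits that chunk contains.
    ///
    /// # Panics
    ///
    /// Panics if `next_bit` is 0 or the last chunk is full; in those cases the MMR root is the
    /// bitmap root.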
    pub fn partial_chunk_root(
        hasher: &mut H,
        mmr_root: &H::Digest,
        next_bit: u64,
        last_chunk_digest: &H::Digest,
    ) -> H::Digest {
        assert!(next_bit > 0);
        assert!(next_bit < Self::CHUNK_SIZE_BITS);
        hasher.update(mmr_root);
        hasher.update(&next_bit.to_be_bytes());
        hasher.update(last_chunk_digest);
        hasher.finalize()
    }

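    /// Returns a proof that the chunk containing `bit_offset` is included in the bitmap, along
    /// with that chunk. The returned proof's `size` field holds the bitmap's bit count rather
    /// than an MMR size; [`Bitmap::verify_bit_inclusion`] relies on this.
    ///
    /// # Panics
    ///
    /// Panics if `bit_offset` is out of bounds or there are unprocessed updates.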
    pub async fn proof(
        &self,
        hasher: &mut impl Hasher<H>,
        bit_offset: u64,
    ) -> Result<(Proof<H::Digest>, [u8; N]), Error> {
        assert!(bit_offset < self.bit_count(), "out of bounds");
        assert!(
            !self.is_dirty(),
            "cannot compute proof with unprocessed updates"
        );

        let leaf_pos = Self::leaf_pos(bit_offset);
        let chunk = self.get_chunk(bit_offset);

        if leaf_pos == self.mmr.size() {
            // The bit lies in the partial last chunk, which is not yet an MMR leaf; the proof
            // consists of the MMR root alone.
            assert!(self.next_bit > 0);
            return Ok((
                Proof {
                    size: self.bit_count(),
                    digests: vec![self.mmr.root(hasher)],
                },
                *chunk,
            ));
        }

        let mut proof = Proof::<H::Digest>::range_proof(&self.mmr, leaf_pos, leaf_pos).await?;
        proof.size = self.bit_count();
        if self.next_bit == 0 {
            return Ok((proof, *chunk));
        }

        // Append the digest of the partial last chunk so the verifier can recompute the
        // partial-chunk root.
        let last_chunk_digest = hasher.digest(self.last_chunk().0);
        proof.digests.push(last_chunk_digest);

        Ok((proof, *chunk))
    }

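    /// Verifies that `chunk` is the chunk containing `bit_offset` in the bitmap committed to by
    /// `root`, using a proof produced by [`Bitmap::proof`]. Returns false if the proof does not
    /// verify or is malformed.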
    pub fn verify_bit_inclusion(
        hasher: &mut impl Hasher<H>,
        proof: &Proof<H::Digest>,
        chunk: &[u8; N],
        bit_offset: u64,
        root: &H::Digest,
    ) -> bool {
        let bit_count = proof.size;
        if bit_offset >= bit_count {
            debug!(bit_count, bit_offset, "tried to verify non-existent bit");
            return false;
        }
        let leaf_pos = Self::leaf_pos(bit_offset);

        // Recover the underlying MMR's size from the bit count carried in the proof.
        let mut mmr_proof = Proof::<H::Digest> {
            size: leaf_num_to_pos(bit_count / Self::CHUNK_SIZE_BITS),
            digests: proof.digests.clone(),
        };

        if bit_count % Self::CHUNK_SIZE_BITS == 0 {
            // The bitmap is chunk aligned, so a plain MMR inclusion proof suffices.
            return mmr_proof.verify_element_inclusion(hasher, chunk, leaf_pos, root);
        }

        // The bitmap ends in a partial chunk, so the proof's last digest is needed to
        // reconstruct the partial-chunk root.
        if proof.digests.is_empty() {
            debug!("proof has no digests");
            return false;
        }
        let last_digest = mmr_proof.digests.pop().unwrap();

        if mmr_proof.size == leaf_pos {
            // The target chunk is the partial last chunk itself, so the popped digest is the MMR
            // root and no other digests should remain.
            if !mmr_proof.digests.is_empty() {
                debug!(
                    digests = mmr_proof.digests.len() + 1,
                    "proof over partial chunk should have exactly 1 digest"
                );
                return false;
            }
            let last_chunk_digest = hasher.digest(chunk);
            let next_bit = bit_count % Self::CHUNK_SIZE_BITS;
            let reconstructed_root = Self::partial_chunk_root(
                hasher.inner(),
                &last_digest,
                next_bit,
                &last_chunk_digest,
            );
            return reconstructed_root == *root;
        };

        let mmr_root = match mmr_proof.reconstruct_root(hasher, &[chunk], leaf_pos) {
            Ok(root) => root,
            Err(error) => {
                debug!(error = ?error, "invalid proof input");
                return false;
            }
        };

        let next_bit = bit_count % Self::CHUNK_SIZE_BITS;
        let reconstructed_root =
            Self::partial_chunk_root(hasher.inner(), &mmr_root, next_bit, &last_digest);

        reconstructed_root == *root
    }

    /// Destroys the metadata in `partition` used to restore a pruned bitmap.
    pub async fn destroy<C: RStorage + Metrics + Clock>(
        context: C,
        partition: &str,
    ) -> Result<(), Error> {
        let metadata_cfg = MConfig {
            partition: partition.to_string(),
            codec_config: ((0..).into(), ()),
        };
        let metadata =
            Metadata::<_, U64, Vec<u8>>::init(context.with_label("metadata"), metadata_cfg).await?;
        metadata.destroy().await.map_err(MetadataError)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::mmr::hasher::Standard;
    use commonware_codec::FixedSize;
    use commonware_cryptography::Sha256;
    use commonware_macros::test_traced;
    use commonware_runtime::{deterministic, Runner as _};

    const SHA256_SIZE: usize = <Sha256 as CHasher>::Digest::SIZE;

    /// Builds a test chunk of `N` bytes by repeating the SHA-256 hash of `s`.
    fn test_chunk<const N: usize>(s: &[u8]) -> [u8; N] {
        assert_eq!(N % 32, 0);
        let mut vec: Vec<u8> = Vec::new();
        for _ in 0..N / 32 {
            vec.extend(Sha256::hash(s).iter());
        }

        vec.try_into().unwrap()
    }

    #[test_traced]
    fn test_bitmap_verify_empty_proof() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut hasher = Standard::new();
            let proof = Proof {
                size: 100,
                digests: Vec::new(),
            };
            assert!(
                !Bitmap::<Sha256, SHA256_SIZE>::verify_bit_inclusion(
                    &mut hasher,
                    &proof,
                    &[0u8; SHA256_SIZE],
                    0,
                    &Sha256::fill(0x00),
                ),
                "proof without digests shouldn't verify or panic"
            );
        });
    }

    #[test_traced]
    fn test_bitmap_empty_then_one() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap: Bitmap<Sha256, SHA256_SIZE> = Bitmap::new();
            assert_eq!(bitmap.bit_count(), 0);
            assert_eq!(bitmap.pruned_chunks, 0);
            bitmap.prune_to_bit(0);
            assert_eq!(bitmap.pruned_chunks, 0);
            assert_eq!(bitmap.last_chunk().0, &[0u8; SHA256_SIZE]);
            assert_eq!(bitmap.last_chunk().1, 0);

            // Add a single bit and confirm the root changes.
            let mut hasher = Standard::new();
            let root = bitmap.root(&mut hasher).await.unwrap();
            bitmap.append(true);
            bitmap.sync(&mut hasher).await.unwrap();
            let new_root = bitmap.root(&mut hasher).await.unwrap();
            assert_ne!(root, new_root);
            let root = new_root;
            bitmap.prune_to_bit(1);
            assert_eq!(bitmap.bit_count(), 1);
            assert_ne!(bitmap.last_chunk().0, &[0u8; SHA256_SIZE]);
            assert_eq!(bitmap.last_chunk().1, 1);
            assert_eq!(bitmap.pruned_chunks, 0);
            assert_eq!(root, bitmap.root(&mut hasher).await.unwrap());

            // Fill up the rest of the chunk.
            for i in 0..(Bitmap::<Sha256, SHA256_SIZE>::CHUNK_SIZE_BITS - 1) {
                bitmap.append(i % 2 != 0);
            }
            bitmap.sync(&mut hasher).await.unwrap();
            assert_eq!(bitmap.bit_count(), 256);
            assert_ne!(root, bitmap.root(&mut hasher).await.unwrap());
            let root = bitmap.root(&mut hasher).await.unwrap();

            // Proofs over the only chunk should verify, but only for bits within it.
            let (proof, chunk) = bitmap.proof(&mut hasher, 0).await.unwrap();
            assert!(
                Bitmap::verify_bit_inclusion(&mut hasher, &proof, &chunk, 255, &root),
                "failed to prove bit in only chunk"
            );
            assert!(
                !Bitmap::verify_bit_inclusion(&mut hasher, &proof, &chunk, 256, &root),
                "should not be able to prove bit outside of chunk"
            );

            // Pruning the full chunk should not affect the root.
            bitmap.prune_to_bit(256);
            assert_eq!(bitmap.bit_count(), 256);
            assert_eq!(bitmap.pruned_chunks, 1);
            assert_eq!(root, bitmap.root(&mut hasher).await.unwrap());
            assert_eq!(bitmap.last_chunk().0, &[0u8; SHA256_SIZE]);
            assert_eq!(bitmap.last_chunk().1, 0);

            // Pruning to an already-pruned bit is a no-op.
            bitmap.prune_to_bit(10);
            assert_eq!(root, bitmap.root(&mut hasher).await.unwrap());
        });
    }

    #[test_traced]
    fn test_bitmap_building() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let test_chunk = test_chunk(b"test");
            let mut hasher: Standard<Sha256> = Standard::new();

            // Build a bitmap from one chunk append followed by bit-by-bit appends.
            let mut bitmap = Bitmap::<_, SHA256_SIZE>::new();
            bitmap.append_chunk_unchecked(&test_chunk);
            for b in test_chunk {
                for j in 0..8 {
                    let mask = 1 << j;
                    let bit = (b & mask) != 0;
                    bitmap.append(bit);
                }
            }
            assert_eq!(bitmap.bit_count(), 256 * 2);

            bitmap.sync(&mut hasher).await.unwrap();
            let root = bitmap.root(&mut hasher).await.unwrap();
            let inner_root = bitmap.mmr.root(&mut hasher);
            assert_eq!(root, inner_root);

            {
                // Appending the same data with chunk appends should yield the same root.
                let mut bitmap = Bitmap::<_, SHA256_SIZE>::default();
                bitmap.append_chunk_unchecked(&test_chunk);
                bitmap.append_chunk_unchecked(&test_chunk);
                bitmap.sync(&mut hasher).await.unwrap();
                let same_root = bitmap.root(&mut hasher).await.unwrap();
                assert_eq!(root, same_root);
            }
            {
                // Appending the same data with byte appends should yield the same root.
                let mut bitmap = Bitmap::<_, SHA256_SIZE>::default();
                bitmap.append_chunk_unchecked(&test_chunk);
                for b in test_chunk {
                    bitmap.append_byte_unchecked(b);
                }
                bitmap.sync(&mut hasher).await.unwrap();
                let same_root = bitmap.root(&mut hasher).await.unwrap();
                assert_eq!(root, same_root);
            }
        });
    }

    #[test_traced]
    #[should_panic(expected = "cannot add chunk")]
    fn test_bitmap_build_chunked_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = Bitmap::<Sha256, SHA256_SIZE>::new();
            bitmap.append_chunk_unchecked(&test_chunk(b"test"));
            bitmap.append(true);
            bitmap.append_chunk_unchecked(&test_chunk(b"panic"));
        });
    }

    #[test_traced]
    #[should_panic(expected = "cannot add byte")]
    fn test_bitmap_build_byte_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = Bitmap::<Sha256, SHA256_SIZE>::new();
            bitmap.append_chunk_unchecked(&test_chunk(b"test"));
            bitmap.append(true);
            bitmap.append_byte_unchecked(0x01);
        });
    }

    #[test_traced]
    #[should_panic(expected = "out of bounds")]
    fn test_bitmap_get_out_of_bounds_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = Bitmap::<Sha256, SHA256_SIZE>::new();
            bitmap.append_chunk_unchecked(&test_chunk(b"test"));
            bitmap.get_bit(256);
        });
    }

    #[test_traced]
    #[should_panic(expected = "pruned")]
    fn test_bitmap_get_pruned_bit_panic() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            let mut bitmap = Bitmap::<Sha256, SHA256_SIZE>::new();
            bitmap.append_chunk_unchecked(&test_chunk(b"test"));
            bitmap.append_chunk_unchecked(&test_chunk(b"test2"));
            let mut hasher = Standard::new();
            bitmap.sync(&mut hasher).await.unwrap();

            bitmap.prune_to_bit(256);
            bitmap.get_bit(255);
        });
    }

    #[test_traced]
    fn test_bitmap_root_boundaries() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            // Build a starting bitmap with two full chunks.
            let mut bitmap = Bitmap::<Sha256, SHA256_SIZE>::default();
            let mut hasher = Standard::new();
            bitmap.append_chunk_unchecked(&test_chunk(b"test"));
            bitmap.append_chunk_unchecked(&test_chunk(b"test2"));
            bitmap.sync(&mut hasher).await.unwrap();

            let root = bitmap.root(&mut hasher).await.unwrap();

            // Add a single bit to start a third (partial) chunk.
            bitmap.append(true);
            bitmap.sync(&mut hasher).await.unwrap();
            let new_root = bitmap.root(&mut hasher).await.unwrap();
            assert_ne!(root, new_root);
            assert_eq!(bitmap.mmr.size(), 3);

            // Fill up the partial chunk, confirming the root changes with each added bit.
            for _ in 0..(Bitmap::<Sha256, SHA256_SIZE>::CHUNK_SIZE * 8 - 1) {
                bitmap.append(false);
                bitmap.sync(&mut hasher).await.unwrap();
                let newer_root = bitmap.root(&mut hasher).await.unwrap();
                assert_ne!(new_root, newer_root);
            }
            assert_eq!(bitmap.mmr.size(), 4);

            // Add one more bit to start a fourth chunk.
            bitmap.append(false);
            assert_eq!(bitmap.bit_count(), 256 * 3 + 1);
            bitmap.sync(&mut hasher).await.unwrap();
            let newer_root = bitmap.root(&mut hasher).await.unwrap();
            assert_ne!(new_root, newer_root);

            // Pruning to the current bit count should not affect the root.
            bitmap.prune_to_bit(bitmap.bit_count());
            assert_eq!(bitmap.pruned_chunks, 3);
            assert_eq!(bitmap.bit_count(), 256 * 3 + 1);
            assert_eq!(newer_root, bitmap.root(&mut hasher).await.unwrap());
        });
    }

    #[test_traced]
    fn test_bitmap_get_set_bits() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            // Build a test bitmap with a few chunks plus some trailing bits.
            let mut bitmap = Bitmap::<Sha256, SHA256_SIZE>::default();
            let mut hasher = Standard::new();
            bitmap.append_chunk_unchecked(&test_chunk(b"test"));
            bitmap.append_chunk_unchecked(&test_chunk(b"test2"));
            bitmap.append_chunk_unchecked(&test_chunk(b"test3"));
            bitmap.append_chunk_unchecked(&test_chunk(b"test4"));
            bitmap.append_byte_unchecked(0xF1);
            bitmap.append(true);
            bitmap.append(false);
            bitmap.append(true);

            bitmap.sync(&mut hasher).await.unwrap();
            let root = bitmap.root(&mut hasher).await.unwrap();

            // Flip each bit and confirm the root changes, then flip it back and confirm the
            // original root is restored.
            for bit_pos in (0..bitmap.bit_count()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                bitmap.set_bit(bit_pos, !bit);
                bitmap.sync(&mut hasher).await.unwrap();
                let new_root = bitmap.root(&mut hasher).await.unwrap();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                bitmap.set_bit(bit_pos, bit);
                bitmap.sync(&mut hasher).await.unwrap();
                let new_root = bitmap.root(&mut hasher).await.unwrap();
                assert_eq!(root, new_root);
            }

            // Repeat after pruning the first two chunks.
            let start_bit = SHA256_SIZE as u64 * 8 * 2;
            bitmap.prune_to_bit(start_bit);
            for bit_pos in (start_bit..bitmap.bit_count()).rev() {
                let bit = bitmap.get_bit(bit_pos);
                bitmap.set_bit(bit_pos, !bit);
                bitmap.sync(&mut hasher).await.unwrap();
                let new_root = bitmap.root(&mut hasher).await.unwrap();
                assert_ne!(root, new_root, "failed at bit {bit_pos}");
                bitmap.set_bit(bit_pos, bit);
                bitmap.sync(&mut hasher).await.unwrap();
                let new_root = bitmap.root(&mut hasher).await.unwrap();
                assert_eq!(root, new_root);
            }
        });
    }

    /// Returns a copy of `chunk` with the bit at `bit_offset` flipped.
    fn flip_bit<const N: usize>(bit_offset: u64, chunk: &[u8; N]) -> [u8; N] {
        let byte_offset = Bitmap::<Sha256, N>::chunk_byte_offset(bit_offset);
        let mask = Bitmap::<Sha256, N>::chunk_byte_bitmask(bit_offset);
        let mut tmp = chunk.to_vec();
        tmp[byte_offset] ^= mask;
        tmp.try_into().unwrap()
    }

    #[test_traced]
    fn test_bitmap_mmr_proof_verification() {
        test_bitmap_mmr_proof_verification_n::<32>();
        test_bitmap_mmr_proof_verification_n::<64>();
    }

    fn test_bitmap_mmr_proof_verification_n<const N: usize>() {
        let executor = deterministic::Runner::default();
        executor.start(|_| async move {
            // Build a bitmap with 10 chunks plus a few extra bits.
            let mut hasher = Standard::new();
            let mut bitmap = Bitmap::<Sha256, N>::new();
            for i in 0u32..10 {
                bitmap.append_chunk_unchecked(&test_chunk(format!("test{i}").as_bytes()));
            }
            bitmap.append_byte_unchecked(0xA6);
            bitmap.append(true);
            bitmap.append(false);
            bitmap.append(true);
            bitmap.append(true);
            bitmap.append(false);

            bitmap.sync(&mut hasher).await.unwrap();
            let root = bitmap.root(&mut hasher).await.unwrap();

            // Verify proofs over every bit after each of several pruning points.
            for prune_to_bit in (0..bitmap.bit_count()).step_by(251) {
                assert_eq!(bitmap.root(&mut hasher).await.unwrap(), root);
                bitmap.prune_to_bit(prune_to_bit);
                for i in prune_to_bit..bitmap.bit_count() {
                    let (proof, chunk) = bitmap.proof(&mut hasher, i).await.unwrap();

                    // Proving the bit against its actual chunk should succeed.
                    assert!(
                        Bitmap::<_, N>::verify_bit_inclusion(&mut hasher, &proof, &chunk, i, &root),
                        "failed to prove bit {i}",
                    );

                    // Flipping the bit should make verification fail.
                    let corrupted = flip_bit(i, &chunk);
                    assert!(
                        !Bitmap::<_, N>::verify_bit_inclusion(
                            &mut hasher,
                            &proof,
                            &corrupted,
                            i,
                            &root
                        ),
                        "proving bit {i} after flipping should have failed",
                    );
                }
            }
        })
    }

    #[test_traced]
    fn test_bitmap_persistence() {
        const PARTITION: &str = "bitmap_test";
        const FULL_CHUNK_COUNT: usize = 100;

        let executor = deterministic::Runner::default();
        executor.start(|context| async move {
            // Restoring from an empty partition should produce an empty bitmap.
            let mut bitmap =
                Bitmap::<Sha256, SHA256_SIZE>::restore_pruned(context.clone(), PARTITION, None)
                    .await
                    .unwrap();
            assert_eq!(bitmap.bit_count(), 0);

            // Add a bunch of chunks, capturing the chunk-aligned root.
            let mut hasher = Standard::new();
            for i in 0..FULL_CHUNK_COUNT {
                bitmap.append_chunk_unchecked(&test_chunk(format!("test{i}").as_bytes()));
            }
            bitmap.sync(&mut hasher).await.unwrap();
            let chunk_aligned_root = bitmap.root(&mut hasher).await.unwrap();

            // Add a few more bits, capturing the final root.
            bitmap.append_byte_unchecked(0xA6);
            bitmap.append(true);
            bitmap.append(false);
            bitmap.append(true);
            bitmap.sync(&mut hasher).await.unwrap();
            let root = bitmap.root(&mut hasher).await.unwrap();

            // Prune at increasing chunk boundaries, persisting and restoring each time, then
            // re-append the remaining data and confirm the roots match.
            for i in (10..=FULL_CHUNK_COUNT).step_by(10) {
                bitmap.prune_to_bit(i as u64 * Bitmap::<Sha256, SHA256_SIZE>::CHUNK_SIZE_BITS);
                bitmap
                    .write_pruned(context.clone(), PARTITION)
                    .await
                    .unwrap();
                bitmap = Bitmap::<_, SHA256_SIZE>::restore_pruned(context.clone(), PARTITION, None)
                    .await
                    .unwrap();
                let _ = bitmap.root(&mut hasher).await.unwrap();

                // Replay the chunks beyond the pruning point.
                for j in i..FULL_CHUNK_COUNT {
                    bitmap.append_chunk_unchecked(&test_chunk(format!("test{j}").as_bytes()));
                }
                assert_eq!(bitmap.pruned_chunks, i);
                assert_eq!(bitmap.bit_count(), FULL_CHUNK_COUNT as u64 * 256);
                bitmap.sync(&mut hasher).await.unwrap();
                assert_eq!(bitmap.root(&mut hasher).await.unwrap(), chunk_aligned_root);

                // Replay the trailing bits and confirm the final root.
                bitmap.append_byte_unchecked(0xA6);
                bitmap.append(true);
                bitmap.append(false);
                bitmap.append(true);
                assert_eq!(bitmap.root(&mut hasher).await.unwrap(), root);
            }
        });
    }
}