commonware_storage/qmdb/current/mod.rs

use crate::{
    bitmap::{CleanBitMap, DirtyBitMap},
    mmr::{
        grafting::{Hasher as GraftingHasher, Storage as GraftingStorage},
        hasher::Hasher,
        journaled::Mmr,
        mem::Clean,
        StandardHasher,
    },
    qmdb::{any::FixedConfig as AConfig, Error},
    translator::Translator,
};
use commonware_cryptography::{DigestOf, Hasher as CHasher};
use commonware_parallel::ThreadPool;
use commonware_runtime::{buffer::PoolRef, Clock, Metrics, Storage as RStorage};
use std::num::{NonZeroU64, NonZeroUsize};

pub mod ordered;
pub mod proof;
pub mod unordered;

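/// Configuration for initializing a current QMDB.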
#[derive(Clone)]
pub struct FixedConfig<T: Translator> {
    /// The name of the storage partition used for the MMR's backing journal.
    pub mmr_journal_partition: String,

    /// The maximum number of items to store in each blob of the MMR journal.
    pub mmr_items_per_blob: NonZeroU64,

    /// The size of the write buffer used by the MMR journal.
    pub mmr_write_buffer: NonZeroUsize,

    /// The name of the storage partition used for the MMR's metadata.
    pub mmr_metadata_partition: String,

    /// The name of the storage partition used for the log's backing journal.
    pub log_journal_partition: String,

    /// The maximum number of items to store in each blob of the log journal.
    pub log_items_per_blob: NonZeroU64,

    /// The size of the write buffer used by the log journal.
    pub log_write_buffer: NonZeroUsize,

    /// The name of the storage partition used for the bitmap's metadata.
    pub bitmap_metadata_partition: String,

    /// The translator used for mapping keys in the index.
    pub translator: T,

    /// An optional thread pool for parallelizing work.
    pub thread_pool: Option<ThreadPool>,

    /// The buffer pool used for caching data.
    pub buffer_pool: PoolRef,
}

impl<T: Translator> FixedConfig<T> {
    /// Converts this configuration into the corresponding [AConfig]. The
    /// `bitmap_metadata_partition` field has no counterpart there and is dropped.
    pub fn to_any_config(self) -> AConfig<T> {
        AConfig {
            mmr_journal_partition: self.mmr_journal_partition,
            mmr_metadata_partition: self.mmr_metadata_partition,
            mmr_items_per_blob: self.mmr_items_per_blob,
            mmr_write_buffer: self.mmr_write_buffer,
            log_journal_partition: self.log_journal_partition,
            log_items_per_blob: self.log_items_per_blob,
            log_write_buffer: self.log_write_buffer,
            translator: self.translator,
            thread_pool: self.thread_pool,
            buffer_pool: self.buffer_pool,
        }
    }
}

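/// Computes the root digest by grafting the bitmap `status` onto `mmr` at the given height,
/// then folding in the digest of the bitmap's partial last chunk, if one exists.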
async fn root<E: RStorage + Clock + Metrics, H: CHasher, const N: usize>(
    hasher: &mut StandardHasher<H>,
    height: u32,
    status: &CleanBitMap<H::Digest, N>,
    mmr: &Mmr<E, H::Digest, Clean<DigestOf<H>>>,
) -> Result<H::Digest, Error> {
    let grafted_mmr = GraftingStorage::<'_, H, _, _>::new(status, mmr, height);
    let mmr_root = grafted_mmr.root(hasher).await?;

    let (last_chunk, next_bit) = status.last_chunk();
    // If the last chunk of the bitmap is complete, the grafted MMR root is the final root.
    if next_bit == CleanBitMap::<H::Digest, N>::CHUNK_SIZE_BITS {
        return Ok(mmr_root);
    }

    // Otherwise, fold the digest of the partial last chunk into the grafted root.
    hasher.inner().update(last_chunk);
    let last_chunk_digest = hasher.inner().finalize();

    Ok(CleanBitMap::<H::Digest, N>::partial_chunk_root(
        hasher.inner(),
        &mmr_root,
        next_bit,
        &last_chunk_digest,
    ))
}

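/// Merkleizes the dirty bitmap `status` into a [CleanBitMap], loading the digests grafted at
/// `grafting_height` from `mmr` for each dirty chunk.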
async fn merkleize_grafted_bitmap<H, const N: usize>(
    hasher: &mut StandardHasher<H>,
    status: DirtyBitMap<H::Digest, N>,
    mmr: &impl crate::mmr::storage::Storage<H::Digest>,
    grafting_height: u32,
) -> Result<CleanBitMap<H::Digest, N>, Error>
where
    H: CHasher,
{
    // Load the digests to graft for every dirty chunk before merkleizing.
    let mut grafter = GraftingHasher::new(hasher, grafting_height);
    grafter
        .load_grafted_digests(&status.dirty_chunks(), mmr)
        .await?;
    status.merkleize(&mut grafter).await.map_err(Into::into)
}