//! commonware_storage/qmdb/current/mod.rs
use crate::{
8 bitmap::{CleanBitMap, DirtyBitMap},
9 mmr::{
10 grafting::{Hasher as GraftingHasher, Storage as GraftingStorage},
11 hasher::Hasher,
12 journaled::Mmr,
13 mem::Clean,
14 StandardHasher,
15 },
16 qmdb::{any::FixedConfig as AConfig, Error},
17 translator::Translator,
18};
19use commonware_cryptography::{DigestOf, Hasher as CHasher};
20use commonware_runtime::{buffer::PoolRef, Clock, Metrics, Storage as RStorage, ThreadPool};
21use std::num::{NonZeroU64, NonZeroUsize};
22
23pub mod ordered;
24pub mod proof;
25pub mod unordered;
26
/// Configuration for a `current` QMDB instance backed by fixed-size values.
///
/// Converted into the underlying "any"-database config via
/// [`FixedConfig::to_any_config`]; the bitmap-specific field is consumed
/// separately by the bitmap layer.
#[derive(Clone)]
pub struct FixedConfig<T: Translator> {
    /// Name of the storage partition holding the MMR journal.
    pub mmr_journal_partition: String,

    /// Number of MMR journal items stored per blob.
    pub mmr_items_per_blob: NonZeroU64,

    /// Write-buffer size (in items or bytes — confirm against journal docs)
    /// used when appending to the MMR journal.
    pub mmr_write_buffer: NonZeroUsize,

    /// Name of the storage partition holding MMR metadata.
    pub mmr_metadata_partition: String,

    /// Name of the storage partition holding the operation-log journal.
    pub log_journal_partition: String,

    /// Number of log journal items stored per blob.
    pub log_items_per_blob: NonZeroU64,

    /// Write-buffer size used when appending to the log journal.
    pub log_write_buffer: NonZeroUsize,

    /// Name of the storage partition holding the activity bitmap's metadata.
    /// NOTE(review): not forwarded by `to_any_config`; presumably consumed by
    /// the bitmap layer directly.
    pub bitmap_metadata_partition: String,

    /// Translator used to map keys into their index representation.
    pub translator: T,

    /// Optional thread pool for parallelizing hashing/merkleization work.
    pub thread_pool: Option<ThreadPool>,

    /// Shared buffer pool for journal reads.
    pub buffer_pool: PoolRef,
}
63
64impl<T: Translator> FixedConfig<T> {
65 pub fn to_any_config(self) -> AConfig<T> {
67 AConfig {
68 mmr_journal_partition: self.mmr_journal_partition,
69 mmr_metadata_partition: self.mmr_metadata_partition,
70 mmr_items_per_blob: self.mmr_items_per_blob,
71 mmr_write_buffer: self.mmr_write_buffer,
72 log_journal_partition: self.log_journal_partition,
73 log_items_per_blob: self.log_items_per_blob,
74 log_write_buffer: self.log_write_buffer,
75 translator: self.translator,
76 thread_pool: self.thread_pool,
77 buffer_pool: self.buffer_pool,
78 }
79 }
80}
81
82async fn root<E: RStorage + Clock + Metrics, H: CHasher, const N: usize>(
84 hasher: &mut StandardHasher<H>,
85 height: u32,
86 status: &CleanBitMap<H::Digest, N>,
87 mmr: &Mmr<E, H::Digest, Clean<DigestOf<H>>>,
88) -> Result<H::Digest, Error> {
89 let grafted_mmr = GraftingStorage::<'_, H, _, _>::new(status, mmr, height);
90 let mmr_root = grafted_mmr.root(hasher).await?;
91
92 let (last_chunk, next_bit) = status.last_chunk();
94 if next_bit == CleanBitMap::<H::Digest, N>::CHUNK_SIZE_BITS {
95 return Ok(mmr_root);
97 }
98
99 hasher.inner().update(last_chunk);
103 let last_chunk_digest = hasher.inner().finalize();
104
105 Ok(CleanBitMap::<H::Digest, N>::partial_chunk_root(
106 hasher.inner(),
107 &mmr_root,
108 next_bit,
109 &last_chunk_digest,
110 ))
111}
112
113async fn merkleize_grafted_bitmap<H, const N: usize>(
122 hasher: &mut StandardHasher<H>,
123 status: DirtyBitMap<H::Digest, N>,
124 mmr: &impl crate::mmr::storage::Storage<H::Digest>,
125 grafting_height: u32,
126) -> Result<CleanBitMap<H::Digest, N>, Error>
127where
128 H: CHasher,
129{
130 let mut grafter = GraftingHasher::new(hasher, grafting_height);
131 grafter
132 .load_grafted_digests(&status.dirty_chunks(), mmr)
133 .await?;
134 status.merkleize(&mut grafter).await.map_err(Into::into)
135}