commonware_storage/qmdb/current/proof.rs

use crate::{
    bitmap::{partial_chunk_root, MerkleizedBitMap},
    journal::contiguous::Contiguous,
    mmr::{
        grafting::{Storage as GraftingStorage, Verifier},
        hasher::Hasher,
        journaled::Mmr,
        mem::Clean,
        storage::Storage,
        verification, Location, Proof,
    },
    qmdb::Error,
};
use commonware_codec::Codec;
use commonware_cryptography::{Digest, Hasher as CHasher};
use commonware_runtime::{Clock, Metrics, Storage as RStorage};
use commonware_utils::bitmap::BitMap;
use core::ops::Range;
use futures::future::try_join_all;
use std::num::NonZeroU64;
use tracing::debug;

/// A proof that a range of operations is committed by a given database root.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct RangeProof<D: Digest> {
    /// Proof over the grafted MMR for the proven range of operations.
    pub proof: Proof<D>,

    /// Digest of the status bitmap's last chunk, present only when that chunk is partially
    /// filled.
    pub partial_chunk_digest: Option<D>,
}

impl<D: Digest> RangeProof<D> {
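    /// Generates a proof for the operations in `range` over `mmr` grafted with the status bitmap
    /// at `grafting_height`. If the bitmap's last chunk is only partially filled, its digest is
    /// carried alongside the proof so verifiers can recompute the committed root.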
    pub async fn new<
        E: RStorage + Clock + Metrics,
        H: CHasher<Digest = D>,
        S: Storage<D>,
        const N: usize,
    >(
        hasher: &mut H,
        status: &MerkleizedBitMap<E, D, N>,
        grafting_height: u32,
        mmr: &S,
        range: Range<Location>,
    ) -> Result<Self, Error> {
        let grafted_mmr = GraftingStorage::<'_, H, _, _>::new(status, mmr, grafting_height);
        let proof = verification::range_proof(&grafted_mmr, range).await?;

        let (last_chunk, next_bit) = status.last_chunk();
        let partial_chunk_digest = if next_bit != MerkleizedBitMap::<E, D, N>::CHUNK_SIZE_BITS {
            hasher.update(last_chunk);
            Some(hasher.finalize())
        } else {
            None
        };

        Ok(Self {
            proof,
            partial_chunk_digest,
        })
    }

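    /// Convenience wrapper around [RangeProof::new] that proves up to `max_ops` operations
    /// starting at `start_loc`, returning the proof together with the operations read from `log`
    /// and the status-bitmap chunks covering the proven range.
    ///
    /// For example, with N = 32 (256 bits per chunk), locations 300..520 touch chunks
    /// 300 / 256 = 1 through 519 / 256 = 2, so two chunks would be returned.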
    pub async fn new_with_ops<
        E: RStorage + Clock + Metrics,
        H: CHasher<Digest = D>,
        C: Contiguous,
        const N: usize,
    >(
        hasher: &mut H,
        status: &MerkleizedBitMap<E, D, N>,
        height: u32,
        mmr: &Mmr<E, D, Clean<D>>,
        log: &C,
        start_loc: Location,
        max_ops: NonZeroU64,
    ) -> Result<(Self, Vec<C::Item>, Vec<[u8; N]>), Error> {
        let leaves = mmr.leaves();
        if start_loc >= leaves {
            return Err(crate::mmr::Error::RangeOutOfBounds(start_loc).into());
        }
        let max_loc = start_loc.saturating_add(max_ops.get());
        let end_loc = core::cmp::min(max_loc, leaves);

        let proof = Self::new(hasher, status, height, mmr, start_loc..end_loc).await?;

        let mut ops = Vec::with_capacity((*end_loc - *start_loc) as usize);
        let futures = (*start_loc..*end_loc)
            .map(|i| log.read(i))
            .collect::<Vec<_>>();
        try_join_all(futures)
            .await?
            .into_iter()
            .for_each(|op| ops.push(op));

        let chunk_bits = BitMap::<N>::CHUNK_SIZE_BITS;
        let start = *start_loc / chunk_bits;
        let end = (*end_loc - 1) / chunk_bits;
        let mut chunks = Vec::with_capacity((end - start + 1) as usize);
        for i in start..=end {
            let bit_offset = i * chunk_bits;
            let chunk = *status.get_chunk_containing(bit_offset);
            chunks.push(chunk);
        }

        Ok((proof, ops, chunks))
    }

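    /// Returns true if `self` proves that `ops` are the operations at locations
    /// `start_loc..start_loc + ops.len()` under the commitment `root`, with `chunks` supplying
    /// the status-bitmap chunks covering that range.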
    pub fn verify<H: CHasher<Digest = D>, O: Codec, const N: usize>(
        &self,
        hasher: &mut H,
        grafting_height: u32,
        start_loc: Location,
        ops: &[O],
        chunks: &[[u8; N]],
        root: &H::Digest,
    ) -> bool {
        if ops.is_empty() || chunks.is_empty() {
            debug!("verification failed, empty input");
            return false;
        }

        let Some(end_loc) = start_loc.checked_add(ops.len() as u64) else {
            debug!("verification failed, end_loc overflow");
            return false;
        };

        let leaves = self.proof.leaves;
        if end_loc > leaves {
            debug!(
                loc = ?end_loc,
                ?leaves,
                "verification failed, invalid range"
            );
            return false;
        }

        let chunk_bits = BitMap::<N>::CHUNK_SIZE_BITS;
        let start = *start_loc / chunk_bits;
        let end = (*end_loc.saturating_sub(1)) / chunk_bits;
        let expected = end - start + 1;
        let actual = chunks.len() as u64;
        if expected != actual {
            debug!(expected, actual, "verification failed, chunk mismatch");
            return false;
        }

        let elements = ops.iter().map(|op| op.encode()).collect::<Vec<_>>();

        let chunk_vec = chunks.iter().map(|c| c.as_ref()).collect::<Vec<_>>();
        let start_chunk_loc = *start_loc / BitMap::<N>::CHUNK_SIZE_BITS;
        let mut verifier = Verifier::<H>::new(
            grafting_height,
            Location::new_unchecked(start_chunk_loc),
            chunk_vec,
        );

        let next_bit = *leaves % BitMap::<N>::CHUNK_SIZE_BITS;
        if next_bit == 0 {
            return self
                .proof
                .verify_range_inclusion(&mut verifier, &elements, start_loc, root);
        }

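        // The bitmap ends in a partial chunk, so the committed root folds that chunk's digest
        // into the grafted MMR root. Recover the MMR root below and recombine it the same way.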
        let Some(last_chunk_digest) = self.partial_chunk_digest else {
            debug!("proof has no partial chunk digest");
            return false;
        };

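        // If the proven range extends into the bitmap's last (partial) chunk, the supplied chunk
        // must match the digest carried in the proof.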
        if *(end_loc - 1) / BitMap::<N>::CHUNK_SIZE_BITS == *leaves / BitMap::<N>::CHUNK_SIZE_BITS {
            let Some(last_chunk) = chunks.last() else {
                debug!("chunks is empty");
                return false;
            };
            let expected_last_chunk_digest = verifier.digest(last_chunk);
            if last_chunk_digest != expected_last_chunk_digest {
                debug!("last chunk digest does not match expected value");
                return false;
            }
        }

        let mmr_root = match self
            .proof
            .reconstruct_root(&mut verifier, &elements, start_loc)
        {
            Ok(root) => root,
            Err(error) => {
                debug!(error = ?error, "invalid proof input");
                return false;
            }
        };

        let reconstructed_root =
            partial_chunk_root::<H, N>(hasher, &mmr_root, next_bit, &last_chunk_digest);

        reconstructed_root == *root
    }
}

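// Usage sketch (illustrative only; `hasher`, `status`, `mmr`, `log`, `height`, `start`, `max_ops`,
// and `root` are assumed to come from the caller's surrounding state): a prover bundles a range
// proof with the operations and status chunks it covers, and a verifier replays it against a
// trusted root:
//
//     let (proof, ops, chunks) =
//         RangeProof::new_with_ops(&mut hasher, &status, height, &mmr, &log, start, max_ops)
//             .await?;
//     // ...send (proof, ops, chunks) to the verifier...
//     assert!(proof.verify(&mut hasher, height, start, &ops, &chunks, &root));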
/// A proof that an operation exists at a specific location in the database, bundled with the
/// status-bitmap chunk containing the operation's activity bit.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct OperationProof<D: Digest, const N: usize> {
    /// Location of the proven operation.
    pub loc: Location,

    /// The status-bitmap chunk containing the operation's activity bit.
    pub chunk: [u8; N],

    /// Range proof over the single-operation range `loc..loc + 1`.
    pub range_proof: RangeProof<D>,
}

impl<D: Digest, const N: usize> OperationProof<D, N> {
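    /// Generates a proof for the operation at `loc`, bundling a single-operation [RangeProof]
    /// with the status-bitmap chunk that contains the operation's activity bit.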
    pub async fn new<E: RStorage + Clock + Metrics, H: CHasher<Digest = D>, S: Storage<D>>(
        hasher: &mut H,
        status: &MerkleizedBitMap<E, D, N>,
        grafting_height: u32,
        mmr: &S,
        loc: Location,
    ) -> Result<Self, Error> {
        let range_proof =
            RangeProof::<D>::new(hasher, status, grafting_height, mmr, loc..loc + 1).await?;
        let chunk = *status.get_chunk_containing(*loc);

        Ok(Self {
            loc,
            chunk,
            range_proof,
        })
    }

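    /// Returns true if the operation is active (its bit is set in `chunk`) and the wrapped range
    /// proof verifies `operation` at `self.loc` against `root`.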
    pub fn verify<H: CHasher<Digest = D>, O: Codec>(
        &self,
        hasher: &mut H,
        grafting_height: u32,
        operation: O,
        root: &D,
    ) -> bool {
        if !BitMap::<N>::get_bit_from_chunk(&self.chunk, *self.loc) {
            debug!(
                ?self.loc,
                "proof verification failed, operation is inactive"
            );
            return false;
        }

        self.range_proof.verify(
            hasher,
            grafting_height,
            self.loc,
            &[operation],
            &[self.chunk],
            root,
        )
    }
}