//! Proof types for the current QMDB state (commonware_storage/qmdb/current/proof.rs).

use crate::{
    journal::contiguous::{Contiguous, Reader as _},
    mmr::{hasher::Hasher as _, storage::Storage, verification, Location, Proof},
    qmdb::{current::grafting, Error},
};
use commonware_codec::Codec;
use commonware_cryptography::{Digest, Hasher as CHasher};
use commonware_utils::bitmap::Prunable as BitMap;
use core::ops::Range;
use futures::future::try_join_all;
use std::num::NonZeroU64;
use tracing::debug;
19
/// A proof that a contiguous range of operations is included in the database,
/// together with the digests needed to re-derive the combined root.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct RangeProof<D: Digest> {
    /// The underlying MMR range proof over the grafted operations tree.
    pub proof: Proof<D>,

    /// Digest of the last bitmap chunk, present only when that chunk is
    /// partially filled (the next free bit is not at a chunk boundary).
    pub partial_chunk_digest: Option<D>,

    /// Root digest of the operations structure; folded into the combined
    /// root during verification (see `RangeProof::verify`).
    pub ops_root: D,
}
33
impl<D: Digest> RangeProof<D> {
    /// Create a proof over the operations in `range`.
    ///
    /// `status` is the activity bitmap over operations. If its last chunk is
    /// only partially filled, a digest of that chunk is captured so a
    /// verifier can reconstruct the combined root.
    pub async fn new<H: CHasher<Digest = D>, S: Storage<D>, const N: usize>(
        hasher: &mut H,
        status: &BitMap<N>,
        storage: &S,
        range: Range<Location>,
        ops_root: D,
    ) -> Result<Self, Error> {
        let proof = verification::range_proof(storage, range).await?;

        // Only capture a digest when the final chunk is partial (the next
        // free bit is not exactly at the chunk boundary).
        let (last_chunk, next_bit) = status.last_chunk();
        let partial_chunk_digest = if next_bit != BitMap::<N>::CHUNK_SIZE_BITS {
            hasher.update(last_chunk);
            Some(hasher.finalize())
        } else {
            None
        };

        Ok(Self {
            proof,
            partial_chunk_digest,
            ops_root,
        })
    }

    /// Create a proof over up to `max_ops` operations starting at `start_loc`,
    /// returning the proof together with the proven operations (read from
    /// `log`) and the bitmap chunks that cover the proven range.
    ///
    /// # Errors
    ///
    /// Returns `RangeOutOfBounds` if `start_loc` is past the last operation,
    /// or `OperationPruned` if its bitmap chunk has been pruned.
    pub async fn new_with_ops<
        H: CHasher<Digest = D>,
        C: Contiguous,
        S: Storage<D>,
        const N: usize,
    >(
        hasher: &mut H,
        status: &BitMap<N>,
        storage: &S,
        log: &C,
        start_loc: Location,
        max_ops: NonZeroU64,
        ops_root: D,
    ) -> Result<(Self, Vec<C::Item>, Vec<[u8; N]>), Error> {
        let leaves = Location::new(status.len());
        if start_loc >= leaves {
            return Err(crate::mmr::Error::RangeOutOfBounds(start_loc).into());
        }

        let chunk_bits = BitMap::<N>::CHUNK_SIZE_BITS;
        let start = *start_loc / chunk_bits;
        if (start as usize) < status.pruned_chunks() {
            return Err(Error::OperationPruned(start_loc));
        }

        // Clamp the requested range to the number of existing operations.
        let max_loc = start_loc.saturating_add(max_ops.get());
        let end_loc = core::cmp::min(max_loc, leaves);

        let proof = Self::new(hasher, status, storage, start_loc..end_loc, ops_root).await?;

        // Read all proven operations from the log concurrently.
        let mut ops = Vec::with_capacity((*end_loc - *start_loc) as usize);
        let reader = log.reader().await;
        let futures = (*start_loc..*end_loc)
            .map(|i| reader.read(i))
            .collect::<Vec<_>>();
        try_join_all(futures)
            .await?
            .into_iter()
            .for_each(|op| ops.push(op));

        // Collect every bitmap chunk overlapping the proven range
        // (end_loc > start_loc here, so `end_loc - 1` cannot underflow).
        let end = (*end_loc - 1) / chunk_bits;
        let mut chunks = Vec::with_capacity((end - start + 1) as usize);
        for i in start..=end {
            chunks.push(*status.get_chunk(i as usize));
        }

        Ok((proof, ops, chunks))
    }

    /// Verify that `ops` are the operations starting at `start_loc` and that
    /// `chunks` are the authentic bitmap chunks covering them, against `root`.
    ///
    /// Returns `false` (with a `debug!` trace of the reason) on any mismatch.
    pub fn verify<H: CHasher<Digest = D>, O: Codec, const N: usize>(
        &self,
        hasher: &mut H,
        start_loc: Location,
        ops: &[O],
        chunks: &[[u8; N]],
        root: &H::Digest,
    ) -> bool {
        if ops.is_empty() || chunks.is_empty() {
            debug!("verification failed, empty input");
            return false;
        }

        let Some(end_loc) = start_loc.checked_add(ops.len() as u64) else {
            debug!("verification failed, end_loc overflow");
            return false;
        };

        let leaves = self.proof.leaves;
        if end_loc > leaves {
            debug!(
                loc = ?end_loc,
                ?leaves, "verification failed, invalid range"
            );
            return false;
        }

        // The supplied chunks must exactly cover the proven range.
        let chunk_bits = BitMap::<N>::CHUNK_SIZE_BITS;
        let start = *start_loc / chunk_bits;
        let end = (*end_loc.saturating_sub(1)) / chunk_bits;
        let expected = end - start + 1;
        let actual = chunks.len() as u64;
        if expected != actual {
            debug!(expected, actual, "verification failed, chunk mismatch");
            return false;
        }

        let elements = ops.iter().map(|op| op.encode()).collect::<Vec<_>>();

        let chunk_vec = chunks.iter().map(|c| c.as_ref()).collect::<Vec<_>>();
        let start_chunk_idx = *start_loc / BitMap::<N>::CHUNK_SIZE_BITS;
        let mut verifier =
            grafting::Verifier::<H>::new(grafting::height::<N>(), start_chunk_idx, chunk_vec);

        // A partial last chunk exists whenever the leaf count is not a
        // multiple of the chunk size.
        let next_bit = *leaves % BitMap::<N>::CHUNK_SIZE_BITS;
        let has_partial_chunk = next_bit != 0;

        if has_partial_chunk {
            let Some(last_chunk_digest) = self.partial_chunk_digest else {
                debug!("proof has no partial chunk digest");
                return false;
            };

            // If the proven range extends into the partial last chunk, the
            // final supplied chunk must hash to the committed digest.
            if *(end_loc - 1) / BitMap::<N>::CHUNK_SIZE_BITS
                == *leaves / BitMap::<N>::CHUNK_SIZE_BITS
            {
                let Some(last_chunk) = chunks.last() else {
                    debug!("chunks is empty");
                    return false;
                };
                let expected_last_chunk_digest = verifier.digest(last_chunk);
                if last_chunk_digest != expected_last_chunk_digest {
                    debug!("last chunk digest does not match expected value");
                    return false;
                }
            }
        }

        // Reconstruct the grafted MMR root from the proof and elements.
        let mmr_root = match self
            .proof
            .reconstruct_root(&mut verifier, &elements, start_loc)
        {
            Ok(root) => root,
            Err(error) => {
                debug!(error = ?error, "invalid proof input");
                return false;
            }
        };

        // Fold the ops root, the reconstructed MMR root, and (if present)
        // the partial-chunk data into the combined root and compare.
        hasher.update(&self.ops_root);
        hasher.update(&mmr_root);
        if has_partial_chunk {
            hasher.update(&next_bit.to_be_bytes());
            // Safe: checked above that the digest exists when a partial
            // chunk is present.
            hasher.update(self.partial_chunk_digest.as_ref().unwrap());
        }
        let reconstructed_root = hasher.finalize();
        reconstructed_root == *root
    }
}
224
/// A proof that a single operation is active at a given location.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct OperationProof<D: Digest, const N: usize> {
    /// Location of the proven operation.
    pub loc: Location,

    /// The bitmap chunk containing the activity bit for `loc`.
    pub chunk: [u8; N],

    /// Range proof over the single-operation range `loc..loc + 1`.
    pub range_proof: RangeProof<D>,
}
237
238impl<D: Digest, const N: usize> OperationProof<D, N> {
239 pub async fn new<H: CHasher<Digest = D>, S: Storage<D>>(
246 hasher: &mut H,
247 status: &BitMap<N>,
248 storage: &S,
249 loc: Location,
250 ops_root: D,
251 ) -> Result<Self, Error> {
252 if BitMap::<N>::to_chunk_index(*loc) < status.pruned_chunks() {
254 return Err(Error::OperationPruned(loc));
255 }
256 let range_proof = RangeProof::new(hasher, status, storage, loc..loc + 1, ops_root).await?;
257 let chunk = *status.get_chunk_containing(*loc);
258 Ok(Self {
259 loc,
260 chunk,
261 range_proof,
262 })
263 }
264
265 pub fn verify<H: CHasher<Digest = D>, O: Codec>(
268 &self,
269 hasher: &mut H,
270 operation: O,
271 root: &D,
272 ) -> bool {
273 if !BitMap::<N>::get_bit_from_chunk(&self.chunk, *self.loc) {
276 debug!(
277 ?self.loc,
278 "proof verification failed, operation is inactive"
279 );
280 return false;
281 }
282
283 self.range_proof
284 .verify(hasher, self.loc, &[operation], &[self.chunk], root)
285 }
286}