// ark_vrf/utils/transcript.rs
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
2use ark_std::io;
3use digest::Digest;
4use generic_array::GenericArray;
5use sha2::Sha512;
6
7pub trait Transcript: Clone + io::Read + io::Write {
23 fn new(id: crate::suites::SuiteId) -> Self;
25
26 fn absorb_raw(&mut self, data: &[u8]);
36
37 fn squeeze_raw(&mut self, buf: &mut [u8]);
43
44 fn absorb_serialize(&mut self, obj: &impl CanonicalSerialize) {
49 obj.serialize_compressed(self).unwrap();
50 }
51
52 fn squeeze_deserialize<T: CanonicalDeserialize>(&mut self) -> T {
57 T::deserialize_compressed(self).unwrap()
58 }
59
60 fn to_rng(self) -> TranscriptRng<Self>
62 where
63 Self: Sized,
64 {
65 TranscriptRng(self)
66 }
67}
68
/// An RNG that draws its output by squeezing the wrapped transcript.
pub struct TranscriptRng<T>(T);
71
72impl<T: Transcript> ark_std::rand::RngCore for TranscriptRng<T> {
73 fn next_u32(&mut self) -> u32 {
74 let mut b = [0u8; 4];
75 self.0.squeeze_raw(&mut b);
76 u32::from_le_bytes(b)
77 }
78 fn next_u64(&mut self) -> u64 {
79 let mut b = [0u8; 8];
80 self.0.squeeze_raw(&mut b);
81 u64::from_le_bytes(b)
82 }
83 fn fill_bytes(&mut self, dest: &mut [u8]) {
84 self.0.squeeze_raw(dest);
85 }
86 fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> {
87 self.fill_bytes(dest);
88 Ok(())
89 }
90}
91
// Marker impl: transcript output is treated as cryptographically secure
// randomness (it is hash/XOF output keyed by the absorbed data).
impl<T: Transcript> ark_std::rand::CryptoRng for TranscriptRng<T> {}
93
/// A [`Transcript`] backed by any extendable-output function (XOF).
///
/// The transcript has two phases: absorb-only until the first squeeze,
/// squeeze-only afterwards (absorbing after a squeeze panics).
pub struct XofTranscript<H: digest::ExtendableOutput + Clone> {
    // Current phase; flips from `Absorbing` to `Squeezing` on first squeeze.
    state: XofState<H>,
}
106
/// Phase of an [`XofTranscript`]: still accepting input, or producing output.
enum XofState<H: digest::ExtendableOutput + Clone> {
    /// Input phase: the hasher is still accepting data.
    Absorbing(H),
    /// Output phase: the finalized XOF reader yields the byte stream.
    Squeezing(H::Reader),
}
111
112impl<H: digest::ExtendableOutput + Default + Clone> Default for XofState<H> {
113 fn default() -> Self {
114 Self::Absorbing(H::default())
115 }
116}
117
118impl<H: digest::ExtendableOutput + Clone> Clone for XofTranscript<H>
119where
120 H::Reader: Clone,
121{
122 fn clone(&self) -> Self {
123 Self {
124 state: match &self.state {
125 XofState::Absorbing(h) => XofState::Absorbing(h.clone()),
126 XofState::Squeezing(r) => XofState::Squeezing(r.clone()),
127 },
128 }
129 }
130}
131
132impl<H: digest::ExtendableOutput + Default + Clone> XofTranscript<H> {
133 fn reader(&mut self) -> &mut H::Reader {
135 if let XofState::Absorbing(_) = &self.state {
136 let XofState::Absorbing(h) = core::mem::take(&mut self.state) else {
137 unreachable!()
138 };
139 self.state = XofState::Squeezing(h.finalize_xof());
140 }
141 let XofState::Squeezing(reader) = &mut self.state else {
142 unreachable!()
143 };
144 reader
145 }
146}
147
148impl<H: digest::ExtendableOutput + Default + Clone> io::Read for XofTranscript<H>
149where
150 H::Reader: Clone,
151{
152 fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
153 self.squeeze_raw(buf);
154 Ok(buf.len())
155 }
156}
157
158impl<H: digest::ExtendableOutput + Default + Clone> io::Write for XofTranscript<H>
159where
160 H::Reader: Clone,
161{
162 fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
163 self.absorb_raw(buf);
164 Ok(buf.len())
165 }
166
167 fn flush(&mut self) -> io::Result<()> {
168 Ok(())
169 }
170}
171
172impl<H: digest::ExtendableOutput + Default + Clone> Transcript for XofTranscript<H>
173where
174 H::Reader: Clone,
175{
176 fn new(id: crate::suites::SuiteId) -> Self {
177 let mut h = H::default();
178 h.update(&id.to_bytes());
179 Self {
180 state: XofState::Absorbing(h),
181 }
182 }
183
184 fn absorb_raw(&mut self, data: &[u8]) {
185 match &mut self.state {
186 XofState::Absorbing(h) => h.update(data),
187 XofState::Squeezing { .. } => panic!("cannot absorb after squeeze"),
188 }
189 }
190
191 fn squeeze_raw(&mut self, buf: &mut [u8]) {
192 use digest::XofReader;
193 self.reader().read(buf);
194 }
195}
196
/// Wraps a fixed-output [`Digest`] so it can be used where an
/// extendable-output function is required; the final digest is expanded
/// in counter mode by [`DigestXofReader`].
#[derive(Clone)]
pub struct DigestXof<H: Digest + Clone>(H);
210
211impl<H: Digest + Clone> Default for DigestXof<H> {
212 fn default() -> Self {
213 Self(H::new())
214 }
215}
216
217impl<H: Digest + Clone> digest::Update for DigestXof<H> {
218 fn update(&mut self, data: &[u8]) {
219 self.0.update(data);
220 }
221}
222
impl<H: Digest + Clone> digest::OutputSizeUser for DigestXof<H> {
    // One expansion block per inner-digest invocation, so the sizes match.
    type OutputSize = H::OutputSize;
}
226
227impl<H: Digest + Clone> digest::ExtendableOutput for DigestXof<H> {
228 type Reader = DigestXofReader<H>;
229
230 fn finalize_xof(self) -> Self::Reader {
231 let seed = self.0.finalize();
232 let buffer = H::new()
233 .chain_update(&seed)
234 .chain_update(0u32.to_le_bytes())
235 .finalize();
236 DigestXofReader {
237 seed,
238 counter: 1,
239 buffer,
240 buf_offset: 0,
241 }
242 }
243}
244
/// Counter-mode output stream for [`DigestXof`]: block `i` is
/// `H(seed || i_le)`, where `seed` is the finalized inner digest.
#[derive(Clone)]
pub struct DigestXofReader<H: Digest> {
    // Finalized digest of all absorbed input; domain of every block.
    seed: GenericArray<u8, H::OutputSize>,
    // Index of the next block to generate (block 0 is prefilled).
    counter: u32,
    // Most recently generated block, possibly partially consumed.
    buffer: GenericArray<u8, H::OutputSize>,
    // Number of bytes of `buffer` already handed out.
    buf_offset: usize,
}
253
254impl<H: Digest> digest::XofReader for DigestXofReader<H> {
255 fn read(&mut self, buf: &mut [u8]) {
256 let mut remaining = buf;
257 while !remaining.is_empty() {
258 if self.buf_offset >= self.buffer.len() {
259 self.buffer = H::new()
260 .chain_update(&self.seed)
261 .chain_update(self.counter.to_le_bytes())
262 .finalize();
263 self.counter += 1;
264 self.buf_offset = 0;
265 }
266 let avail = self.buffer.len() - self.buf_offset;
267 let take = avail.min(remaining.len());
268 remaining[..take]
269 .copy_from_slice(&self.buffer[self.buf_offset..self.buf_offset + take]);
270 self.buf_offset += take;
271 remaining = &mut remaining[take..];
272 }
273 }
274}
275
/// Transcript built from a fixed-output hash (SHA-512 by default),
/// turned into an XOF via [`DigestXof`]'s counter-mode expansion.
pub type HashTranscript<H = Sha512> = XofTranscript<DigestXof<H>>;
290
/// Transcript backed directly by the SHAKE-128 XOF (no expansion adapter).
#[cfg(feature = "shake128")]
pub type Shake128Transcript = XofTranscript<sha3::Shake128>;
294
#[cfg(test)]
mod tests {
    // Instantiates the same behavioral test suite for each transcript
    // type, namespaced under `$mod`.
    macro_rules! transcript_tests {
        ($T:ty, $mod:ident) => {
            mod $mod {
                use super::super::*;
                use crate::suites::SuiteId;

                const ID_A: SuiteId = SuiteId::new(1, 2, 3, 4);
                const ID_B: SuiteId = SuiteId::new(5, 6, 7, 8);

                // Same id + same input must yield the same output stream.
                #[test]
                fn deterministic_squeeze() {
                    let mut t1 = <$T>::new(ID_A);
                    t1.absorb_raw(b"hello");
                    let mut out1 = [0u8; 64];
                    t1.squeeze_raw(&mut out1);

                    let mut t2 = <$T>::new(ID_A);
                    t2.absorb_raw(b"hello");
                    let mut out2 = [0u8; 64];
                    t2.squeeze_raw(&mut out2);
                    assert_eq!(out1, out2);
                }

                // Squeezing in several chunks must match one bulk squeeze
                // (exercises internal block boundaries of the stream).
                #[test]
                fn incremental_matches_bulk() {
                    let mut t1 = <$T>::new(ID_A);
                    t1.absorb_raw(b"data");
                    let mut t2 = t1.clone();

                    let mut bulk = [0u8; 100];
                    t1.squeeze_raw(&mut bulk);

                    let mut inc = [0u8; 100];
                    t2.squeeze_raw(&mut inc[..10]);
                    t2.squeeze_raw(&mut inc[10..64]);
                    t2.squeeze_raw(&mut inc[64..]);
                    assert_eq!(bulk, inc);
                }

                // A clone forks the state: divergent absorbs after the fork
                // must produce divergent outputs.
                #[test]
                fn clone_produces_independent_streams() {
                    let mut t = <$T>::new(ID_A);
                    t.absorb_raw(b"shared");

                    let mut fork = t.clone();
                    t.absorb_raw(b"branch_a");
                    fork.absorb_raw(b"branch_b");

                    let mut a = [0u8; 32];
                    let mut b = [0u8; 32];
                    t.squeeze_raw(&mut a);
                    fork.squeeze_raw(&mut b);
                    assert_ne!(a, b);
                }

                // The absorb-then-squeeze phase order is enforced by panic.
                #[test]
                #[should_panic(expected = "cannot absorb after squeeze")]
                fn absorb_after_squeeze_panics() {
                    let mut t = <$T>::new(ID_A);
                    t.absorb_raw(b"x");
                    let mut out = [0u8; 1];
                    t.squeeze_raw(&mut out);
                    t.absorb_raw(b"y");
                }

                // Domain separation: different suite ids must diverge even
                // on identical absorbed input.
                #[test]
                fn different_labels_produce_different_output() {
                    let mut t1 = <$T>::new(ID_A);
                    let mut t2 = <$T>::new(ID_B);
                    t1.absorb_raw(b"same");
                    t2.absorb_raw(b"same");
                    let mut o1 = [0u8; 32];
                    let mut o2 = [0u8; 32];
                    t1.squeeze_raw(&mut o1);
                    t2.squeeze_raw(&mut o2);
                    assert_ne!(o1, o2);
                }
            }
        };
    }

    transcript_tests!(HashTranscript<sha2::Sha512>, hash_sha512);
    transcript_tests!(HashTranscript<sha2::Sha256>, hash_sha256);

    #[cfg(feature = "shake128")]
    transcript_tests!(Shake128Transcript, shake128_xof);
}