Skip to main content

ark_vrf/utils/
transcript.rs

1use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
2use ark_std::io;
3use digest::Digest;
4use generic_array::GenericArray;
5use sha2::Sha512;
6
7/// Fiat-Shamir transcript with absorb/squeeze interface.
8///
9/// Provides a streaming hash abstraction where data is absorbed into an
10/// internal state and arbitrary-length output is squeezed out.
11///
12/// Implements [`io::Write`] so that serializable types (points, scalars)
13/// can be written directly into the transcript without intermediate buffers.
14///
15/// Implementations do **not** need to handle domain separation or
16/// length-prefixing of variable-length inputs. The protocol layer
17/// takes care of this by absorbing domain-separation tags and explicit
18/// lengths before variable-length data. Since `absorb_raw` is a plain
19/// concatenation into a single hash stream (absorb then squeeze, no
20/// resets), the domain-separation bytes injected by the caller are
21/// sufficient to prevent ambiguous parses.
22pub trait Transcript: Clone + io::Read + io::Write {
23    /// Create a new transcript from the suite identifier.
24    fn new(id: crate::suites::SuiteId) -> Self;
25
26    /// Absorb raw bytes into the transcript.
27    ///
28    /// This is a plain concatenation into the internal hash state.
29    /// Domain separation and length-prefixing of variable-length fields
30    /// are the caller's responsibility (handled by the protocol layer).
31    ///
32    /// # Panics
33    ///
34    /// Panics if called after `squeeze_raw`.
35    fn absorb_raw(&mut self, data: &[u8]);
36
37    /// Squeeze output bytes from the transcript.
38    ///
39    /// The first call finalizes the internal hash and transitions to squeezing.
40    ///
41    /// After the first `squeeze_raw` call, `absorb_raw` must not be called.
42    fn squeeze_raw(&mut self, buf: &mut [u8]);
43
44    /// Absorb a serializable object into the transcript.
45    ///
46    /// Serializes the object directly into the transcript via the
47    /// [`io::Write`] implementation, avoiding intermediate allocations.
48    fn absorb_serialize(&mut self, obj: &impl CanonicalSerialize) {
49        obj.serialize_compressed(self).unwrap();
50    }
51
52    /// Squeeze and deserialize an object from the transcript.
53    ///
54    /// Reads bytes from the squeeze_raw stream via the [`io::Read`]
55    /// implementation and deserializes them directly.
56    fn squeeze_deserialize<T: CanonicalDeserialize>(&mut self) -> T {
57        T::deserialize_compressed(self).unwrap()
58    }
59
60    /// Consume the transcript and return an RNG that draws from the squeeze stream.
61    fn to_rng(self) -> TranscriptRng<Self>
62    where
63        Self: Sized,
64    {
65        TranscriptRng(self)
66    }
67}
68
/// RNG wrapper over a [`Transcript`] squeeze stream.
///
/// Obtained via [`Transcript::to_rng`]; every draw pulls bytes from the
/// underlying transcript's `squeeze_raw`, so the output is deterministic
/// in the absorbed data.
pub struct TranscriptRng<T>(T);
71
72impl<T: Transcript> ark_std::rand::RngCore for TranscriptRng<T> {
73    fn next_u32(&mut self) -> u32 {
74        let mut b = [0u8; 4];
75        self.0.squeeze_raw(&mut b);
76        u32::from_le_bytes(b)
77    }
78    fn next_u64(&mut self) -> u64 {
79        let mut b = [0u8; 8];
80        self.0.squeeze_raw(&mut b);
81        u64::from_le_bytes(b)
82    }
83    fn fill_bytes(&mut self, dest: &mut [u8]) {
84        self.0.squeeze_raw(dest);
85    }
86    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> {
87        self.fill_bytes(dest);
88        Ok(())
89    }
90}
91
92impl<T: Transcript> ark_std::rand::CryptoRng for TranscriptRng<T> {}
93
94// ---------------------------------------------------------------------------
95// XofTranscript: single transcript implementation for all XOF-like hashers
96// ---------------------------------------------------------------------------
97
/// Transcript backed by any [`ExtendableOutput`](digest::ExtendableOutput) hasher.
///
/// All provided transcript variants are built on this type:
/// - [`HashTranscript`]: fixed-output hashes (SHA-512, SHA-256) via [`DigestXof`]
/// - `Shake128Transcript`: SHAKE128 native XOF (requires `shake128` feature)
pub struct XofTranscript<H: digest::ExtendableOutput + Clone> {
    // Current phase: absorbing into the hasher, or squeezing from its reader.
    state: XofState<H>,
}
106
// Two-phase state machine: a transcript starts in `Absorbing` and moves
// irreversibly to `Squeezing` on the first squeeze (see `reader`).
enum XofState<H: digest::ExtendableOutput + Clone> {
    // Still accepting input; wraps the live hasher.
    Absorbing(H),
    // Finalized; wraps the XOF reader producing the output stream.
    Squeezing(H::Reader),
}
111
// `Default` exists so `mem::take` can be used in `reader` to move the
// hasher out of the enum; a fresh default state is never otherwise used.
impl<H: digest::ExtendableOutput + Default + Clone> Default for XofState<H> {
    fn default() -> Self {
        Self::Absorbing(H::default())
    }
}
117
118impl<H: digest::ExtendableOutput + Clone> Clone for XofTranscript<H>
119where
120    H::Reader: Clone,
121{
122    fn clone(&self) -> Self {
123        Self {
124            state: match &self.state {
125                XofState::Absorbing(h) => XofState::Absorbing(h.clone()),
126                XofState::Squeezing(r) => XofState::Squeezing(r.clone()),
127            },
128        }
129    }
130}
131
132impl<H: digest::ExtendableOutput + Default + Clone> XofTranscript<H> {
133    /// Transition to squeezing (if needed) and return the XOF reader.
134    fn reader(&mut self) -> &mut H::Reader {
135        if let XofState::Absorbing(_) = &self.state {
136            let XofState::Absorbing(h) = core::mem::take(&mut self.state) else {
137                unreachable!()
138            };
139            self.state = XofState::Squeezing(h.finalize_xof());
140        }
141        let XofState::Squeezing(reader) = &mut self.state else {
142            unreachable!()
143        };
144        reader
145    }
146}
147
148impl<H: digest::ExtendableOutput + Default + Clone> io::Read for XofTranscript<H>
149where
150    H::Reader: Clone,
151{
152    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
153        self.squeeze_raw(buf);
154        Ok(buf.len())
155    }
156}
157
158impl<H: digest::ExtendableOutput + Default + Clone> io::Write for XofTranscript<H>
159where
160    H::Reader: Clone,
161{
162    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
163        self.absorb_raw(buf);
164        Ok(buf.len())
165    }
166
167    fn flush(&mut self) -> io::Result<()> {
168        Ok(())
169    }
170}
171
172impl<H: digest::ExtendableOutput + Default + Clone> Transcript for XofTranscript<H>
173where
174    H::Reader: Clone,
175{
176    fn new(id: crate::suites::SuiteId) -> Self {
177        let mut h = H::default();
178        h.update(&id.to_bytes());
179        Self {
180            state: XofState::Absorbing(h),
181        }
182    }
183
184    fn absorb_raw(&mut self, data: &[u8]) {
185        match &mut self.state {
186            XofState::Absorbing(h) => h.update(data),
187            XofState::Squeezing { .. } => panic!("cannot absorb after squeeze"),
188        }
189    }
190
191    fn squeeze_raw(&mut self, buf: &mut [u8]) {
192        use digest::XofReader;
193        self.reader().read(buf);
194    }
195}
196
197// ---------------------------------------------------------------------------
198// DigestXof: counter-mode XOF adapter for fixed-output hashes
199// ---------------------------------------------------------------------------
200
/// Wraps any [`Digest`] hash into an [`ExtendableOutput`](digest::ExtendableOutput)
/// function using counter-mode expansion.
///
/// ```text
/// seed = H(absorbed_data)
/// block_i = H(seed || i.to_le_bytes())    for i = 0, 1, 2, ...
/// ```
#[derive(Clone)]
pub struct DigestXof<H: Digest + Clone>(H);
210
// Fresh XOF over an empty absorbed stream.
impl<H: Digest + Clone> Default for DigestXof<H> {
    fn default() -> Self {
        Self(H::new())
    }
}
216
// Absorption is plain delegation to the inner hash's update.
impl<H: Digest + Clone> digest::Update for DigestXof<H> {
    fn update(&mut self, data: &[u8]) {
        self.0.update(data);
    }
}
222
// Report the wrapped hash's output size (one counter-mode block).
impl<H: Digest + Clone> digest::OutputSizeUser for DigestXof<H> {
    type OutputSize = H::OutputSize;
}
226
227impl<H: Digest + Clone> digest::ExtendableOutput for DigestXof<H> {
228    type Reader = DigestXofReader<H>;
229
230    fn finalize_xof(self) -> Self::Reader {
231        let seed = self.0.finalize();
232        let buffer = H::new()
233            .chain_update(&seed)
234            .chain_update(0u32.to_le_bytes())
235            .finalize();
236        DigestXofReader {
237            seed,
238            counter: 1,
239            buffer,
240            buf_offset: 0,
241        }
242    }
243}
244
/// Counter-mode XOF reader for [`DigestXof`].
#[derive(Clone)]
pub struct DigestXofReader<H: Digest> {
    // seed = H(absorbed_data); every output block re-hashes this.
    seed: GenericArray<u8, H::OutputSize>,
    // Index of the NEXT block to generate (the current one is buffered).
    counter: u32,
    // Current output block: H(seed || (counter - 1)).
    buffer: GenericArray<u8, H::OutputSize>,
    // Bytes of `buffer` already handed out.
    buf_offset: usize,
}
253
254impl<H: Digest> digest::XofReader for DigestXofReader<H> {
255    fn read(&mut self, buf: &mut [u8]) {
256        let mut remaining = buf;
257        while !remaining.is_empty() {
258            if self.buf_offset >= self.buffer.len() {
259                self.buffer = H::new()
260                    .chain_update(&self.seed)
261                    .chain_update(self.counter.to_le_bytes())
262                    .finalize();
263                self.counter += 1;
264                self.buf_offset = 0;
265            }
266            let avail = self.buffer.len() - self.buf_offset;
267            let take = avail.min(remaining.len());
268            remaining[..take]
269                .copy_from_slice(&self.buffer[self.buf_offset..self.buf_offset + take]);
270            self.buf_offset += take;
271            remaining = &mut remaining[take..];
272        }
273    }
274}
275
276// ---------------------------------------------------------------------------
277// Type aliases
278// ---------------------------------------------------------------------------
279
/// Hash-based transcript using counter-mode expansion for fixed-output hashes.
///
/// The squeeze output is produced by hashing a seed with an incrementing
/// counter, generating `H::OutputSize` bytes per block:
///
/// ```text
/// seed = H(label || absorbed_data)
/// block_i = H(seed || i.to_le_bytes())    for i = 0, 1, 2, ...
/// ```
pub type HashTranscript<H = Sha512> = XofTranscript<DigestXof<H>>;
290
/// SHAKE128 native XOF transcript (no counter-mode adapter needed).
#[cfg(feature = "shake128")]
pub type Shake128Transcript = XofTranscript<sha3::Shake128>;
294
#[cfg(test)]
mod tests {
    // Generates the same test suite for every transcript type `$T`,
    // each in its own submodule `$mod`.
    macro_rules! transcript_tests {
        ($T:ty, $mod:ident) => {
            mod $mod {
                use super::super::*;
                use crate::suites::SuiteId;

                // Two distinct suite ids to check label separation.
                const ID_A: SuiteId = SuiteId::new(1, 2, 3, 4);
                const ID_B: SuiteId = SuiteId::new(5, 6, 7, 8);

                // Same id + same input must yield the same squeeze output.
                #[test]
                fn deterministic_squeeze() {
                    let mut t1 = <$T>::new(ID_A);
                    t1.absorb_raw(b"hello");
                    let mut out1 = [0u8; 64];
                    t1.squeeze_raw(&mut out1);

                    let mut t2 = <$T>::new(ID_A);
                    t2.absorb_raw(b"hello");
                    let mut out2 = [0u8; 64];
                    t2.squeeze_raw(&mut out2);
                    assert_eq!(out1, out2);
                }

                // Squeezing in several chunks must equal one bulk squeeze
                // (exercises block-boundary handling in the reader).
                #[test]
                fn incremental_matches_bulk() {
                    let mut t1 = <$T>::new(ID_A);
                    t1.absorb_raw(b"data");
                    let mut t2 = t1.clone();

                    let mut bulk = [0u8; 100];
                    t1.squeeze_raw(&mut bulk);

                    let mut inc = [0u8; 100];
                    t2.squeeze_raw(&mut inc[..10]);
                    t2.squeeze_raw(&mut inc[10..64]);
                    t2.squeeze_raw(&mut inc[64..]);
                    assert_eq!(bulk, inc);
                }

                // A cloned transcript forks the state: divergent absorbs
                // must produce divergent outputs.
                #[test]
                fn clone_produces_independent_streams() {
                    let mut t = <$T>::new(ID_A);
                    t.absorb_raw(b"shared");

                    let mut fork = t.clone();
                    t.absorb_raw(b"branch_a");
                    fork.absorb_raw(b"branch_b");

                    let mut a = [0u8; 32];
                    let mut b = [0u8; 32];
                    t.squeeze_raw(&mut a);
                    fork.squeeze_raw(&mut b);
                    assert_ne!(a, b);
                }

                // The absorb-then-squeeze ordering is enforced by panic.
                #[test]
                #[should_panic(expected = "cannot absorb after squeeze")]
                fn absorb_after_squeeze_panics() {
                    let mut t = <$T>::new(ID_A);
                    t.absorb_raw(b"x");
                    let mut out = [0u8; 1];
                    t.squeeze_raw(&mut out);
                    t.absorb_raw(b"y");
                }

                // Different suite ids act as domain separators.
                #[test]
                fn different_labels_produce_different_output() {
                    let mut t1 = <$T>::new(ID_A);
                    let mut t2 = <$T>::new(ID_B);
                    t1.absorb_raw(b"same");
                    t2.absorb_raw(b"same");
                    let mut o1 = [0u8; 32];
                    let mut o2 = [0u8; 32];
                    t1.squeeze_raw(&mut o1);
                    t2.squeeze_raw(&mut o2);
                    assert_ne!(o1, o2);
                }
            }
        };
    }

    transcript_tests!(HashTranscript<sha2::Sha512>, hash_sha512);
    transcript_tests!(HashTranscript<sha2::Sha256>, hash_sha256);

    #[cfg(feature = "shake128")]
    transcript_tests!(Shake128Transcript, shake128_xof);
}