// p3_uni_stark/verifier.rs

1//! See [`crate::prover`] for an overview of the protocol and a more detailed soundness analysis.
2
3use alloc::string::String;
4use alloc::vec::Vec;
5use alloc::{format, vec};
6
7use itertools::Itertools;
8use p3_air::Air;
9use p3_challenger::{CanObserve, FieldChallenger};
10use p3_commit::{Pcs, PolynomialSpace};
11use p3_field::{BasedVectorSpace, Field, PrimeCharacteristicRing};
12use p3_matrix::dense::RowMajorMatrixView;
13use p3_matrix::stack::VerticalPair;
14use p3_util::zip_eq::zip_eq;
15use thiserror::Error;
16use tracing::instrument;
17
18use crate::symbolic_builder::{SymbolicAirBuilder, get_log_num_quotient_chunks};
19use crate::{
20    Domain, PcsError, PreprocessedVerifierKey, Proof, StarkGenericConfig, Val,
21    VerifierConstraintFolder,
22};
23
24/// Recomposes the quotient polynomial from its chunks evaluated at a point.
25///
26/// Given quotient chunks and their domains, this computes the Lagrange
27/// interpolation coefficients (zps) and reconstructs quotient(zeta).
28pub fn recompose_quotient_from_chunks<SC>(
29    quotient_chunks_domains: &[Domain<SC>],
30    quotient_chunks: &[Vec<SC::Challenge>],
31    zeta: SC::Challenge,
32) -> SC::Challenge
33where
34    SC: StarkGenericConfig,
35{
36    let zps = quotient_chunks_domains
37        .iter()
38        .enumerate()
39        .map(|(i, domain)| {
40            quotient_chunks_domains
41                .iter()
42                .enumerate()
43                .filter(|(j, _)| *j != i)
44                .map(|(_, other_domain)| {
45                    other_domain.vanishing_poly_at_point(zeta)
46                        * other_domain
47                            .vanishing_poly_at_point(domain.first_point())
48                            .inverse()
49                })
50                .product::<SC::Challenge>()
51        })
52        .collect_vec();
53
54    quotient_chunks
55        .iter()
56        .enumerate()
57        .map(|(ch_i, ch)| {
58            // We checked in valid_shape the length of "ch" is equal to
59            // <SC::Challenge as BasedVectorSpace<Val<SC>>>::DIMENSION. Hence
60            // the unwrap() will never panic.
61            zps[ch_i]
62                * ch.iter()
63                    .enumerate()
64                    .map(|(e_i, &c)| SC::Challenge::ith_basis_element(e_i).unwrap() * c)
65                    .sum::<SC::Challenge>()
66        })
67        .sum::<SC::Challenge>()
68}
69
70/// Verifies that the folded constraints match the quotient polynomial at zeta.
71///
72/// This evaluates the [`Air`] constraints at the out-of-domain point and checks
73/// that constraints(zeta) / Z_H(zeta) = quotient(zeta).
74#[allow(clippy::too_many_arguments)]
75pub fn verify_constraints<SC, A, PcsErr>(
76    air: &A,
77    trace_local: &[SC::Challenge],
78    trace_next: &[SC::Challenge],
79    preprocessed_local: Option<&[SC::Challenge]>,
80    preprocessed_next: Option<&[SC::Challenge]>,
81    public_values: &[Val<SC>],
82    trace_domain: Domain<SC>,
83    zeta: SC::Challenge,
84    alpha: SC::Challenge,
85    quotient: SC::Challenge,
86) -> Result<(), VerificationError<PcsErr>>
87where
88    SC: StarkGenericConfig,
89    A: for<'a> Air<VerifierConstraintFolder<'a, SC>>,
90    PcsErr: core::fmt::Debug,
91{
92    let sels = trace_domain.selectors_at_point(zeta);
93
94    let main = VerticalPair::new(
95        RowMajorMatrixView::new_row(trace_local),
96        RowMajorMatrixView::new_row(trace_next),
97    );
98
99    let preprocessed = match (preprocessed_local, preprocessed_next) {
100        (Some(local), Some(next)) => Some(VerticalPair::new(
101            RowMajorMatrixView::new_row(local),
102            RowMajorMatrixView::new_row(next),
103        )),
104        _ => None,
105    };
106
107    let mut folder = VerifierConstraintFolder {
108        main,
109        preprocessed,
110        public_values,
111        is_first_row: sels.is_first_row,
112        is_last_row: sels.is_last_row,
113        is_transition: sels.is_transition,
114        alpha,
115        accumulator: SC::Challenge::ZERO,
116    };
117    air.eval(&mut folder);
118    let folded_constraints = folder.accumulator;
119
120    // Check that constraints(zeta) / Z_H(zeta) = quotient(zeta)
121    if folded_constraints * sels.inv_vanishing != quotient {
122        return Err(VerificationError::OodEvaluationMismatch { index: None });
123    }
124
125    Ok(())
126}
127
128/// Validates and commits the preprocessed trace if present.
129/// Returns the preprocessed width and its commitment hash (available iff width > 0).
130#[allow(clippy::type_complexity)]
131fn process_preprocessed_trace<SC, A>(
132    air: &A,
133    opened_values: &crate::proof::OpenedValues<SC::Challenge>,
134    preprocessed_vk: Option<&PreprocessedVerifierKey<SC>>,
135) -> Result<
136    (
137        usize,
138        Option<<SC::Pcs as Pcs<SC::Challenge, SC::Challenger>>::Commitment>,
139    ),
140    VerificationError<PcsError<SC>>,
141>
142where
143    SC: StarkGenericConfig,
144    A: for<'a> Air<VerifierConstraintFolder<'a, SC>>,
145{
146    // Determine expected preprocessed width.
147    // - If a verifier key is provided, trust its width.
148    // - Otherwise, derive width from the AIR's preprocessed trace (if any).
149    let preprocessed_width = preprocessed_vk
150        .map(|vk| vk.width)
151        .or_else(|| air.preprocessed_trace().as_ref().map(|m| m.width))
152        .unwrap_or(0);
153
154    // Check that the proof's opened preprocessed values match the expected width.
155    let preprocessed_local_len = opened_values
156        .preprocessed_local
157        .as_ref()
158        .map_or(0, |v| v.len());
159    let preprocessed_next_len = opened_values
160        .preprocessed_next
161        .as_ref()
162        .map_or(0, |v| v.len());
163    if preprocessed_width != preprocessed_local_len || preprocessed_width != preprocessed_next_len {
164        // Verifier expects preprocessed trace while proof does not have it, or vice versa
165        return Err(VerificationError::InvalidProofShape);
166    }
167
168    // Validate consistency between width, verifier key, and zk settings.
169    match (preprocessed_width, preprocessed_vk) {
170        // Case: No preprocessed columns.
171        //
172        // Valid only if no verifier key is provided.
173        (0, None) => Ok((0, None)),
174
175        // Case: Preprocessed columns exist.
176        //
177        // Valid only if VK exists, widths match, and we are NOT in zk mode.
178        (w, Some(vk)) if w == vk.width => Ok((w, Some(vk.commitment.clone()))),
179
180        // Catch-all for invalid states, such as:
181        // - Width is 0 but VK is provided.
182        // - Width > 0 but VK is missing.
183        // - Width > 0 but VK width mismatches the expected width.
184        _ => Err(VerificationError::InvalidProofShape),
185    }
186}
187
/// Verifies a STARK proof for `air` against `public_values`, for AIRs without
/// an externally committed preprocessed trace.
///
/// Thin wrapper around [`verify_with_preprocessed`] passing `None` as the
/// preprocessed verifier key.
#[instrument(skip_all)]
pub fn verify<SC, A>(
    config: &SC,
    air: &A,
    proof: &Proof<SC>,
    public_values: &[Val<SC>],
) -> Result<(), VerificationError<PcsError<SC>>>
where
    SC: StarkGenericConfig,
    A: Air<SymbolicAirBuilder<Val<SC>>> + for<'a> Air<VerifierConstraintFolder<'a, SC>>,
{
    verify_with_preprocessed(config, air, proof, public_values, None)
}
201
/// Verifies a STARK proof for `air` against `public_values`, optionally
/// checking an externally committed preprocessed trace via `preprocessed_vk`.
///
/// Replays the prover's Fiat-Shamir transcript (degree, preprocessed width,
/// commitments, public values, then the challenges `alpha` and `zeta`),
/// batch-verifies all claimed PCS openings, recomposes the quotient from its
/// opened chunks, and finally checks the constraint identity at the
/// out-of-domain point. The order of `observe`/`sample` calls must match the
/// prover exactly or verification will fail.
#[instrument(skip_all)]
pub fn verify_with_preprocessed<SC, A>(
    config: &SC,
    air: &A,
    proof: &Proof<SC>,
    public_values: &[Val<SC>],
    preprocessed_vk: Option<&PreprocessedVerifierKey<SC>>,
) -> Result<(), VerificationError<PcsError<SC>>>
where
    SC: StarkGenericConfig,
    A: Air<SymbolicAirBuilder<Val<SC>>> + for<'a> Air<VerifierConstraintFolder<'a, SC>>,
{
    let Proof {
        commitments,
        opened_values,
        opening_proof,
        degree_bits,
    } = proof;

    let pcs = config.pcs();
    // Height of the committed trace.
    let degree = 1 << degree_bits;
    let trace_domain = pcs.natural_domain_for_degree(degree);
    // TODO: allow moving preprocessed commitment to preprocess time, if known in advance
    let (preprocessed_width, preprocessed_commit) =
        process_preprocessed_trace::<SC, A>(air, opened_values, preprocessed_vk)?;

    // Ensure the preprocessed trace and main trace have the same height.
    if let Some(vk) = preprocessed_vk
        && preprocessed_width > 0
        && vk.degree_bits != *degree_bits
    {
        return Err(VerificationError::InvalidProofShape);
    }

    // Determined symbolically from the AIR's maximum constraint degree.
    let log_num_quotient_chunks = get_log_num_quotient_chunks::<Val<SC>, A>(
        air,
        preprocessed_width,
        public_values.len(),
        config.is_zk(),
    );
    // With ZK enabled (is_zk() == 1) the quotient is randomized, doubling the chunk count.
    let num_quotient_chunks = 1 << (log_num_quotient_chunks + config.is_zk());
    let mut challenger = config.initialise_challenger();
    // Under ZK the committed trace extends the original one, so the original
    // ("initial") trace domain has half the committed degree.
    let init_trace_domain = pcs.natural_domain_for_degree(degree >> (config.is_zk()));

    // The quotient polynomial is evaluated on a disjoint coset large enough
    // for its degree, then split into `num_quotient_chunks` sub-domains.
    let quotient_domain =
        trace_domain.create_disjoint_domain(1 << (degree_bits + log_num_quotient_chunks));
    let quotient_chunks_domains = quotient_domain.split_domains(num_quotient_chunks);

    // The prover commits to the chunks on domains enlarged by the ZK factor.
    let randomized_quotient_chunks_domains = quotient_chunks_domains
        .iter()
        .map(|domain| pcs.natural_domain_for_degree(domain.size() << (config.is_zk())))
        .collect_vec();
    // Check that the random commitments are/are not present depending on the ZK setting.
    // - If ZK is enabled, the prover should have random commitments.
    // - If ZK is not enabled, the prover should not have random commitments.
    if (opened_values.random.is_some() != SC::Pcs::ZK)
        || (commitments.random.is_some() != SC::Pcs::ZK)
    {
        return Err(VerificationError::RandomizationError);
    }

    // Reject proofs whose opened values have unexpected dimensions before using them.
    let air_width = A::width(air);
    let valid_shape = opened_values.trace_local.len() == air_width
        && opened_values.trace_next.len() == air_width
        && opened_values.quotient_chunks.len() == num_quotient_chunks
        && opened_values
            .quotient_chunks
            .iter()
            .all(|qc| qc.len() == SC::Challenge::DIMENSION)
        // We've already checked that opened_values.random is present if and only if ZK is enabled.
        && opened_values.random.as_ref().is_none_or(|r_comm| r_comm.len() == SC::Challenge::DIMENSION);
    if !valid_shape {
        return Err(VerificationError::InvalidProofShape);
    }

    // Observe the instance.
    challenger.observe(Val::<SC>::from_usize(proof.degree_bits));
    challenger.observe(Val::<SC>::from_usize(proof.degree_bits - config.is_zk()));
    challenger.observe(Val::<SC>::from_usize(preprocessed_width));
    // TODO: Might be best practice to include other instance data here in the transcript, like some
    // encoding of the AIR. This protects against transcript collisions between distinct instances.
    // Practically speaking though, the only related known attack is from failing to include public
    // values. It's not clear if failing to include other instance data could enable a transcript
    // collision, since most such changes would completely change the set of satisfying witnesses.
    challenger.observe(commitments.trace.clone());
    if preprocessed_width > 0 {
        // process_preprocessed_trace guarantees the commitment exists when width > 0.
        challenger.observe(preprocessed_commit.as_ref().unwrap().clone());
    }
    challenger.observe_slice(public_values);

    // Get the first Fiat Shamir challenge which will be used to combine all constraint polynomials
    // into a single polynomial.
    //
    // Soundness Error: n/|EF| where n is the number of constraints.
    let alpha = challenger.sample_algebra_element();
    challenger.observe(commitments.quotient_chunks.clone());

    // We've already checked that commitments.random is present if and only if ZK is enabled.
    // Observe the random commitment if it is present.
    if let Some(r_commit) = commitments.random.clone() {
        challenger.observe(r_commit);
    }

    // Get an out-of-domain point to open our values at.
    //
    // Soundness Error: dN/|EF| where `N` is the trace length and our constraint polynomial has degree `d`.
    let zeta = challenger.sample_algebra_element();
    // The point at which the "next row" openings are claimed (zeta advanced by
    // one step of the initial trace domain).
    let zeta_next = init_trace_domain
        .next_point(zeta)
        .ok_or(VerificationError::NextPointUnavailable)?;

    // We've already checked that commitments.random and opened_values.random are present if and only if ZK is enabled.
    let mut coms_to_verify = if let Some(random_commit) = &commitments.random {
        let random_values = opened_values
            .random
            .as_ref()
            .ok_or(VerificationError::RandomizationError)?;
        vec![(
            random_commit.clone(),
            vec![(trace_domain, vec![(zeta, random_values.clone())])],
        )]
    } else {
        vec![]
    };
    coms_to_verify.extend(vec![
        (
            commitments.trace.clone(),
            vec![(
                trace_domain,
                vec![
                    (zeta, opened_values.trace_local.clone()),
                    (zeta_next, opened_values.trace_next.clone()),
                ],
            )],
        ),
        (
            commitments.quotient_chunks.clone(),
            // Check the commitment on the randomized domains.
            zip_eq(
                randomized_quotient_chunks_domains.iter(),
                &opened_values.quotient_chunks,
                VerificationError::InvalidProofShape,
            )?
            .map(|(domain, values)| (*domain, vec![(zeta, values.clone())]))
            .collect_vec(),
        ),
    ]);

    // Add preprocessed commitment verification if present
    if preprocessed_width > 0 {
        // The unwraps are safe: process_preprocessed_trace checked that both
        // openings have length preprocessed_width (> 0), so they are Some.
        coms_to_verify.push((
            preprocessed_commit.unwrap(),
            vec![(
                trace_domain,
                vec![
                    (zeta, opened_values.preprocessed_local.clone().unwrap()),
                    (zeta_next, opened_values.preprocessed_next.clone().unwrap()),
                ],
            )],
        ));
    }

    // Batch-verify every claimed opening against its commitment.
    pcs.verify(coms_to_verify, opening_proof, &mut challenger)
        .map_err(VerificationError::InvalidOpeningArgument)?;

    // Reassemble quotient(zeta) from the opened chunk evaluations.
    let quotient = recompose_quotient_from_chunks::<SC>(
        &quotient_chunks_domains,
        &opened_values.quotient_chunks,
        zeta,
    );

    // Finally, check constraints(zeta) / Z_H(zeta) = quotient(zeta).
    verify_constraints::<SC, A, PcsError<SC>>(
        air,
        &opened_values.trace_local,
        &opened_values.trace_next,
        opened_values.preprocessed_local.as_deref(),
        opened_values.preprocessed_next.as_deref(),
        public_values,
        init_trace_domain,
        zeta,
        alpha,
        quotient,
    )?;

    Ok(())
}
388
/// Defines errors that can occur during lookup verification.
#[derive(Debug)]
pub enum LookupError {
    /// Error indicating that the global cumulative sum is incorrect.
    /// The optional string carries extra human-readable diagnostic context.
    GlobalCumulativeMismatch(Option<String>),
}
395
/// Errors that can arise while verifying a STARK proof.
///
/// Generic over `PcsErr` so it can wrap opening failures from any polynomial
/// commitment scheme; display text comes from the `thiserror` attributes.
#[derive(Debug, Error)]
pub enum VerificationError<PcsErr>
where
    PcsErr: core::fmt::Debug,
{
    /// The proof's dimensions do not match what the verifier expects
    /// (trace widths, number of quotient chunks, preprocessed shape, ...).
    #[error("invalid proof shape")]
    InvalidProofShape,
    /// An error occurred while verifying the claimed openings.
    #[error("invalid opening argument: {0:?}")]
    InvalidOpeningArgument(PcsErr),
    /// Out-of-domain evaluation mismatch, i.e. `constraints(zeta)` did not match
    /// `quotient(zeta) Z_H(zeta)`.
    #[error("out-of-domain evaluation mismatch{}", .index.map(|i| format!(" at index {}", i)).unwrap_or_default())]
    OodEvaluationMismatch { index: Option<usize> },
    /// The FRI batch randomization does not correspond to the ZK setting.
    #[error("randomization error: FRI batch randomization does not match ZK setting")]
    RandomizationError,
    /// The domain does not support computing the next point algebraically.
    #[error(
        "next point unavailable: domain does not support computing the next point algebraically"
    )]
    NextPointUnavailable,
    /// Lookup related error
    #[error("lookup error: {0:?}")]
    LookupError(LookupError),
}
421}