p3_uni_stark/
verifier.rs

1//! See [`crate::prover`] for an overview of the protocol and a more detailed soundness analysis.
2
3use alloc::string::String;
4use alloc::vec::Vec;
5use alloc::{format, vec};
6
7use itertools::Itertools;
8use p3_air::Air;
9use p3_challenger::{CanObserve, FieldChallenger};
10use p3_commit::{Pcs, PolynomialSpace};
11use p3_field::{BasedVectorSpace, Field, PrimeCharacteristicRing};
12use p3_matrix::dense::RowMajorMatrixView;
13use p3_matrix::stack::VerticalPair;
14use p3_util::zip_eq::zip_eq;
15use thiserror::Error;
16use tracing::instrument;
17
18use crate::symbolic_builder::{SymbolicAirBuilder, get_log_num_quotient_chunks};
19use crate::{
20    Domain, PcsError, PreprocessedVerifierKey, Proof, StarkGenericConfig, Val,
21    VerifierConstraintFolder,
22};
23
24/// Recomposes the quotient polynomial from its chunks evaluated at a point.
25///
26/// Given quotient chunks and their domains, this computes the Lagrange
27/// interpolation coefficients (zps) and reconstructs quotient(zeta).
28pub fn recompose_quotient_from_chunks<SC>(
29    quotient_chunks_domains: &[Domain<SC>],
30    quotient_chunks: &[Vec<SC::Challenge>],
31    zeta: SC::Challenge,
32) -> SC::Challenge
33where
34    SC: StarkGenericConfig,
35{
36    let zps = quotient_chunks_domains
37        .iter()
38        .enumerate()
39        .map(|(i, domain)| {
40            quotient_chunks_domains
41                .iter()
42                .enumerate()
43                .filter(|(j, _)| *j != i)
44                .map(|(_, other_domain)| {
45                    other_domain.vanishing_poly_at_point(zeta)
46                        * other_domain
47                            .vanishing_poly_at_point(domain.first_point())
48                            .inverse()
49                })
50                .product::<SC::Challenge>()
51        })
52        .collect_vec();
53
54    quotient_chunks
55        .iter()
56        .enumerate()
57        .map(|(ch_i, ch)| {
58            // We checked in valid_shape the length of "ch" is equal to
59            // <SC::Challenge as BasedVectorSpace<Val<SC>>>::DIMENSION. Hence
60            // the unwrap() will never panic.
61            zps[ch_i]
62                * ch.iter()
63                    .enumerate()
64                    .map(|(e_i, &c)| SC::Challenge::ith_basis_element(e_i).unwrap() * c)
65                    .sum::<SC::Challenge>()
66        })
67        .sum::<SC::Challenge>()
68}
69
70/// Verifies that the folded constraints match the quotient polynomial at zeta.
71///
72/// This evaluates the [`Air`] constraints at the out-of-domain point and checks
73/// that constraints(zeta) / Z_H(zeta) = quotient(zeta).
74#[allow(clippy::too_many_arguments)]
75pub fn verify_constraints<SC, A, PcsErr>(
76    air: &A,
77    trace_local: &[SC::Challenge],
78    trace_next: &[SC::Challenge],
79    preprocessed_local: Option<&[SC::Challenge]>,
80    preprocessed_next: Option<&[SC::Challenge]>,
81    public_values: &[Val<SC>],
82    trace_domain: Domain<SC>,
83    zeta: SC::Challenge,
84    alpha: SC::Challenge,
85    quotient: SC::Challenge,
86) -> Result<(), VerificationError<PcsErr>>
87where
88    SC: StarkGenericConfig,
89    A: for<'a> Air<VerifierConstraintFolder<'a, SC>>,
90    PcsErr: core::fmt::Debug,
91{
92    let sels = trace_domain.selectors_at_point(zeta);
93
94    let main = VerticalPair::new(
95        RowMajorMatrixView::new_row(trace_local),
96        RowMajorMatrixView::new_row(trace_next),
97    );
98
99    let preprocessed = match (preprocessed_local, preprocessed_next) {
100        (Some(local), Some(next)) => Some(VerticalPair::new(
101            RowMajorMatrixView::new_row(local),
102            RowMajorMatrixView::new_row(next),
103        )),
104        _ => None,
105    };
106
107    let mut folder = VerifierConstraintFolder {
108        main,
109        preprocessed,
110        public_values,
111        is_first_row: sels.is_first_row,
112        is_last_row: sels.is_last_row,
113        is_transition: sels.is_transition,
114        alpha,
115        accumulator: SC::Challenge::ZERO,
116    };
117    air.eval(&mut folder);
118    let folded_constraints = folder.accumulator;
119
120    // Check that constraints(zeta) / Z_H(zeta) = quotient(zeta)
121    if folded_constraints * sels.inv_vanishing != quotient {
122        return Err(VerificationError::OodEvaluationMismatch { index: None });
123    }
124
125    Ok(())
126}
127
128/// Validates and commits the preprocessed trace if present.
129/// Returns the preprocessed width and its commitment hash (available iff width > 0).
130#[allow(clippy::type_complexity)]
131fn process_preprocessed_trace<SC, A>(
132    air: &A,
133    opened_values: &crate::proof::OpenedValues<SC::Challenge>,
134    is_zk: usize,
135    preprocessed_vk: Option<&PreprocessedVerifierKey<SC>>,
136) -> Result<
137    (
138        usize,
139        Option<<SC::Pcs as Pcs<SC::Challenge, SC::Challenger>>::Commitment>,
140    ),
141    VerificationError<PcsError<SC>>,
142>
143where
144    SC: StarkGenericConfig,
145    A: for<'a> Air<VerifierConstraintFolder<'a, SC>>,
146{
147    // Determine expected preprocessed width.
148    // - If a verifier key is provided, trust its width.
149    // - Otherwise, derive width from the AIR's preprocessed trace (if any).
150    let preprocessed_width = preprocessed_vk
151        .map(|vk| vk.width)
152        .or_else(|| air.preprocessed_trace().as_ref().map(|m| m.width))
153        .unwrap_or(0);
154
155    // Check that the proof's opened preprocessed values match the expected width.
156    let preprocessed_local_len = opened_values
157        .preprocessed_local
158        .as_ref()
159        .map_or(0, |v| v.len());
160    let preprocessed_next_len = opened_values
161        .preprocessed_next
162        .as_ref()
163        .map_or(0, |v| v.len());
164    if preprocessed_width != preprocessed_local_len || preprocessed_width != preprocessed_next_len {
165        // Verifier expects preprocessed trace while proof does not have it, or vice versa
166        return Err(VerificationError::InvalidProofShape);
167    }
168
169    // Validate consistency between width, verifier key, and zk settings.
170    match (preprocessed_width, preprocessed_vk) {
171        // Case: No preprocessed columns.
172        //
173        // Valid only if no verifier key is provided.
174        (0, None) => Ok((0, None)),
175
176        // Case: Preprocessed columns exist.
177        //
178        // Valid only if VK exists, widths match, and we are NOT in zk mode.
179        (w, Some(vk)) if w == vk.width => {
180            // Preprocessed columns are currently only supported in non-zk mode.
181            assert_eq!(is_zk, 0, "preprocessed columns not supported in zk mode");
182            Ok((w, Some(vk.commitment.clone())))
183        }
184
185        // Catch-all for invalid states, such as:
186        // - Width is 0 but VK is provided.
187        // - Width > 0 but VK is missing.
188        // - Width > 0 but VK width mismatches the expected width.
189        _ => Err(VerificationError::InvalidProofShape),
190    }
191}
192
193#[instrument(skip_all)]
194pub fn verify<SC, A>(
195    config: &SC,
196    air: &A,
197    proof: &Proof<SC>,
198    public_values: &[Val<SC>],
199) -> Result<(), VerificationError<PcsError<SC>>>
200where
201    SC: StarkGenericConfig,
202    A: Air<SymbolicAirBuilder<Val<SC>>> + for<'a> Air<VerifierConstraintFolder<'a, SC>>,
203{
204    verify_with_preprocessed(config, air, proof, public_values, None)
205}
206
/// Verifies a STARK `proof` for `air` against `public_values`, additionally
/// checking the openings of an optional preprocessed trace against
/// `preprocessed_vk`.
///
/// Re-derives the Fiat-Shamir transcript (degree, commitments, public values),
/// verifies the PCS opening proof for every commitment, recomposes the quotient
/// from its opened chunks, and finally checks the AIR constraints at the
/// out-of-domain point.
///
/// # Errors
///
/// Returns a [`VerificationError`] if the proof shape is inconsistent with the
/// configuration, the PCS opening proof fails, or the out-of-domain constraint
/// check does not hold.
#[instrument(skip_all)]
pub fn verify_with_preprocessed<SC, A>(
    config: &SC,
    air: &A,
    proof: &Proof<SC>,
    public_values: &[Val<SC>],
    preprocessed_vk: Option<&PreprocessedVerifierKey<SC>>,
) -> Result<(), VerificationError<PcsError<SC>>>
where
    SC: StarkGenericConfig,
    A: Air<SymbolicAirBuilder<Val<SC>>> + for<'a> Air<VerifierConstraintFolder<'a, SC>>,
{
    let Proof {
        commitments,
        opened_values,
        opening_proof,
        degree_bits,
    } = proof;

    let pcs = config.pcs();
    // Committed trace height; in zk mode this is larger than the original
    // trace height (see `init_trace_domain` below).
    let degree = 1 << degree_bits;
    let trace_domain = pcs.natural_domain_for_degree(degree);
    // TODO: allow moving preprocessed commitment to preprocess time, if known in advance
    let (preprocessed_width, preprocessed_commit) =
        process_preprocessed_trace::<SC, A>(air, opened_values, config.is_zk(), preprocessed_vk)?;

    // Ensure the preprocessed trace and main trace have the same height.
    if let Some(vk) = preprocessed_vk
        && preprocessed_width > 0
        && vk.degree_bits != *degree_bits
    {
        return Err(VerificationError::InvalidProofShape);
    }

    // Determined symbolically from the AIR's constraint degrees; must match the
    // prover's choice or the transcript/shape checks below will fail.
    let log_num_quotient_chunks = get_log_num_quotient_chunks::<Val<SC>, A>(
        air,
        preprocessed_width,
        public_values.len(),
        config.is_zk(),
    );
    let num_quotient_chunks = 1 << (log_num_quotient_chunks + config.is_zk());
    let mut challenger = config.initialise_challenger();
    // Domain of the original (pre-randomization) trace: `degree` is shifted
    // down by `config.is_zk()`, undoing the zk size increase.
    let init_trace_domain = pcs.natural_domain_for_degree(degree >> (config.is_zk()));

    let quotient_domain =
        trace_domain.create_disjoint_domain(1 << (degree_bits + log_num_quotient_chunks));
    let quotient_chunks_domains = quotient_domain.split_domains(num_quotient_chunks);

    // The quotient chunks were committed on domains enlarged by the zk factor.
    let randomized_quotient_chunks_domains = quotient_chunks_domains
        .iter()
        .map(|domain| pcs.natural_domain_for_degree(domain.size() << (config.is_zk())))
        .collect_vec();
    // Check that the random commitments are/are not present depending on the ZK setting.
    // - If ZK is enabled, the prover should have random commitments.
    // - If ZK is not enabled, the prover should not have random commitments.
    if (opened_values.random.is_some() != SC::Pcs::ZK)
        || (commitments.random.is_some() != SC::Pcs::ZK)
    {
        return Err(VerificationError::RandomizationError);
    }

    // Reject early if any opened-value vector has an unexpected length, so the
    // recomposition and constraint folding below can index without panicking.
    let air_width = A::width(air);
    let valid_shape = opened_values.trace_local.len() == air_width
        && opened_values.trace_next.len() == air_width
        && opened_values.quotient_chunks.len() == num_quotient_chunks
        && opened_values
            .quotient_chunks
            .iter()
            .all(|qc| qc.len() == SC::Challenge::DIMENSION)
        // We've already checked that opened_values.random is present if and only if ZK is enabled.
        && opened_values.random.as_ref().is_none_or(|r_comm| r_comm.len() == SC::Challenge::DIMENSION);
    if !valid_shape {
        return Err(VerificationError::InvalidProofShape);
    }

    // Observe the instance.
    challenger.observe(Val::<SC>::from_usize(proof.degree_bits));
    challenger.observe(Val::<SC>::from_usize(proof.degree_bits - config.is_zk()));
    challenger.observe(Val::<SC>::from_usize(preprocessed_width));
    // TODO: Might be best practice to include other instance data here in the transcript, like some
    // encoding of the AIR. This protects against transcript collisions between distinct instances.
    // Practically speaking though, the only related known attack is from failing to include public
    // values. It's not clear if failing to include other instance data could enable a transcript
    // collision, since most such changes would completely change the set of satisfying witnesses.
    challenger.observe(commitments.trace.clone());
    if preprocessed_width > 0 {
        // `process_preprocessed_trace` guarantees the commitment is Some when width > 0.
        challenger.observe(preprocessed_commit.as_ref().unwrap().clone());
    }
    challenger.observe_slice(public_values);

    // Get the first Fiat Shamir challenge which will be used to combine all constraint polynomials
    // into a single polynomial.
    //
    // Soundness Error: n/|EF| where n is the number of constraints.
    let alpha = challenger.sample_algebra_element();
    challenger.observe(commitments.quotient_chunks.clone());

    // We've already checked that commitments.random is present if and only if ZK is enabled.
    // Observe the random commitment if it is present.
    if let Some(r_commit) = commitments.random.clone() {
        challenger.observe(r_commit);
    }

    // Get an out-of-domain point to open our values at.
    //
    // Soundness Error: dN/|EF| where `N` is the trace length and our constraint polynomial has degree `d`.
    let zeta = challenger.sample_algebra_element();
    // Point used for the "next row" openings; errors if the domain cannot
    // compute the successor of zeta algebraically.
    let zeta_next = init_trace_domain
        .next_point(zeta)
        .ok_or(VerificationError::NextPointUnavailable)?;

    // We've already checked that commitments.random and opened_values.random are present if and only if ZK is enabled.
    let mut coms_to_verify = if let Some(random_commit) = &commitments.random {
        let random_values = opened_values
            .random
            .as_ref()
            .ok_or(VerificationError::RandomizationError)?;
        vec![(
            random_commit.clone(),
            vec![(trace_domain, vec![(zeta, random_values.clone())])],
        )]
    } else {
        vec![]
    };
    // Main trace is opened at both zeta and zeta_next; each quotient chunk is
    // opened at zeta only, on its (possibly randomized) chunk domain.
    coms_to_verify.extend(vec![
        (
            commitments.trace.clone(),
            vec![(
                trace_domain,
                vec![
                    (zeta, opened_values.trace_local.clone()),
                    (zeta_next, opened_values.trace_next.clone()),
                ],
            )],
        ),
        (
            commitments.quotient_chunks.clone(),
            // Check the commitment on the randomized domains.
            zip_eq(
                randomized_quotient_chunks_domains.iter(),
                &opened_values.quotient_chunks,
                VerificationError::InvalidProofShape,
            )?
            .map(|(domain, values)| (*domain, vec![(zeta, values.clone())]))
            .collect_vec(),
        ),
    ]);

    // Add preprocessed commitment verification if present
    if preprocessed_width > 0 {
        // The width check in `process_preprocessed_trace` guarantees these
        // opened values are Some when width > 0, so the unwraps cannot panic.
        coms_to_verify.push((
            preprocessed_commit.unwrap(),
            vec![(
                trace_domain,
                vec![
                    (zeta, opened_values.preprocessed_local.clone().unwrap()),
                    (zeta_next, opened_values.preprocessed_next.clone().unwrap()),
                ],
            )],
        ));
    }

    pcs.verify(coms_to_verify, opening_proof, &mut challenger)
        .map_err(VerificationError::InvalidOpeningArgument)?;

    // Reconstruct quotient(zeta) from the opened chunk values.
    let quotient = recompose_quotient_from_chunks::<SC>(
        &quotient_chunks_domains,
        &opened_values.quotient_chunks,
        zeta,
    );

    // Finally check constraints(zeta) / Z_H(zeta) = quotient(zeta) on the
    // original (un-randomized) trace domain.
    verify_constraints::<SC, A, PcsError<SC>>(
        air,
        &opened_values.trace_local,
        &opened_values.trace_next,
        opened_values.preprocessed_local.as_deref(),
        opened_values.preprocessed_next.as_deref(),
        public_values,
        init_trace_domain,
        zeta,
        alpha,
        quotient,
    )?;

    Ok(())
}
393
/// Defines errors that can occur during lookup verification.
///
/// Wrapped by [`VerificationError::LookupError`] and rendered via `Debug`.
#[derive(Debug)]
pub enum LookupError {
    /// Error indicating that the global cumulative sum is incorrect.
    ///
    /// The optional `String` carries extra context about the mismatch, if any.
    GlobalCumulativeMismatch(Option<String>),
}
400
/// Errors returned by the STARK verifier.
///
/// Parameterized over `PcsErr`, the error type of the underlying polynomial
/// commitment scheme, which is surfaced via [`Self::InvalidOpeningArgument`].
#[derive(Debug, Error)]
pub enum VerificationError<PcsErr>
where
    PcsErr: core::fmt::Debug,
{
    /// The proof's opened values or commitments do not have the expected
    /// lengths/presence for this AIR and configuration.
    #[error("invalid proof shape")]
    InvalidProofShape,
    /// An error occurred while verifying the claimed openings.
    #[error("invalid opening argument: {0:?}")]
    InvalidOpeningArgument(PcsErr),
    /// Out-of-domain evaluation mismatch, i.e. `constraints(zeta)` did not match
    /// `quotient(zeta) Z_H(zeta)`.
    #[error("out-of-domain evaluation mismatch{}", .index.map(|i| format!(" at index {}", i)).unwrap_or_default())]
    OodEvaluationMismatch { index: Option<usize> },
    /// The FRI batch randomization does not correspond to the ZK setting.
    #[error("randomization error: FRI batch randomization does not match ZK setting")]
    RandomizationError,
    /// The domain does not support computing the next point algebraically.
    #[error(
        "next point unavailable: domain does not support computing the next point algebraically"
    )]
    NextPointUnavailable,
    /// Lookup related error
    #[error("lookup error: {0:?}")]
    LookupError(LookupError),
}