triton_vm/
stark.rs

1use std::ops::Mul;
2
3use arbitrary::Arbitrary;
4use arbitrary::Unstructured;
5use itertools::Itertools;
6use itertools::izip;
7use ndarray::Zip;
8use ndarray::prelude::*;
9use num_traits::ConstOne;
10use num_traits::ConstZero;
11use num_traits::Zero;
12use rand::prelude::*;
13use rand::random;
14use rayon::prelude::*;
15use serde::Deserialize;
16use serde::Serialize;
17use twenty_first::math::ntt::intt;
18use twenty_first::math::ntt::ntt;
19use twenty_first::math::traits::FiniteField;
20use twenty_first::math::traits::ModPowU64;
21use twenty_first::math::traits::PrimitiveRootOfUnity;
22use twenty_first::prelude::*;
23
24use crate::aet::AlgebraicExecutionTrace;
25use crate::arithmetic_domain::ArithmeticDomain;
26use crate::challenges::Challenges;
27use crate::error::ProvingError;
28use crate::error::VerificationError;
29use crate::fri;
30use crate::fri::Fri;
31use crate::ndarray_helper;
32use crate::ndarray_helper::COL_AXIS;
33use crate::ndarray_helper::ROW_AXIS;
34use crate::profiler::profiler;
35use crate::proof::Claim;
36use crate::proof::Proof;
37use crate::proof_item::ProofItem;
38use crate::proof_stream::ProofStream;
39use crate::table::QuotientSegments;
40use crate::table::auxiliary_table::Evaluable;
41use crate::table::master_table::BfeSlice;
42use crate::table::master_table::MasterAuxTable;
43use crate::table::master_table::MasterMainTable;
44use crate::table::master_table::MasterTable;
45use crate::table::master_table::all_quotients_combined;
46use crate::table::master_table::max_degree_with_origin;
47
48/// The number of segments the quotient polynomial is split into.
49/// Helps keep the FRI domain small.
50pub const NUM_QUOTIENT_SEGMENTS: usize = air::TARGET_DEGREE as usize;
51
52/// The number of randomizer polynomials over the [extension
53/// field](XFieldElement) used in the [`STARK`](Stark). Integral for achieving
54/// zero-knowledge in [FRI](Fri).
55pub const NUM_RANDOMIZER_POLYNOMIALS: usize = 1;
56
57const NUM_DEEP_CODEWORD_COMPONENTS: usize = 3;
58
59/// Parameters for the Zero-Knowledge
60/// [Scalable Transparent ARgument of Knowledge (STARK)][stark] for Triton VM.
61///
62/// The two core methods are [`Stark::prove`] and [`Stark::verify`].
63///
64/// [stark]: https://www.iacr.org/archive/crypto2019/116940201/116940201.pdf
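///
/// A minimal construction sketch. The concrete numbers are illustrative
/// assumptions only; the crate's default parameters (`Stark::default()`,
/// defined outside this excerpt) are the usual starting point.
///
/// ```
/// # use triton_vm::stark::Stark;
/// let parameters = Stark {
///     security_level: 160,
///     fri_expansion_factor: 4,
///     num_trace_randomizers: 166,
///     num_collinearity_checks: 80,
/// };
/// assert!(parameters.fri_expansion_factor.is_power_of_two());
/// ```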
65#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
66pub struct Stark {
67    /// The conjectured security level in bits. Concretely, the system
68    /// - is perfectly complete, and
69    /// - has soundness error 2^(-security_level).
70    pub security_level: usize,
71
72    /// The ratio between the lengths of the randomized trace domain and the
73    /// [FRI domain](Stark::fri). Must be a power of 2 for efficiency reasons.
74    pub fri_expansion_factor: usize,
75
76    /// The number of randomizers for the execution trace. The trace randomizers
77    /// are integral for achieving zero-knowledge. In particular, they
78    /// achieve ZK for the (DEEP) ALI part of the zk-STARK.
79    //
80    // See also [`MasterTable::trace_randomizer_for_column`].
81    pub num_trace_randomizers: usize,
82
83    /// The number of collinearity checks to perform in [FRI](Fri).
84    pub num_collinearity_checks: usize,
85}
86
87/// The prover for Triton VM's [zk-STARK](Stark). The core method is
88/// [`prove`](Prover::prove). It is probably more convenient to call
89/// [`Stark::prove`] directly.
90///
91/// It is possible to [set the randomness seed][seed] used for deriving all
92/// prover randomness. To prevent accidental randomness re-use, the [`Prover`]
93/// does not implement [`Clone`].
94///
95/// [seed]: Prover::set_randomness_seed_which_may_break_zero_knowledge
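///
/// A usage sketch; obtaining the [`Claim`] and [`AlgebraicExecutionTrace`]
/// (usually via program execution) is outside this excerpt.
///
/// ```no_run
/// # use triton_vm::prelude::*;
/// # use triton_vm::aet::AlgebraicExecutionTrace;
/// # use triton_vm::stark::Prover;
/// # let claim: Claim = unimplemented!();
/// # let aet: AlgebraicExecutionTrace = unimplemented!();
/// let prover = Prover::new(Stark::default());
/// let proof = prover.prove(&claim, &aet).expect("proving should succeed");
/// ```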
96#[expect(missing_copy_implementations)]
97#[derive(Debug, Eq, PartialEq, Arbitrary)]
98pub struct Prover {
99    parameters: Stark,
100
101    /// The seed for all randomness used while [proving][Stark::prove].
102    ///
103    /// For Triton VM's proofs to be zero knowledge, this seed must be sampled
104    /// uniformly at random, and independently of all other input. No
105    /// information about it may reach the verifier.
106    randomness_seed: <StdRng as SeedableRng>::Seed,
107}
108
109/// The verifier for Triton VM's [zk-STARK](Stark). The core method is
110/// [`verify`](Verifier::verify). It is probably more convenient to call
111/// [`Stark::verify`] directly.
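///
/// A usage sketch mirroring the prover example above; the [`Claim`] and
/// [`Proof`] are assumed to already exist.
///
/// ```no_run
/// # use triton_vm::prelude::*;
/// # use triton_vm::stark::Verifier;
/// # let claim: Claim = unimplemented!();
/// # let proof: Proof = unimplemented!();
/// let verifier = Verifier::new(Stark::default());
/// verifier.verify(&claim, &proof).expect("verification should succeed");
/// ```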
112#[derive(Debug, Copy, Clone, Default, Eq, PartialEq, Hash, Serialize, Deserialize, Arbitrary)]
113pub struct Verifier {
114    parameters: Stark,
115}
116
117/// The domains used by the [Prover].
118///
119/// Triton VM's STARK prover & verifier heavily rely on the
120/// [Schwartz-Zippel Lemma][lemma]. Therefore, they use polynomials extensively.
121/// In order to lower the concrete computational cost, these polynomials are
122/// usually represented in value form as opposed to monomial coefficient
123/// form, _i.e._, they are usually codewords. To reduce computational costs and
124/// memory requirements even further, both the prover and the verifier generally
125/// represent a polynomial using the shortest possible codewords. For a
126/// polynomial of degree _n_, the shortest possible codeword has _n + 1_
127/// entries.
128///
129/// Using the shortest possible codewords means that the
130/// [arithmetic domains](ArithmeticDomain) associated with those codewords have
131/// different lengths. Also for performance reasons, the lengths of all
132/// arithmetic domains are powers of two. If _n + 1_ is not a power of two, the
133/// next power of two is used. This allows efficient interpolation (and
134/// evaluation) using the (inverse) [Number Theoretic Transform (NTT)](ntt).
135///
136/// The most important of these domains are contained in this struct. In some
137/// cases, a domain _could_ be replaced by another existing, longer domain to
138/// achieve the same functionality in principle. These instances are pointed
139/// out in the fields' documentation below.
140///
141/// [lemma]: https://dl.acm.org/doi/pdf/10.1145/322217.322225
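///
/// For illustration (the numbers are hypothetical): a polynomial of degree
/// 1000 needs a codeword with at least 1001 entries, so the associated domain
/// is chosen to have length `1001_usize.next_power_of_two()`, i.e., 1024.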
142#[derive(Debug, Copy, Clone, Eq, PartialEq)]
143pub(crate) struct ProverDomains {
144    /// One of the most central domains.
145    /// Its length is dictated by the [AET][AlgebraicExecutionTrace]: it must be
146    /// at least as long as the tallest column in the AET.
147    pub trace: ArithmeticDomain,
148
149    /// Derived from the trace domain. Long enough to account for trace
150    /// randomizers in addition to the trace. The trace randomizers are required
151    /// for zero-knowledge.
152    ///
153    /// For the current approach to trace randomizers to work, the randomized
154    /// trace domain must be _exactly_ twice as long as the trace domain. If the
155    /// number of trace randomizers exceeds the length of the trace, this bumps
156    /// the length of the trace domain.
157    pub randomized_trace: ArithmeticDomain,
158
159    /// This domain is _just_ large enough to perform all the necessary
160    /// computations on polynomials. Concretely, the maximal degree of a
161    /// polynomial over the quotient domain is at most only slightly larger than
162    /// the maximal degree allowed in the STARK proof, and could be equal. This
163    /// makes computation for the prover much faster.
164    ///
165    /// This domain _could_ be replaced by the [FRI domain](ProverDomains::fri)
166    /// to achieve the same functionality in principle. However, the FRI domain
167    /// is usually longer.
168    pub quotient: ArithmeticDomain,
169
170    /// See also [Stark::fri].
171    pub fri: ArithmeticDomain,
172}
173
174impl ProverDomains {
175    pub fn derive(
176        padded_height: usize,
177        num_trace_randomizers: usize,
178        fri_domain: ArithmeticDomain,
179        max_degree: isize,
180    ) -> Self {
181        let randomized_trace_len =
182            Stark::randomized_trace_len(padded_height, num_trace_randomizers);
183        let randomized_trace_domain = ArithmeticDomain::of_length(randomized_trace_len).unwrap();
184        let trace_domain = randomized_trace_domain.halve().unwrap();
185
186        let max_degree = usize::try_from(max_degree).expect("AIR should constrain the VM");
187        let quotient_domain_length = max_degree.next_power_of_two();
188        let quotient_domain = ArithmeticDomain::of_length(quotient_domain_length)
189            .unwrap()
190            .with_offset(fri_domain.offset);
191
192        Self {
193            trace: trace_domain,
194            randomized_trace: randomized_trace_domain,
195            quotient: quotient_domain,
196            fri: fri_domain,
197        }
198    }
199}
200
201impl Prover {
202    /// A [`Prover`] with a sane [randomness seed][seed].
203    ///
204    /// [seed]: Prover::set_randomness_seed_which_may_break_zero_knowledge
205    pub fn new(parameters: Stark) -> Self {
206        Self {
207            parameters,
208            randomness_seed: random(),
209        }
210    }
211
212    /// Manually set the seed for the randomness used during
213    /// [proving](Self::prove). This makes the generated [proof](Proof)
214    /// deterministic.
215    ///
216    /// # WARNING!
217    ///
218    /// Careless use of this method can break Zero-Knowledge of Triton VM. In
219    /// particular, the [verifier](Stark::verify) must learn nothing about the
220    /// supplied seed, must be unable to influence the supplied seed, and must
221    /// be unable to guess anything about the supplied seed. The latter
222    /// implies that whatever source of randomness is chosen must have
223    /// sufficient entropy.
224    ///
225    /// ### If in doubt, don't use this method.
226    //
227    // Even though this method can be used to disable or cripple one of the
228    // core promises of Triton VM, it is not marked `unsafe` because it is
229    // always _memory_ safe. See also:
230    // https://doc.rust-lang.org/std/keyword.unsafe.html
231    //
232    // The length of the name is intended to discourage use.
233    #[must_use]
234    pub fn set_randomness_seed_which_may_break_zero_knowledge(
235        mut self,
236        seed: <StdRng as SeedableRng>::Seed,
237    ) -> Self {
238        self.randomness_seed = seed;
239        self
240    }
241
242    /// See also [`Stark::prove`].
243    pub fn prove(
244        self,
245        claim: &Claim,
246        aet: &AlgebraicExecutionTrace,
247    ) -> Result<Proof, ProvingError> {
248        profiler!(start "Fiat-Shamir: claim" ("hash"));
249        let mut proof_stream = ProofStream::new();
250        proof_stream.alter_fiat_shamir_state_with(claim);
251        profiler!(stop "Fiat-Shamir: claim");
252
253        profiler!(start "derive additional parameters");
254        let padded_height = aet.padded_height();
255        let fri = self.parameters.fri(padded_height)?;
256        let domains = ProverDomains::derive(
257            padded_height,
258            self.parameters.num_trace_randomizers,
259            fri.domain,
260            self.parameters.max_degree(padded_height),
261        );
262        proof_stream.enqueue(ProofItem::Log2PaddedHeight(padded_height.ilog2()));
263        profiler!(stop "derive additional parameters");
264
265        profiler!(start "main tables");
266        profiler!(start "create" ("gen"));
267        let mut master_main_table = MasterMainTable::new(
268            aet,
269            domains,
270            self.parameters.num_trace_randomizers,
271            self.randomness_seed,
272        );
273        profiler!(stop "create");
274
275        profiler!(start "pad" ("gen"));
276        master_main_table.pad();
277        profiler!(stop "pad");
278
279        master_main_table.maybe_low_degree_extend_all_columns();
280
281        profiler!(start "Merkle tree");
282        let main_merkle_tree = master_main_table.merkle_tree();
283        profiler!(stop "Merkle tree");
284
285        profiler!(start "Fiat-Shamir" ("hash"));
286        proof_stream.enqueue(ProofItem::MerkleRoot(main_merkle_tree.root()));
287        let challenges = proof_stream.sample_scalars(Challenges::SAMPLE_COUNT);
288        let challenges = Challenges::new(challenges, claim);
289        profiler!(stop "Fiat-Shamir");
290
291        profiler!(start "extend" ("gen"));
292        let mut master_aux_table = master_main_table.extend(&challenges);
293        profiler!(stop "extend");
294        profiler!(stop "main tables");
295
296        profiler!(start "aux tables");
297        master_aux_table.maybe_low_degree_extend_all_columns();
298
299        profiler!(start "Merkle tree");
300        let aux_merkle_tree = master_aux_table.merkle_tree();
301        profiler!(stop "Merkle tree");
302
303        profiler!(start "Fiat-Shamir" ("hash"));
304        proof_stream.enqueue(ProofItem::MerkleRoot(aux_merkle_tree.root()));
305
306        // Get the weights with which to compress the many quotients into one.
307        let quotient_combination_weights =
308            proof_stream.sample_scalars(MasterAuxTable::NUM_CONSTRAINTS);
309        profiler!(stop "Fiat-Shamir");
310        profiler!(stop "aux tables");
311
312        let (fri_domain_quotient_segment_codewords, quotient_segment_polynomials) =
313            Self::compute_quotient_segments(
314                &mut master_main_table,
315                &mut master_aux_table,
316                domains.quotient,
317                &challenges,
318                &quotient_combination_weights,
319            );
320
321        profiler!(start "hash rows of quotient segments" ("hash"));
322        let interpret_xfe_as_bfes = |xfe: &XFieldElement| xfe.coefficients.to_vec();
323        let hash_row = |row: ArrayView1<_>| {
324            let row_as_bfes = row.iter().map(interpret_xfe_as_bfes).concat();
325            Tip5::hash_varlen(&row_as_bfes)
326        };
327        let quotient_segments_rows = fri_domain_quotient_segment_codewords
328            .axis_iter(ROW_AXIS)
329            .into_par_iter();
330        let fri_domain_quotient_segment_codewords_digests =
331            quotient_segments_rows.map(hash_row).collect::<Vec<_>>();
332        profiler!(stop "hash rows of quotient segments");
333        profiler!(start "Merkle tree" ("hash"));
334        let quot_merkle_tree = MerkleTree::par_new(&fri_domain_quotient_segment_codewords_digests)?;
335        let quot_merkle_tree_root = quot_merkle_tree.root();
336        proof_stream.enqueue(ProofItem::MerkleRoot(quot_merkle_tree_root));
337        profiler!(stop "Merkle tree");
338
339        debug_assert_eq!(domains.fri.length, quot_merkle_tree.num_leafs());
340
341        profiler!(start "out-of-domain rows");
342        let trace_domain_generator = master_main_table.domains().trace.generator;
343        let out_of_domain_point_curr_row = proof_stream.sample_scalars(1)[0];
344        let out_of_domain_point_next_row = trace_domain_generator * out_of_domain_point_curr_row;
345
346        let ood_main_row = master_main_table.out_of_domain_row(out_of_domain_point_curr_row);
347        let ood_main_row = MasterMainTable::try_to_main_row(ood_main_row)?;
348        proof_stream.enqueue(ProofItem::OutOfDomainMainRow(Box::new(ood_main_row)));
349
350        let ood_aux_row = master_aux_table.out_of_domain_row(out_of_domain_point_curr_row);
351        let ood_aux_row = MasterAuxTable::try_to_aux_row(ood_aux_row)?;
352        proof_stream.enqueue(ProofItem::OutOfDomainAuxRow(Box::new(ood_aux_row)));
353
354        let ood_next_main_row = master_main_table.out_of_domain_row(out_of_domain_point_next_row);
355        let ood_next_main_row = MasterMainTable::try_to_main_row(ood_next_main_row)?;
356        proof_stream.enqueue(ProofItem::OutOfDomainMainRow(Box::new(ood_next_main_row)));
357
358        let ood_next_aux_row = master_aux_table.out_of_domain_row(out_of_domain_point_next_row);
359        let ood_next_aux_row = MasterAuxTable::try_to_aux_row(ood_next_aux_row)?;
360        proof_stream.enqueue(ProofItem::OutOfDomainAuxRow(Box::new(ood_next_aux_row)));
361
362        let out_of_domain_point_curr_row_pow_num_segments =
363            out_of_domain_point_curr_row.mod_pow_u32(NUM_QUOTIENT_SEGMENTS as u32);
364        let out_of_domain_curr_row_quot_segments = quotient_segment_polynomials
365            .map(|poly| poly.evaluate(out_of_domain_point_curr_row_pow_num_segments))
366            .to_vec()
367            .try_into()
368            .unwrap();
369        proof_stream.enqueue(ProofItem::OutOfDomainQuotientSegments(
370            out_of_domain_curr_row_quot_segments,
371        ));
372        profiler!(stop "out-of-domain rows");
373
374        profiler!(start "Fiat-Shamir" ("hash"));
375        let weights = LinearCombinationWeights::sample(&mut proof_stream);
376        profiler!(stop "Fiat-Shamir");
377
378        let fri_domain_is_short_domain = domains.fri.length <= domains.quotient.length;
379        let short_domain = if fri_domain_is_short_domain {
380            domains.fri
381        } else {
382            domains.quotient
383        };
384
385        profiler!(start "linear combination");
386        profiler!(start "main" ("CC"));
387        let main_combination_poly = master_main_table.weighted_sum_of_columns(weights.main);
388        profiler!(stop "main");
389
390        profiler!(start "aux" ("CC"));
391        let aux_combination_poly = master_aux_table.weighted_sum_of_columns(weights.aux);
392        profiler!(stop "aux");
393        let main_and_aux_combination_polynomial = main_combination_poly + aux_combination_poly;
394        let main_and_aux_codeword = short_domain.evaluate(&main_and_aux_combination_polynomial);
395
396        profiler!(start "quotient" ("CC"));
397        let quotient_segments_combination_polynomial = quotient_segment_polynomials
398            .into_iter()
399            .zip_eq(weights.quot_segments)
400            .fold(Polynomial::zero(), |acc, (poly, w)| acc + poly * w);
401        let quotient_segments_combination_codeword =
402            short_domain.evaluate(&quotient_segments_combination_polynomial);
403        profiler!(stop "quotient");
404
405        profiler!(stop "linear combination");
406
407        profiler!(start "DEEP");
408        // There are (at least) two possible ways to perform the DEEP update.
409        // 1. The one used here, where main & aux codewords are DEEP'd twice:
410        //    once with the out-of-domain point for the current row (i.e., α)
411        //    and once using the out-of-domain point for the next row
412        //    (i.e., ω·α). The DEEP update's denominator is a degree-1
413        //    polynomial in both cases, namely (ω^i - α) and (ω^i - ω·α)
414        //    respectively.
415        // 2. One where the main & aux codewords are DEEP'd only once, using the
416        //    degree-2 polynomial (ω^i - α)·(ω^i - ω·α) as the denominator. This
417        //    requires a linear interpolation in the numerator:
418        //    b(ω^i) - i((b(α), α) + (b(ω·α), ω·α))(ω^i).
419        //
420        // In either case, the DEEP'd quotient polynomial is an additional
421        // summand for the combination codeword: (q(ω^i) - q(α)) / (ω^i - α).
422        // All (three or two) summands are weighted and summed to form the
423        // combination codeword. The weights are sampled through the Fiat-Shamir
424        // heuristic.
425        //
426        // Both approaches are sound. The first approach is more efficient, as
427        // it requires fewer operations.
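        //
        // As a concrete sketch (the arithmetic itself lives in
        // `Stark::deep_update`, outside this excerpt), approach 1 maps each
        // in-domain evaluation f(ω^i) of a codeword f to
        //
        //     (f(ω^i) - f(α)) / (ω^i - α),
        //
        // which is the evaluation of a low-degree polynomial exactly if the
        // claimed out-of-domain value f(α) is honest.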
428        profiler!(start "main&aux curr row");
429        let out_of_domain_curr_row_main_and_aux_value =
430            main_and_aux_combination_polynomial.evaluate(out_of_domain_point_curr_row);
431        let main_and_aux_curr_row_deep_codeword = Self::deep_codeword(
432            &main_and_aux_codeword,
433            short_domain,
434            out_of_domain_point_curr_row,
435            out_of_domain_curr_row_main_and_aux_value,
436        );
437        profiler!(stop "main&aux curr row");
438
439        profiler!(start "main&aux next row");
440        let out_of_domain_next_row_main_and_aux_value =
441            main_and_aux_combination_polynomial.evaluate(out_of_domain_point_next_row);
442        let main_and_aux_next_row_deep_codeword = Self::deep_codeword(
443            &main_and_aux_codeword,
444            short_domain,
445            out_of_domain_point_next_row,
446            out_of_domain_next_row_main_and_aux_value,
447        );
448        profiler!(stop "main&aux next row");
449
450        profiler!(start "segmented quotient");
451        let out_of_domain_curr_row_quot_segments_value = quotient_segments_combination_polynomial
452            .evaluate(out_of_domain_point_curr_row_pow_num_segments);
453        let quotient_segments_curr_row_deep_codeword = Self::deep_codeword(
454            &quotient_segments_combination_codeword,
455            short_domain,
456            out_of_domain_point_curr_row_pow_num_segments,
457            out_of_domain_curr_row_quot_segments_value,
458        );
459        profiler!(stop "segmented quotient");
460        profiler!(stop "DEEP");
461
462        profiler!(start "combined DEEP polynomial");
463        profiler!(start "sum" ("CC"));
464        let deep_codeword = [
465            main_and_aux_curr_row_deep_codeword,
466            main_and_aux_next_row_deep_codeword,
467            quotient_segments_curr_row_deep_codeword,
468        ]
469        .into_par_iter()
470        .zip_eq(weights.deep.as_slice().unwrap())
471        .map(|(codeword, &weight)| codeword.into_par_iter().map(|c| c * weight).collect())
472        .reduce(
473            || vec![XFieldElement::ZERO; short_domain.length],
474            |left, right| left.into_iter().zip(right).map(|(l, r)| l + r).collect(),
475        );
476
477        profiler!(stop "sum");
478        let fri_combination_codeword = if fri_domain_is_short_domain {
479            deep_codeword
480        } else {
481            profiler!(start "LDE" ("LDE"));
482            let deep_codeword = domains
483                .quotient
484                .low_degree_extension(&deep_codeword, domains.fri);
485            profiler!(stop "LDE");
486            deep_codeword
487        };
488        assert_eq!(domains.fri.length, fri_combination_codeword.len());
489        profiler!(stop "combined DEEP polynomial");
490
491        profiler!(start "FRI");
492        let revealed_current_row_indices =
493            fri.prove(&fri_combination_codeword, &mut proof_stream)?;
494        assert_eq!(
495            self.parameters.num_collinearity_checks,
496            revealed_current_row_indices.len()
497        );
498        profiler!(stop "FRI");
499
500        profiler!(start "open trace leafs");
501        // Open leafs of zipped codewords at indicated positions
502        let main_row_err = |row: Vec<_>| ProvingError::TableRowConversionError {
503            expected_len: MasterMainTable::NUM_COLUMNS,
504            actual_len: row.len(),
505        };
506        let revealed_main_elems = master_main_table
507            .reveal_rows(&revealed_current_row_indices)
508            .into_iter()
509            .map(|row| row.try_into().map_err(main_row_err))
510            .try_collect()?;
511        let base_authentication_structure =
512            main_merkle_tree.authentication_structure(&revealed_current_row_indices)?;
513        proof_stream.enqueue(ProofItem::MasterMainTableRows(revealed_main_elems));
514        proof_stream.enqueue(ProofItem::AuthenticationStructure(
515            base_authentication_structure,
516        ));
517
518        let aux_row_err = |row: Vec<_>| ProvingError::TableRowConversionError {
519            expected_len: MasterAuxTable::NUM_COLUMNS,
520            actual_len: row.len(),
521        };
522        let revealed_aux_elems = master_aux_table
523            .reveal_rows(&revealed_current_row_indices)
524            .into_iter()
525            .map(|row| row.try_into().map_err(aux_row_err))
526            .try_collect()?;
527        let aux_authentication_structure =
528            aux_merkle_tree.authentication_structure(&revealed_current_row_indices)?;
529        proof_stream.enqueue(ProofItem::MasterAuxTableRows(revealed_aux_elems));
530        proof_stream.enqueue(ProofItem::AuthenticationStructure(
531            aux_authentication_structure,
532        ));
533
534        // Open quotient & combination codewords at the same positions as
535        // main & aux codewords.
536        let into_fixed_width_row =
537            |row: ArrayView1<_>| -> QuotientSegments { row.to_vec().try_into().unwrap() };
538        let revealed_quotient_segments_rows = revealed_current_row_indices
539            .iter()
540            .map(|&i| fri_domain_quotient_segment_codewords.row(i))
541            .map(into_fixed_width_row)
542            .collect_vec();
543        let revealed_quotient_authentication_structure =
544            quot_merkle_tree.authentication_structure(&revealed_current_row_indices)?;
545        proof_stream.enqueue(ProofItem::QuotientSegmentsElements(
546            revealed_quotient_segments_rows,
547        ));
548        proof_stream.enqueue(ProofItem::AuthenticationStructure(
549            revealed_quotient_authentication_structure,
550        ));
551        profiler!(stop "open trace leafs");
552
553        Ok(proof_stream.into())
554    }
555
556    fn compute_quotient_segments(
557        main_table: &mut MasterMainTable,
558        aux_table: &mut MasterAuxTable,
559        quotient_domain: ArithmeticDomain,
560        challenges: &Challenges,
561        quotient_combination_weights: &[XFieldElement],
562    ) -> (
563        Array2<XFieldElement>,
564        Array1<Polynomial<'static, XFieldElement>>,
565    ) {
566        let (Some(main_quotient_domain_codewords), Some(aux_quotient_domain_codewords)) = (
567            main_table.quotient_domain_table(),
568            aux_table.quotient_domain_table(),
569        ) else {
570            // The decision to cache the quotient domain main table can be
571            // independent of the decision to cache the quotient domain
572            // auxiliary table. Additionally, available memory is affected by
573            // other running programs. Together, this may result in one table
574            // being cached while the other is not. To compute the quotients,
575            // either both or neither are needed.[^1] For peak memory
576            // consumption, it is beneficial to clear any unused cache.
577            //
578            // Discarding the cache incurs a performance penalty later, when
579            // revealing the rows indicated by FRI. This is an acceptable
580            // tradeoff: Executing this very code path means that in the current
581            // environment, peak memory usage is a concern. Running out of
582            // memory results in abnormal termination of the prover. Slower
583            // execution is an acceptable price for normal termination.
584            //
585            // [^1]: Code using exactly one cache _could_ exist, but oh! the
586            //       engineering.
587            main_table.clear_cache();
588            aux_table.clear_cache();
589
590            profiler!(start "quotient calculation (just-in-time)");
591            let (fri_domain_quotient_segment_codewords, quotient_segment_polynomials) =
592                Self::compute_quotient_segments_with_jit_lde(
593                    main_table,
594                    aux_table,
595                    challenges,
596                    quotient_combination_weights,
597                );
598            profiler!(stop "quotient calculation (just-in-time)");
599
600            return (
601                fri_domain_quotient_segment_codewords,
602                quotient_segment_polynomials,
603            );
604        };
605
606        profiler!(start "quotient calculation (cached)" ("CC"));
607        let quotient_codeword = all_quotients_combined(
608            main_quotient_domain_codewords,
609            aux_quotient_domain_codewords,
610            main_table.domains().trace,
611            quotient_domain,
612            challenges,
613            quotient_combination_weights,
614        );
615        let quotient_codeword = Array1::from(quotient_codeword);
616        assert_eq!(quotient_domain.length, quotient_codeword.len());
617        profiler!(stop "quotient calculation (cached)");
618
619        profiler!(start "quotient LDE" ("LDE"));
620        let quotient_segment_polynomials =
621            Self::interpolate_quotient_segments(quotient_codeword, quotient_domain);
622
623        let fri_domain = main_table.domains().fri;
624        let fri_domain_quotient_segment_codewords =
625            Self::fri_domain_segment_polynomials(quotient_segment_polynomials.view(), fri_domain);
626        profiler!(stop "quotient LDE");
627
628        (
629            fri_domain_quotient_segment_codewords,
630            quotient_segment_polynomials,
631        )
632    }
633
634    /// Computes the quotient segments in a memory-friendly way, i.e., without
635    /// ever representing the entire low-degree extended trace. Instead, the
636    /// trace is extrapolated over cosets of the trace domain, and the
637    /// quotients are computed there. The resulting coset-quotients are
638    /// linearly recombined to produce the quotient segment codewords.
639    fn compute_quotient_segments_with_jit_lde(
640        main_table: &mut MasterMainTable,
641        aux_table: &mut MasterAuxTable,
642        challenges: &Challenges,
643        quotient_combination_weights: &[XFieldElement],
644    ) -> (
645        Array2<XFieldElement>,
646        Array1<Polynomial<'static, XFieldElement>>,
647    ) {
648        // This parameter regulates a time-memory tradeoff. Semantically, it is
649        // the ratio of the randomized trace length to the length of the domain
650        // used for calculating the quotient segments (aka “working domain”).
651        // When this factor is larger, there are _more_ cosets of _smaller_ size
652        // over which the trace polynomials are evaluated.
653        // Must be a power of two and lie in 2..=randomized_trace_domain.length.
654        //
655        // The requirement for the working domain to be at most as long as the
656        // trace domain, i.e., at most half the size of the randomized trace
657        // domain, is explained below.
658        const RANDOMIZED_TRACE_LEN_TO_WORKING_DOMAIN_LEN_RATIO: usize = 2;
659        const NUM_COSETS: usize =
660            NUM_QUOTIENT_SEGMENTS * RANDOMIZED_TRACE_LEN_TO_WORKING_DOMAIN_LEN_RATIO;
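        // For illustration: with this ratio of 2 and (assuming, as in the
        // current AIR, that `air::TARGET_DEGREE` and thus
        // `NUM_QUOTIENT_SEGMENTS` is 4) the trace polynomials are evaluated
        // over 8 cosets, each half as long as the randomized trace domain.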
661
662        debug_assert!(RANDOMIZED_TRACE_LEN_TO_WORKING_DOMAIN_LEN_RATIO.is_power_of_two());
663
664        let domains = main_table.domains();
665        let mut working_domain = domains.randomized_trace;
666        for _ in 0..RANDOMIZED_TRACE_LEN_TO_WORKING_DOMAIN_LEN_RATIO.ilog2() {
667            working_domain = working_domain.halve().unwrap();
668        }
669        let working_domain = working_domain;
670
671        let num_rows = working_domain.length;
672        let coset_root_order = (num_rows * NUM_COSETS).try_into().unwrap();
673
674        // the powers of ι define `NUM_COSETS`-many cosets of the working domain
675        let iota = BFieldElement::primitive_root_of_unity(coset_root_order).unwrap();
676        let psi = domains.fri.offset;
677
678        // for every coset, evaluate constraints
679        profiler!(start "zero-initialization");
680        // column majority (“`F`”) for contiguous column slices
681        let mut quotient_multicoset_evaluations =
682            ndarray_helper::par_zeros((num_rows, NUM_COSETS).f());
683        let mut main_columns =
684            ndarray_helper::par_zeros((num_rows, MasterMainTable::NUM_COLUMNS).f());
685        let mut aux_columns =
686            ndarray_helper::par_zeros((num_rows, MasterAuxTable::NUM_COLUMNS).f());
687        profiler!(stop "zero-initialization");
688
689        profiler!(start "fetch trace randomizers");
690        let main_trace_randomizers = (0..MasterMainTable::NUM_COLUMNS)
691            .into_par_iter()
692            .map(|i| main_table.trace_randomizer_for_column(i))
693            .collect::<Vec<_>>();
694        let aux_trace_randomizers = (0..MasterAuxTable::NUM_COLUMNS)
695            .into_par_iter()
696            .map(|i| aux_table.trace_randomizer_for_column(i))
697            .collect::<Vec<_>>();
698        let main_trace_randomizers = Array1::from(main_trace_randomizers);
699        let aux_trace_randomizers = Array1::from(aux_trace_randomizers);
700        profiler!(stop "fetch trace randomizers");
701
702        profiler!(start "poly interpolate" ("LDE"));
703        main_table
704            .trace_table_mut()
705            .axis_iter_mut(COL_AXIS)
706            .into_par_iter()
707            .for_each(|mut column| intt(column.as_slice_mut().unwrap()));
708        aux_table
709            .trace_table_mut()
710            .axis_iter_mut(COL_AXIS)
711            .into_par_iter()
712            .for_each(|mut column| intt(column.as_slice_mut().unwrap()));
713        profiler!(stop "poly interpolate");
714
715        profiler!(start "calculate quotients");
716        for (coset_index, quotient_column) in
717            (0..).zip(quotient_multicoset_evaluations.columns_mut())
718        {
719            // offset by fri domain offset to avoid division-by-zero errors
720            let working_domain = working_domain.with_offset(iota.mod_pow(coset_index) * psi);
721            profiler!(start "poly evaluate" ("LDE"));
722            Zip::from(main_table.trace_table().axis_iter(COL_AXIS))
723                .and(main_columns.axis_iter_mut(COL_AXIS))
724                .par_for_each(|trace_column, target_column| {
725                    let trace_poly = Polynomial::new_borrowed(trace_column.as_slice().unwrap());
726                    Array1::from(working_domain.evaluate(&trace_poly)).move_into(target_column);
727                });
728            Zip::from(aux_table.trace_table().axis_iter(COL_AXIS))
729                .and(aux_columns.axis_iter_mut(COL_AXIS))
730                .par_for_each(|trace_column, target_column| {
731                    let trace_poly = Polynomial::new_borrowed(trace_column.as_slice().unwrap());
732                    Array1::from(working_domain.evaluate(&trace_poly)).move_into(target_column);
733                });
734            profiler!(stop "poly evaluate");
735
736            // A _randomized_ trace interpolant is:
737            //
738            //    trace_interpolant + trace_zerofier·trace_randomizer
739            //    ╶───────┬───────╴   ╶──────────────┬──────────────╴
740            //            ╵                          │
741            //   was just moved into                 ╵
742            //  `{main, aux}_columns`          still missing
743            //
744            //
745            // Knowing the shape of the trace zerofier (see also
746            // `domain.zerofier()`), and with the length of the trace domain
747            // being `n`, this is:
748            //
749            //  trace_zerofier·trace_randomizer = (X^n - 1)·trace_randomizer
750            //
751            // For reasons of efficiency, all three components (interpolant,
752            // zerofier, and randomizer) are evaluated over the
753            // `working_domain`, i.e., they are codewords. The constant
754            // `RANDOMIZED_TRACE_LEN_TO_WORKING_DOMAIN_LEN_RATIO` defines the
755            // length of the working domain in relation to the randomized trace
756            // domain. Let the length of the working domain be `m`, the generator
757            // of the working domain a primitive mth root of unity ξ, and the
758            // working domain's offset γ.
759            //
760            // If the length of the working domain `m` is less than or equal to the
761            // length of the trace domain, i.e., if m <= n or equivalently, if
762            // constant `RANDOMIZED_TRACE_LEN_TO_WORKING_DOMAIN_LEN_RATIO` >= 2,
763            // then evaluating the trace zerofier (X^n - 1) on the `i`th working
764            // domain value gives:
765            //
766            //   (X^n - 1)(ξ^i·γ) = (ξ^i·γ)^n - 1 = 1^i · γ^n - 1 = γ^n - 1
767            //
768            // In other words, the trace_zerofier codeword over the working
769            // domain is [working_domain_offset^n - 1; m].
770            //
771            // Should a future re-design want to consider a working domain of
772            // length equal to the randomized trace domain, or in other words,
773            // should it consider a
774            // `RANDOMIZED_TRACE_LEN_TO_WORKING_DOMAIN_LEN_RATIO` of 1, then the
775            // trace_zerofier's contribution below needs to be generalized. On
776            // working_domain's value `i`, the zerofier contribution would then
777            // be (-1)^i·γ^n - 1. In particular, note the term (-1)^i, which is
778            // absent from the trace zerofier when evaluated on working
779            // domains at most as long as the trace domain.
780            assert!(working_domain.length <= domains.trace.length);
781
782            profiler!(start "trace randomizers" ("LDE"));
783            let trace_domain_len = u64::try_from(domains.trace.length).unwrap();
784            let zerofier = working_domain.offset.mod_pow(trace_domain_len) - BFieldElement::ONE;
785
786            Zip::from(main_columns.axis_iter_mut(COL_AXIS))
787                .and(main_trace_randomizers.axis_iter(ROW_AXIS))
788                .par_for_each(|mut column, randomizer_polynomial| {
789                    let randomizer_codeword = working_domain.evaluate(&randomizer_polynomial[[]]);
790                    for (cell, randomizer) in column.iter_mut().zip(randomizer_codeword) {
791                        *cell += zerofier * randomizer;
792                    }
793                });
794            Zip::from(aux_columns.axis_iter_mut(COL_AXIS))
795                .and(aux_trace_randomizers.axis_iter(ROW_AXIS))
796                .par_for_each(|mut column, randomizer_polynomial| {
797                    let randomizer_codeword = working_domain.evaluate(&randomizer_polynomial[[]]);
798                    for (cell, randomizer) in column.iter_mut().zip(randomizer_codeword) {
799                        *cell += zerofier * randomizer;
800                    }
801                });
802            profiler!(stop "trace randomizers");
803
804            profiler!(start "AIR evaluation" ("AIR"));
805            let all_quotients = all_quotients_combined(
806                main_columns.view(),
807                aux_columns.view(),
808                domains.trace,
809                working_domain,
810                challenges,
811                quotient_combination_weights,
812            );
813            Array1::from(all_quotients).move_into(quotient_column);
814            profiler!(stop "AIR evaluation");
815        }
816        profiler!(stop "calculate quotients");
817
818        profiler!(start "segmentify");
819        let segmentification = Self::segmentify::<NUM_QUOTIENT_SEGMENTS>(
820            quotient_multicoset_evaluations,
821            psi,
822            iota,
823            domains.fri,
824        );
825        profiler!(stop "segmentify");
826
827        profiler!(start "restore original trace" ("LDE"));
828        main_table
829            .trace_table_mut()
830            .axis_iter_mut(COL_AXIS)
831            .into_par_iter()
832            .for_each(|mut column| ntt(column.as_slice_mut().unwrap()));
833        aux_table
834            .trace_table_mut()
835            .axis_iter_mut(COL_AXIS)
836            .into_par_iter()
837            .for_each(|mut column| ntt(column.as_slice_mut().unwrap()));
838        profiler!(stop "restore original trace");
839
840        segmentification
841    }
842
843    /// Map a matrix whose columns represent the evaluation of a high-degree
844    /// polynomial f on all constituents of a partition of some large domain
845    /// into smaller cosets, to
846    /// 1. a matrix of segment codewords (on the FRI domain), and
847    /// 2. an array of matching segment polynomials,
848    ///
849    /// such that the segment polynomials correspond to the interleaving split
850    /// of the original high-degree polynomial.
851    ///
852    /// For example, let f(X) have degree M·N where N is the chosen domain's
853    /// length. Then the input is an N×M matrix representing the values of
854    /// f(X) on the chosen domain and its cosets:
855    ///
856    /// ```txt
857    /// ⎛  …          …   …           ⎞  ┬
858    /// ⎜ f(coset_0)  …  f(coset_M-1) ⎟ domain length
859    /// ⎝  …          …   …           ⎠  ┴
860    ///
861    /// ├───────── NUM_COSETS ────────┤
862    /// ```
863    ///
864    /// The `NUM_SEGMENTS` (=:`K`) produced segment polynomials are f_k(X) such
865    /// that f(X) = Σ_k X^k · f_k(X^K).
866    /// For example, for `K = 2`, this is f(X) = f_E(X²) + X·f_O(X²).
867    ///
868    /// The produced segment codewords are the segment polynomial's evaluations
869    /// on the FRI domain:
870    ///
871    /// ```txt
872    /// ⎛  …            …   …             ⎞  ┬
873    /// ⎜ f_0(FRI_dom)  …  f_K-1(FRI_dom) ⎟ FRI domain length
874    /// ⎝  …            …   …             ⎠  ┴
875    ///
876    /// ├────────── NUM_SEGMENTS ─────────┤
877    /// ```
878    //
879    // The mechanics of this function are backed by some serious maths
880    // originally derived by Alan Szepieniec, and later generalized by him and
881    // Jan Ferdinand Sauer.
882    //
883    // The main idea is based on the segmentation formula. For K segments, this
884    // is
885    //
886    // f(X) = Σ_{k=0}^{K-1} X^k · f_k(X^K)
887    //
888    // where each f_k is one segment. When substituting X for X·ξ^l, where ξ is
889    // a Kth root of unity (i.e., ξ^K = 1), this gives rise to K equations,
890    // where l ∈ { 0, …, K-1 }:
891    //
892    // f(X·ξ^0)     = Σ_{k=0}^{K-1} (X·ξ^0)^k     · f_k(X^K)
893    //              ⋮
894    // f(X·ξ^(K-1)) = Σ_{k=0}^{K-1} (X·ξ^(K-1))^k · f_k(X^K)
895    //
896    // Note how the indeterminates of the f_k are identical for all rows. That
897    // is, the mapping between f's evaluations on (the “right”) cosets and f's
898    // segments is a linear one.
899    //
900    // ⎛  …       ⎞       ⎛     …               ⎞   ⎛  …       ⎞
901    // ⎜ f(X·ξ^l) ⎟   =   ⎜ …  X^k · ξ^(k·l)  … ⎟ · ⎜ f_k(X^K) ⎟             (1)
902    // ⎝  …       ⎠       ⎝     …               ⎠   ⎝  …       ⎠
903    //
904    // This function works by applying the inverse of the coefficient matrix to
905    // the function's input, i.e., to the left hand side of above equation. To
906    // compute this map efficiently, it is decomposed as follows. Operator “∘”
907    // denotes the Hadamard, i.e., element-wise product.
908    //
909    //                    ⎛     …         ⎞   ⎛ ⎛  …  ⎞   ⎛  …       ⎞ ⎞
910    //                =   ⎜ …  ξ^(k·l)  … ⎟ · ⎜ ⎜ X^k ⎟ ∘ ⎜ f_k(X^K) ⎟ ⎟
911    //                    ⎝     …         ⎠   ⎝ ⎝  …  ⎠   ⎝  …       ⎠ ⎠
912    //
913    // The coefficient matrix has dimensions K×K. Since ξ is a Kth root of
914    // unity, above matrix is an NTT matrix. That means its application can be
915    // efficiently reverted by performing iNTTs.
916    // The final step is elementwise multiplication with the vector (X^(-k)) to
917    // get the segment polynomials.
918    //
919    // For reasons of efficiency, this function does not operate on polynomials
920    // in monomial coefficient form, but on polynomial evaluations on some
921    // domain, i.e., codewords.
922    // Also for reasons of efficiency, the domain length N is a power of two,
923    // and the evaluation points are multiples of an Nth root of unity, ω. In
924    // order to avoid divisions by zero, the domain is offset by Ψ. Furthermore,
925    // the offset of a coset is some power of ι, which itself is a root of unity
926    // of order N·M, where M is the number of cosets. That is, ι^M = ω,
927    // and ω^N = 1. Summarizing, this function's input is a matrix of the
928    // following form:
929    //
930    // ⎛     …                  ⎞╷ ┬       ⎛     …                   ⎞
931    // ⎜ …  f(Ψ · ι^j · ω^i)  … ⎟i N   =   ⎜ …  f(Ψ · ι^(j + iM))  … ⎟
932    // ⎝     …                  ⎠↓ ┴       ⎝     …                   ⎠
933    // ╶─────────── j ─────────→
934    // ├─────────── M ──────────┤
935    //
936    // In order to kick off the series of steps derived & outlined above, this
937    // matrix needs to be rearranged. The desired shape can be derived by taking
938    // the left-hand side of the system of equations (1) and substituting the
939    // indeterminate X for the points at which f is evaluated, Ψ · ι^j · ω^i.
940    // Let L be such that N·M = L·K. Observe that ξ being a Kth root of unity
941    // implies ξ = ω^(N/K) = ι^(N·M/K) = ι^L.
942    //
943    // ⎛  …       ⎞       ⎛     …                          ⎞ ┬
944    // ⎜ f(X·ξ^l) ⎟   ↦   ⎜ …  f(ψ · ι^(j + i·M + l·L))  … ⎟ L
945    // ⎝  …       ⎠       ⎝     …                          ⎠ ┴
946    //
947    //                    ├────────────── K ──────────────┤
948    //
949    // Helpful insights to understand the matrix re-arrangement are:
950    // - the various powers of ι, i.e., { ι^(j + i·M) | 0 ⩽ i < N, 0 ⩽ j < M },
951    //   sweep the entire input matrix (which has dimensions N×M)
952    // - ι is a (primitive) (N·M)th root of unity and thus, _all_ powers of ι
953    //   are required to index the entirety of the input matrix
954    // - the re-arranged matrix (which has dimensions L×K) has all the same
955    //   entries as the input matrix
956    //
957    // The map that satisfies all of these re-arrangement constraints is
958    // (i, j) ↦ ((j + i·M) % L, (j + i·M) // L)
959    // which has the inverse
960    // (a, b) ↦ ((a + b·L) // M, (a + b·L) % M).
961    //
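    // A tiny worked example with hypothetical sizes N = 8, M = 4, K = 2 (so
    // L = N·M/K = 16): input cell (i, j) = (5, 3) has ι-exponent
    // j + i·M = 3 + 5·4 = 23 and is therefore moved to output cell
    // (23 % 16, 23 // 16) = (7, 1); the inverse map sends (7, 1) back via
    // 7 + 1·16 = 23 to (23 // 4, 23 % 4) = (5, 3).
    //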
962    // With all those pieces in place, it's finally time for some code.
963    fn segmentify<const NUM_SEGMENTS: usize>(
964        quotient_multicoset_evaluations: Array2<XFieldElement>,
965        psi: BFieldElement,
966        iota: BFieldElement,
967        fri_domain: ArithmeticDomain,
968    ) -> (
969        Array2<XFieldElement>,
970        Array1<Polynomial<'static, XFieldElement>>,
971    ) {
972        let num_input_rows = quotient_multicoset_evaluations.nrows();
973        let num_cosets = quotient_multicoset_evaluations.ncols();
974        let num_output_rows = num_input_rows * num_cosets / NUM_SEGMENTS;
975
976        assert!(num_input_rows.is_power_of_two());
977        assert!(num_cosets.is_power_of_two());
978        assert!(num_output_rows.is_power_of_two());
979        assert!(NUM_SEGMENTS.is_power_of_two());
980        assert!(
981            num_input_rows >= num_cosets,
982            "working domain length: {num_input_rows} versus num cosets: {num_cosets}",
983        );
984        assert!(
985            num_cosets >= NUM_SEGMENTS,
986            "num cosets: {num_cosets} versus num segments: {NUM_SEGMENTS}",
987        );
988
989        // Re-arrange data in preparation for iNTT:
990        // Move appropriate powers of ξ^(k·l) with the same k into the same row.
991        // Change the matrix's dimensions from N×M to L×K, with row majority
992        // (“`C`”) to get contiguous row slices for iNTT.
993        let mut quotient_segments = ndarray_helper::par_zeros((num_output_rows, NUM_SEGMENTS));
994        quotient_segments
995            .axis_iter_mut(ROW_AXIS)
996            .into_par_iter()
997            .enumerate()
998            .for_each(|(output_row_idx, mut output_row)| {
999                for (output_col_idx, cell) in output_row.iter_mut().enumerate() {
1000                    let exponent_of_iota = output_row_idx + output_col_idx * num_output_rows;
1001                    let input_row_idx = exponent_of_iota / num_cosets;
1002                    let input_col_idx = exponent_of_iota % num_cosets;
1003                    *cell = quotient_multicoset_evaluations[[input_row_idx, input_col_idx]];
1004                }
1005            });
1006
1007        // apply inverse of Vandermonde matrix for ξ = ι^L to every row
1008        quotient_segments
1009            .axis_iter_mut(ROW_AXIS)
1010            .into_par_iter()
1011            .for_each(|mut row| intt(row.as_slice_mut().unwrap()));
1012
1013        // scale every row by Ψ^-k · ι^(-k(j+i·M))
1014        let num_threads = rayon::current_num_threads().max(1);
1015        let chunk_size = (num_output_rows / num_threads).max(1);
1016        let iota_inverse = iota.inverse();
1017        let psi_inverse = psi.inverse();
1018        quotient_segments
1019            .axis_chunks_iter_mut(ROW_AXIS, chunk_size)
1020            .into_par_iter()
1021            .enumerate()
1022            .for_each(|(chunk_idx, mut chunk_of_rows)| {
1023                let chunk_start = (chunk_idx * chunk_size).try_into().unwrap();
1024                let mut psi_iotajim_inv = psi_inverse * iota_inverse.mod_pow(chunk_start);
1025                for mut row in chunk_of_rows.rows_mut() {
1026                    let mut psi_iotajim_invk = XFieldElement::ONE;
1027                    for cell in &mut row {
1028                        *cell *= psi_iotajim_invk;
1029                        psi_iotajim_invk *= psi_iotajim_inv;
1030                    }
1031                    psi_iotajim_inv *= iota_inverse;
1032                }
1033            });
1034
1035        // low-degree extend columns from trace to FRI domain
1036        let segment_domain_offset = psi.mod_pow(NUM_SEGMENTS.try_into().unwrap());
1037        let segment_domain = ArithmeticDomain::of_length(num_output_rows)
1038            .unwrap()
1039            .with_offset(segment_domain_offset);
1040
1041        let mut quotient_codewords = Array2::zeros([fri_domain.length, NUM_SEGMENTS]);
1042        let mut quotient_polynomials = Array1::zeros([NUM_SEGMENTS]);
1043        Zip::from(quotient_segments.axis_iter(COL_AXIS))
1044            .and(quotient_codewords.axis_iter_mut(COL_AXIS))
1045            .and(quotient_polynomials.axis_iter_mut(ROW_AXIS))
1046            .par_for_each(|segment, target_codeword, target_polynomial| {
1047                // Getting a column as a contiguous piece of memory from a
1048                // row-majority matrix requires `.to_vec()`.
1049                let interpolant = segment_domain.interpolate(&segment.to_vec());
1050                let lde_codeword = fri_domain.evaluate(&interpolant);
1051                Array1::from(lde_codeword).move_into(target_codeword);
1052                Array0::from_elem((), interpolant).move_into(target_polynomial);
1053            });
1054
1055        (quotient_codewords, quotient_polynomials)
1056    }
1057
1058    fn interpolate_quotient_segments(
1059        quotient_codeword: Array1<XFieldElement>,
1060        quotient_domain: ArithmeticDomain,
1061    ) -> Array1<Polynomial<'static, XFieldElement>> {
1062        let quotient_interpolation_poly = quotient_domain.interpolate(&quotient_codeword.to_vec());
1063        let quotient_segments: [_; NUM_QUOTIENT_SEGMENTS] =
1064            Self::split_polynomial_into_segments(quotient_interpolation_poly);
1065        Array1::from(quotient_segments.to_vec())
1066    }
1067
1068    /// Losslessly split the given polynomial `f` into `N` segments of (roughly)
1069    /// equal degree. The degree of each segment is at most `f.degree() /
1070    /// N`. It holds that `f(x) = Σ_{i=0}^{N-1} x^i·f_i(x^N)`, where the
1071    /// `f_i` are the segments.
1072    ///
1073    /// For example, let
1074    /// - `N = 3`, and
1075    /// - `f(x) = 7·x^7 + 6·x^6 + 5·x^5 + 4·x^4 + 3·x^3 + 2·x^2 + 1·x + 0`.
1076    ///
1077    /// Then, the function returns the array:
1078    ///
1079    /// ```text
1080    /// [f_0(x) = 6·x^2 + 3·x + 0,
1081    ///  f_1(x) = 7·x^2 + 4·x + 1,
1082    ///  f_2(x) =         5·x + 2]
1083    /// ```
1084    ///
1085    /// The following equality holds: `f(x) == f_0(x^3) + x·f_1(x^3) +
1086    /// x^2·f_2(x^3)`.
1087    fn split_polynomial_into_segments<const N: usize, FF: FiniteField>(
1088        polynomial: Polynomial<FF>,
1089    ) -> [Polynomial<'static, FF>; N] {
1090        let mut segments = Vec::with_capacity(N);
1091        let coefficients = polynomial.into_coefficients();
1092        for segment_index in 0..N {
1093            let segment_coefficients = coefficients.iter().skip(segment_index).step_by(N);
1094            let segment = Polynomial::new(segment_coefficients.copied().collect());
1095            segments.push(segment);
1096        }
1097        segments.try_into().unwrap()
1098    }
1099
1100    fn fri_domain_segment_polynomials(
1101        quotient_segment_polynomials: ArrayView1<Polynomial<XFieldElement>>,
1102        fri_domain: ArithmeticDomain,
1103    ) -> Array2<XFieldElement> {
1104        let fri_domain_codewords: Vec<_> = quotient_segment_polynomials
1105            .into_par_iter()
1106            .flat_map(|segment| fri_domain.evaluate(segment))
1107            .collect();
1108
1109        // While later steps would benefit from row majority (“`C`”), the
1110        // required `.to_standard_layout().to_owned()` is prohibitively
1111        // expensive. Should we decide to chase small gains at some point,
1112        // then a parallel transpose could be considered.
1113        Array2::from_shape_vec(
1114            [fri_domain.length, NUM_QUOTIENT_SEGMENTS].f(),
1115            fri_domain_codewords,
1116        )
1117        .unwrap()
1118    }
1119
1120    /// Apply the [DEEP update](Stark::deep_update) to a polynomial in value
1121    /// form, _i.e._, to a codeword.
1122    fn deep_codeword(
1123        codeword: &[XFieldElement],
1124        domain: ArithmeticDomain,
1125        out_of_domain_point: XFieldElement,
1126        out_of_domain_value: XFieldElement,
1127    ) -> Vec<XFieldElement> {
1128        domain
1129            .domain_values()
1130            .par_iter()
1131            .zip_eq(codeword)
1132            .map(|(&in_domain_value, &in_domain_evaluation)| {
1133                Stark::deep_update(
1134                    in_domain_value,
1135                    in_domain_evaluation,
1136                    out_of_domain_point,
1137                    out_of_domain_value,
1138                )
1139            })
1140            .collect()
1141    }
1142}
1143
1144impl Verifier {
1145    pub fn new(parameters: Stark) -> Self {
1146        Self { parameters }
1147    }
1148
1149    /// See also [`Stark::verify`].
1150    pub fn verify(self, claim: &Claim, proof: &Proof) -> Result<(), VerificationError> {
1151        profiler!(start "deserialize");
1152        let mut proof_stream = ProofStream::try_from(proof)?;
1153        profiler!(stop "deserialize");
1154
1155        profiler!(start "Fiat-Shamir: Claim" ("hash"));
1156        proof_stream.alter_fiat_shamir_state_with(claim);
1157        profiler!(stop "Fiat-Shamir: Claim");
1158
1159        profiler!(start "derive additional parameters");
1160        let log_2_padded_height = proof_stream.dequeue()?.try_into_log2_padded_height()?;
1161        let padded_height = 1 << log_2_padded_height;
1162        let fri = self.parameters.fri(padded_height)?;
1163        let merkle_tree_height = fri.domain.length.ilog2();
1164
1165        // The trace domain used by the prover is not necessarily of length
1166        // `padded_height`. Concretely, this is the case if the number of trace
1167        // randomizers is larger than `padded_height`. However, the trace domain
1168        // length is guaranteed to be _exactly_ half the size of the randomized
1169        // trace domain's length, due to the current approach for randomizing
1170        // the trace.
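        //
        // For example (hypothetical numbers): with a padded height of 512 and
        // a number of trace randomizers that fits alongside the trace, the
        // randomized trace domain has length 1024, and the trace domain
        // therefore has length 512.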
1171        let trace_domain_len =
1172            Stark::randomized_trace_len(padded_height, self.parameters.num_trace_randomizers) / 2;
1173        profiler!(stop "derive additional parameters");
1174
1175        profiler!(start "Fiat-Shamir 1" ("hash"));
1176        let main_merkle_tree_root = proof_stream.dequeue()?.try_into_merkle_root()?;
1177        let extension_challenge_weights = proof_stream.sample_scalars(Challenges::SAMPLE_COUNT);
1178        let challenges = Challenges::new(extension_challenge_weights, claim);
1179        let auxiliary_tree_merkle_root = proof_stream.dequeue()?.try_into_merkle_root()?;
1180
1181        // Sample weights for quotient codeword, which is a part of the
1182        // combination codeword. See corresponding part in the prover for a more
1183        // detailed explanation.
1184        let quot_codeword_weights = proof_stream.sample_scalars(MasterAuxTable::NUM_CONSTRAINTS);
1185        let quot_codeword_weights = Array1::from(quot_codeword_weights);
1186        let quotient_codeword_merkle_root = proof_stream.dequeue()?.try_into_merkle_root()?;
1187        profiler!(stop "Fiat-Shamir 1");
1188
1189        profiler!(start "dequeue ood point and rows" ("hash"));
1190        let trace_domain_generator =
1191            ArithmeticDomain::generator_for_length(trace_domain_len as u64)?;
1192        let out_of_domain_point_curr_row = proof_stream.sample_scalars(1)[0];
1193        let out_of_domain_point_next_row = trace_domain_generator * out_of_domain_point_curr_row;
1194        let out_of_domain_point_curr_row_pow_num_segments =
1195            out_of_domain_point_curr_row.mod_pow_u32(NUM_QUOTIENT_SEGMENTS as u32);
1196
1197        let out_of_domain_curr_main_row =
1198            proof_stream.dequeue()?.try_into_out_of_domain_main_row()?;
1199        let out_of_domain_curr_aux_row =
1200            proof_stream.dequeue()?.try_into_out_of_domain_aux_row()?;
1201        let out_of_domain_next_main_row =
1202            proof_stream.dequeue()?.try_into_out_of_domain_main_row()?;
1203        let out_of_domain_next_aux_row =
1204            proof_stream.dequeue()?.try_into_out_of_domain_aux_row()?;
1205        let out_of_domain_curr_row_quot_segments = proof_stream
1206            .dequeue()?
1207            .try_into_out_of_domain_quot_segments()?;
1208
1209        let out_of_domain_curr_main_row = Array1::from(out_of_domain_curr_main_row.to_vec());
1210        let out_of_domain_curr_aux_row = Array1::from(out_of_domain_curr_aux_row.to_vec());
1211        let out_of_domain_next_main_row = Array1::from(out_of_domain_next_main_row.to_vec());
1212        let out_of_domain_next_aux_row = Array1::from(out_of_domain_next_aux_row.to_vec());
1213        let out_of_domain_curr_row_quot_segments =
1214            Array1::from(out_of_domain_curr_row_quot_segments.to_vec());
1215        profiler!(stop "dequeue ood point and rows");
1216
1217        profiler!(start "out-of-domain quotient element");
1218        profiler!(start "evaluate AIR" ("AIR"));
1219        let evaluated_initial_constraints = MasterAuxTable::evaluate_initial_constraints(
1220            out_of_domain_curr_main_row.view(),
1221            out_of_domain_curr_aux_row.view(),
1222            &challenges,
1223        );
1224        let evaluated_consistency_constraints = MasterAuxTable::evaluate_consistency_constraints(
1225            out_of_domain_curr_main_row.view(),
1226            out_of_domain_curr_aux_row.view(),
1227            &challenges,
1228        );
1229        let evaluated_transition_constraints = MasterAuxTable::evaluate_transition_constraints(
1230            out_of_domain_curr_main_row.view(),
1231            out_of_domain_curr_aux_row.view(),
1232            out_of_domain_next_main_row.view(),
1233            out_of_domain_next_aux_row.view(),
1234            &challenges,
1235        );
1236        let evaluated_terminal_constraints = MasterAuxTable::evaluate_terminal_constraints(
1237            out_of_domain_curr_main_row.view(),
1238            out_of_domain_curr_aux_row.view(),
1239            &challenges,
1240        );
1241        profiler!(stop "evaluate AIR");
1242
1243        profiler!(start "zerofiers");
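        // With the trace domain {g^0, …, g^(n-1)} of length n, the zerofiers
        // evaluated at the out-of-domain point α are:
        //   initial:     α - 1                      (first row),
        //   consistency: α^n - 1                    (every row),
        //   transition:  (α^n - 1) / (α - g^(n-1))  (every row but the last),
        //   terminal:    α - g^(n-1)                (the last row only),
        // where g^(n-1) = g^(-1). Only their inverses are needed below, since
        // the constraint evaluations are divided by the zerofiers.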
1244        let initial_zerofier_inv = (out_of_domain_point_curr_row - bfe!(1)).inverse();
1245        let consistency_zerofier_inv =
1246            (out_of_domain_point_curr_row.mod_pow_u64(trace_domain_len as u64) - bfe!(1)).inverse();
1247        let except_last_row = out_of_domain_point_curr_row - trace_domain_generator.inverse();
1248        let transition_zerofier_inv = except_last_row * consistency_zerofier_inv;
1249        let terminal_zerofier_inv = except_last_row.inverse(); // i.e., only last row
1250        profiler!(stop "zerofiers");
1251
1252        profiler!(start "divide");
1253        let divide = |constraints: Vec<_>, z_inv| constraints.into_iter().map(move |c| c * z_inv);
1254        let initial_quotients = divide(evaluated_initial_constraints, initial_zerofier_inv);
1255        let consistency_quotients =
1256            divide(evaluated_consistency_constraints, consistency_zerofier_inv);
1257        let transition_quotients =
1258            divide(evaluated_transition_constraints, transition_zerofier_inv);
1259        let terminal_quotients = divide(evaluated_terminal_constraints, terminal_zerofier_inv);
1260
1261        let quotient_summands = initial_quotients
1262            .chain(consistency_quotients)
1263            .chain(transition_quotients)
1264            .chain(terminal_quotients)
1265            .collect_vec();
1266        profiler!(stop "divide");
1267
1268        profiler!(start "inner product" ("CC"));
1269        let out_of_domain_quotient_value =
1270            quot_codeword_weights.dot(&Array1::from(quotient_summands));
1271        profiler!(stop "inner product");
1272        profiler!(stop "out-of-domain quotient element");
1273
1274        profiler!(start "verify quotient's segments");
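        // The quotient polynomial Q was split into k = NUM_QUOTIENT_SEGMENTS
        // segments Q_i satisfying Q(X) = Σ_i X^i · Q_i(X^k). Hence, the
        // quotient value recomputed from the AIR above must equal
        // Σ_i α^i · Q_i(α^k), where the Q_i(α^k) are the out-of-domain
        // quotient segment evaluations dequeued earlier.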
1275        let powers_of_out_of_domain_point_curr_row = (0..NUM_QUOTIENT_SEGMENTS as u32)
1276            .map(|exponent| out_of_domain_point_curr_row.mod_pow_u32(exponent))
1277            .collect::<Array1<_>>();
1278        let sum_of_evaluated_out_of_domain_quotient_segments =
1279            powers_of_out_of_domain_point_curr_row.dot(&out_of_domain_curr_row_quot_segments);
1280        if out_of_domain_quotient_value != sum_of_evaluated_out_of_domain_quotient_segments {
1281            return Err(VerificationError::OutOfDomainQuotientValueMismatch);
1282        };
1283        profiler!(stop "verify quotient's segments");
1284
1285        profiler!(start "Fiat-Shamir 2" ("hash"));
1286        let weights = LinearCombinationWeights::sample(&mut proof_stream);
1287        let main_and_aux_codeword_weights = weights.main_and_aux();
1288        profiler!(stop "Fiat-Shamir 2");
1289
1290        profiler!(start "sum out-of-domain values" ("CC"));
1291        let out_of_domain_curr_row_main_and_aux_value = Self::linearly_sum_main_and_aux_row(
1292            out_of_domain_curr_main_row.view(),
1293            out_of_domain_curr_aux_row.view(),
1294            main_and_aux_codeword_weights.view(),
1295        );
1296        let out_of_domain_next_row_main_and_aux_value = Self::linearly_sum_main_and_aux_row(
1297            out_of_domain_next_main_row.view(),
1298            out_of_domain_next_aux_row.view(),
1299            main_and_aux_codeword_weights.view(),
1300        );
1301        let out_of_domain_curr_row_quotient_segment_value = weights
1302            .quot_segments
1303            .dot(&out_of_domain_curr_row_quot_segments);
1304        profiler!(stop "sum out-of-domain values");
1305
1306        // verify low degree of combination polynomial with FRI
1307        profiler!(start "FRI");
1308        let revealed_fri_indices_and_elements = fri.verify(&mut proof_stream)?;
1309        let (revealed_current_row_indices, revealed_fri_values): (Vec<_>, Vec<_>) =
1310            revealed_fri_indices_and_elements.into_iter().unzip();
1311        profiler!(stop "FRI");
1312
1313        profiler!(start "check leafs");
1314        profiler!(start "dequeue main elements");
1315        let main_table_rows = proof_stream.dequeue()?.try_into_master_main_table_rows()?;
1316        let main_authentication_structure = proof_stream
1317            .dequeue()?
1318            .try_into_authentication_structure()?;
1319        let leaf_digests_main: Vec<_> = main_table_rows
1320            .par_iter()
1321            .map(|revealed_main_elem| Tip5::hash_varlen(revealed_main_elem))
1322            .collect();
1323        profiler!(stop "dequeue main elements");
1324
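        // Pair each revealed leaf digest with the FRI-domain row index it
        // corresponds to, as required by `MerkleTreeInclusionProof`.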
1325        let index_leaves = |leaves| {
1326            let index_iter = revealed_current_row_indices.iter().copied();
1327            index_iter.zip_eq(leaves).collect()
1328        };
1329        profiler!(start "Merkle verify (main tree)" ("hash"));
1330        let main_merkle_tree_inclusion_proof = MerkleTreeInclusionProof {
1331            tree_height: merkle_tree_height,
1332            indexed_leafs: index_leaves(leaf_digests_main),
1333            authentication_structure: main_authentication_structure,
1334        };
1335        if !main_merkle_tree_inclusion_proof.verify(main_merkle_tree_root) {
1336            return Err(VerificationError::MainCodewordAuthenticationFailure);
1337        }
1338        profiler!(stop "Merkle verify (main tree)");
1339
1340        profiler!(start "dequeue auxiliary elements");
1341        let aux_table_rows = proof_stream.dequeue()?.try_into_master_aux_table_rows()?;
1342        let aux_authentication_structure = proof_stream
1343            .dequeue()?
1344            .try_into_authentication_structure()?;
1345        let leaf_digests_aux = aux_table_rows
1346            .par_iter()
1347            .map(|row| row.as_slice())
1348            .map(XFieldElement::bfe_slice)
1349            .map(Tip5::hash_varlen)
1350            .collect();
1351        profiler!(stop "dequeue auxiliary elements");
1352
1353        profiler!(start "Merkle verify (auxiliary tree)" ("hash"));
1354        let aux_merkle_tree_inclusion_proof = MerkleTreeInclusionProof {
1355            tree_height: merkle_tree_height,
1356            indexed_leafs: index_leaves(leaf_digests_aux),
1357            authentication_structure: aux_authentication_structure,
1358        };
1359        if !aux_merkle_tree_inclusion_proof.verify(auxiliary_tree_merkle_root) {
1360            return Err(VerificationError::AuxiliaryCodewordAuthenticationFailure);
1361        }
1362        profiler!(stop "Merkle verify (auxiliary tree)");
1363
1364        profiler!(start "dequeue quotient segments' elements");
1365        let revealed_quotient_segments_elements =
1366            proof_stream.dequeue()?.try_into_quot_segments_elements()?;
1367        let revealed_quotient_segments_digests = revealed_quotient_segments_elements
1368            .iter()
1369            .map(|row| row.as_slice())
1370            .map(XFieldElement::bfe_slice)
1371            .map(Tip5::hash_varlen)
1372            .collect();
1373        let revealed_quotient_authentication_structure = proof_stream
1374            .dequeue()?
1375            .try_into_authentication_structure()?;
1376        profiler!(stop "dequeue quotient segments' elements");
1377
1378        profiler!(start "Merkle verify (combined quotient)" ("hash"));
1379        let quot_merkle_tree_inclusion_proof = MerkleTreeInclusionProof {
1380            tree_height: merkle_tree_height,
1381            indexed_leafs: index_leaves(revealed_quotient_segments_digests),
1382            authentication_structure: revealed_quotient_authentication_structure,
1383        };
1384        if !quot_merkle_tree_inclusion_proof.verify(quotient_codeword_merkle_root) {
1385            return Err(VerificationError::QuotientCodewordAuthenticationFailure);
1386        }
1387        profiler!(stop "Merkle verify (combined quotient)");
1388        profiler!(stop "check leafs");
1389
1390        profiler!(start "linear combination");
1391        if self.parameters.num_collinearity_checks != revealed_current_row_indices.len() {
1392            return Err(VerificationError::IncorrectNumberOfRowIndices);
1393        };
1394        if self.parameters.num_collinearity_checks != revealed_fri_values.len() {
1395            return Err(VerificationError::IncorrectNumberOfFRIValues);
1396        };
1397        if self.parameters.num_collinearity_checks != revealed_quotient_segments_elements.len() {
1398            return Err(VerificationError::IncorrectNumberOfQuotientSegmentElements);
1399        };
1400        if self.parameters.num_collinearity_checks != main_table_rows.len() {
1401            return Err(VerificationError::IncorrectNumberOfMainTableRows);
1402        };
1403        if self.parameters.num_collinearity_checks != aux_table_rows.len() {
1404            return Err(VerificationError::IncorrectNumberOfAuxTableRows);
1405        };
1406
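        // For every revealed FRI-domain row: recombine the revealed main, aux,
        // and quotient-segment elements using the sampled weights, apply the
        // DEEP update with respect to the out-of-domain evaluations, and check
        // that the result matches the corresponding FRI codeword element.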
1407        for (row_idx, main_row, aux_row, quotient_segments_elements, fri_value) in izip!(
1408            revealed_current_row_indices,
1409            main_table_rows,
1410            aux_table_rows,
1411            revealed_quotient_segments_elements,
1412            revealed_fri_values,
1413        ) {
1414            let main_row = Array1::from(main_row.to_vec());
1415            let aux_row = Array1::from(aux_row.to_vec());
1416            let current_fri_domain_value = fri.domain.domain_value(row_idx as u32);
1417
1418            profiler!(start "main & aux elements" ("CC"));
1419            let main_and_aux_curr_row_element = Self::linearly_sum_main_and_aux_row(
1420                main_row.view(),
1421                aux_row.view(),
1422                main_and_aux_codeword_weights.view(),
1423            );
1424            let quotient_segments_curr_row_element = weights
1425                .quot_segments
1426                .dot(&Array1::from(quotient_segments_elements.to_vec()));
1427            profiler!(stop "main & aux elements");
1428
1429            profiler!(start "DEEP update");
1430            let main_and_aux_curr_row_deep_value = Stark::deep_update(
1431                current_fri_domain_value,
1432                main_and_aux_curr_row_element,
1433                out_of_domain_point_curr_row,
1434                out_of_domain_curr_row_main_and_aux_value,
1435            );
1436            let main_and_aux_next_row_deep_value = Stark::deep_update(
1437                current_fri_domain_value,
1438                main_and_aux_curr_row_element,
1439                out_of_domain_point_next_row,
1440                out_of_domain_next_row_main_and_aux_value,
1441            );
1442            let quot_curr_row_deep_value = Stark::deep_update(
1443                current_fri_domain_value,
1444                quotient_segments_curr_row_element,
1445                out_of_domain_point_curr_row_pow_num_segments,
1446                out_of_domain_curr_row_quotient_segment_value,
1447            );
1448            profiler!(stop "DEEP update");
1449
1450            profiler!(start "combination codeword equality");
1451            let deep_value_components = Array1::from(vec![
1452                main_and_aux_curr_row_deep_value,
1453                main_and_aux_next_row_deep_value,
1454                quot_curr_row_deep_value,
1455            ]);
1456            if fri_value != weights.deep.dot(&deep_value_components) {
1457                return Err(VerificationError::CombinationCodewordMismatch);
1458            };
1459            profiler!(stop "combination codeword equality");
1460        }
1461        profiler!(stop "linear combination");
1462        Ok(())
1463    }
1464
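    /// Compress a main-table row and the corresponding auxiliary-table row
    /// into a single [extension field element](XFieldElement) by taking the
    /// inner product with the given weights.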
1465    fn linearly_sum_main_and_aux_row<FF>(
1466        main_row: ArrayView1<FF>,
1467        aux_row: ArrayView1<XFieldElement>,
1468        weights: ArrayView1<XFieldElement>,
1469    ) -> XFieldElement
1470    where
1471        FF: FiniteField + Into<XFieldElement>,
1472        XFieldElement: Mul<FF, Output = XFieldElement>,
1473    {
1474        profiler!(start "collect");
1475        let mut row = main_row.map(|&element| element.into());
1476        row.append(ROW_AXIS, aux_row).unwrap();
1477        profiler!(stop "collect");
1478        profiler!(start "inner product");
1479        // todo: Try to get rid of this clone. The alternative line
1480        //   `let main_and_aux_element = (&weights * &summands).sum();`
1481        //   without cloning the weights does not compile for a seemingly
1482        //   nonsensical reason.
1483        let weights = weights.to_owned();
1484        let main_and_aux_element = (weights * row).sum();
1485        profiler!(stop "inner product");
1486        main_and_aux_element
1487    }
1488}
1489
1490impl Stark {
1491    /// # Panics
1492    ///
1493    /// Panics if `log2_of_fri_expansion_factor` is zero.
1494    pub fn new(security_level: usize, log2_of_fri_expansion_factor: usize) -> Self {
1495        assert_ne!(
1496            0, log2_of_fri_expansion_factor,
1497            "FRI expansion factor must be greater than one."
1498        );
1499
1500        let fri_expansion_factor = 1 << log2_of_fri_expansion_factor;
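        // Each collinearity check contributes roughly
        // log₂(fri_expansion_factor) bits of security, hence the division
        // below. At least one check is required for FRI to be meaningful.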
1501        let num_collinearity_checks = security_level / log2_of_fri_expansion_factor;
1502        let num_collinearity_checks = std::cmp::max(num_collinearity_checks, 1);
1503
1504        let num_out_of_domain_rows = 2;
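        // Roughly speaking, the trace randomizers must mask every evaluation
        // of the trace polynomials the proof reveals: one per FRI-revealed
        // row, plus the out-of-domain rows and the quotient segments, each of
        // which is an extension field element of EXTENSION_DEGREE-many base
        // field coordinates.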
1505        let num_trace_randomizers = num_collinearity_checks
1506            + num_out_of_domain_rows * x_field_element::EXTENSION_DEGREE
1507            + NUM_QUOTIENT_SEGMENTS * x_field_element::EXTENSION_DEGREE;
1508
1509        Stark {
1510            security_level,
1511            fri_expansion_factor,
1512            num_trace_randomizers,
1513            num_collinearity_checks,
1514        }
1515    }
1516
1517    /// Prove the correctness of the given [Claim] using the given
1518    /// [witness](AlgebraicExecutionTrace).
1519    ///
1520    /// This method should be the first option to consider for proving.
1521    /// For more control over the proving process, see [`Prover`].
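    ///
    /// A minimal usage sketch, assuming a `program`, its public `input`, and
    /// the corresponding execution trace `aet` and `output` are at hand:
    ///
    /// ```ignore
    /// let stark = Stark::default();
    /// let claim = Claim::about_program(&program)
    ///     .with_input(input)
    ///     .with_output(output);
    /// let proof = stark.prove(&claim, &aet)?;
    /// stark.verify(&claim, &proof)?;
    /// ```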
1522    //
1523    // This pass-through method guarantees fresh prover randomness at each call.
1524    pub fn prove(
1525        &self,
1526        claim: &Claim,
1527        aet: &AlgebraicExecutionTrace,
1528    ) -> Result<Proof, ProvingError> {
1529        Prover::new(*self).prove(claim, aet)
1530    }
1531
1532    /// Verify the validity of the given [Claim], as supported by the [Proof].
1533    ///
1534    /// See also [`Verifier`].
1535    //
1536    // This pass-through method achieves symmetry with the [`Prover`].
1537    pub fn verify(&self, claim: &Claim, proof: &Proof) -> Result<(), VerificationError> {
1538        Verifier::new(*self).verify(claim, proof)
1539    }
1540
1541    /// The length of the randomized, padded trace.
1542    ///
1543    /// Guaranteed to be a power of two.
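    ///
    /// For example, a padded height of 1024 combined with 100 trace
    /// randomizers yields a randomized trace length of 2048.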
1544    pub(crate) fn randomized_trace_len(
1545        padded_height: usize,
1546        num_trace_randomizers: usize,
1547    ) -> usize {
1548        let total_table_length = padded_height + num_trace_randomizers;
1549        total_table_length.next_power_of_two()
1550    }
1551
1552    pub(crate) fn interpolant_degree(padded_height: usize, num_trace_randomizers: usize) -> isize {
1553        (Self::randomized_trace_len(padded_height, num_trace_randomizers) - 1) as isize
1554    }
1555
1556    /// The upper bound to use for the maximum degree of the quotients, given
1557    /// the length of the trace and the number of trace randomizers. The degree
1558    /// of the quotients depends on the [AIR](air) constraints.
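    ///
    /// For example, if the maximal constraint degree works out to 9, the
    /// smallest power-of-two arithmetic domain able to hold it has length 16,
    /// and the returned degree bound is 15.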
1559    pub fn max_degree(&self, padded_height: usize) -> isize {
1560        let interpolant_degree =
1561            Self::interpolant_degree(padded_height, self.num_trace_randomizers);
1562        let max_constraint_degree_with_origin =
1563            max_degree_with_origin(interpolant_degree, padded_height);
1564        let max_constraint_degree = max_constraint_degree_with_origin.degree as u64;
1565        let min_arithmetic_domain_length_supporting_max_constraint_degree =
1566            max_constraint_degree.next_power_of_two();
1567        let max_degree_supported_by_that_smallest_arithmetic_domain =
1568            min_arithmetic_domain_length_supporting_max_constraint_degree - 1;
1569
1570        max_degree_supported_by_that_smallest_arithmetic_domain as isize
1571    }
1572
1573    /// The parameters for [FRI](Fri). The length of the
1574    /// [FRI domain](ArithmeticDomain) has a major influence on
1575    /// [proving](Prover::prove) time. It is influenced by the length of the
1576    /// [execution trace](AlgebraicExecutionTrace) and the FRI expansion factor,
1577    /// a security parameter.
1578    ///
1579    /// In principle, the FRI domain length is also influenced by the AIR's
1580    /// degree (see [`air::TARGET_DEGREE`]). However, by segmenting the
1581    /// quotient polynomial into `TARGET_DEGREE`-many parts, that influence
1582    /// is mitigated.
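    ///
    /// For example, with the default expansion factor of 4 and a padded
    /// height of 2^10, the randomized trace typically has length 2^11,
    /// resulting in a FRI domain of length 2^13.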
1583    pub fn fri(&self, padded_height: usize) -> fri::SetupResult<Fri> {
1584        let fri_domain_length = self.fri_expansion_factor
1585            * Self::randomized_trace_len(padded_height, self.num_trace_randomizers);
1586        let coset_offset = BFieldElement::generator();
1587        let domain = ArithmeticDomain::of_length(fri_domain_length)?.with_offset(coset_offset);
1588
1589        Fri::new(
1590            domain,
1591            self.fri_expansion_factor,
1592            self.num_collinearity_checks,
1593        )
1594    }
1595
1596    /// Given `f(x)` (the in-domain evaluation of polynomial `f` in `x`), the
1597    /// domain point `x` at which polynomial `f` was evaluated, the
1598    /// out-of-domain evaluation `f(α)`, and the out-of-domain point
1599    /// `α`, apply the DEEP update: `(f(x) - f(α)) / (x - α)`.
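    ///
    /// The result is the evaluation at `x` of the DEEP quotient
    /// `(f(X) - f(α)) / (X - α)`, which is a polynomial of degree
    /// `deg(f) - 1` exactly if the claimed `f(α)` is correct.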
1600    #[inline]
1601    fn deep_update(
1602        in_domain_point: BFieldElement,
1603        in_domain_value: XFieldElement,
1604        out_of_domain_point: XFieldElement,
1605        out_of_domain_value: XFieldElement,
1606    ) -> XFieldElement {
1607        (in_domain_value - out_of_domain_value) / (in_domain_point - out_of_domain_point)
1608    }
1609}
1610
1611impl Default for Stark {
1612    fn default() -> Self {
1613        let log_2_of_fri_expansion_factor = 2;
1614        let security_level = 160;
1615
1616        Self::new(security_level, log_2_of_fri_expansion_factor)
1617    }
1618}
1619
1620impl Default for Prover {
1621    fn default() -> Self {
1622        Self::new(Stark::default())
1623    }
1624}
1625
1626impl<'a> Arbitrary<'a> for Stark {
1627    fn arbitrary(u: &mut Unstructured<'a>) -> arbitrary::Result<Self> {
1628        let security_level = u.int_in_range(1..=490)?;
1629        let log_2_of_fri_expansion_factor = u.int_in_range(1..=8)?;
1630        Ok(Self::new(security_level, log_2_of_fri_expansion_factor))
1631    }
1632}
1633
1634/// Fiat-Shamir-sampled challenges to compress a row into a single
1635/// [extension field element][XFieldElement].
1636struct LinearCombinationWeights {
1637    /// of length [`MasterMainTable::NUM_COLUMNS`]
1638    main: Array1<XFieldElement>,
1639
1640    /// of length [`MasterAuxTable::NUM_COLUMNS`]
1641    aux: Array1<XFieldElement>,
1642
1643    /// of length [`NUM_QUOTIENT_SEGMENTS`]
1644    quot_segments: Array1<XFieldElement>,
1645
1646    /// of length [`NUM_DEEP_CODEWORD_COMPONENTS`]
1647    deep: Array1<XFieldElement>,
1648}
1649
1650impl LinearCombinationWeights {
1651    const NUM: usize = MasterMainTable::NUM_COLUMNS
1652        + MasterAuxTable::NUM_COLUMNS
1653        + NUM_QUOTIENT_SEGMENTS
1654        + NUM_DEEP_CODEWORD_COMPONENTS;
1655
1656    fn sample(proof_stream: &mut ProofStream) -> Self {
1657        const MAIN_END: usize = MasterMainTable::NUM_COLUMNS;
1658        const AUX_END: usize = MAIN_END + MasterAuxTable::NUM_COLUMNS;
1659        const QUOT_END: usize = AUX_END + NUM_QUOTIENT_SEGMENTS;
1660
1661        let weights = proof_stream.sample_scalars(Self::NUM);
1662
1663        Self {
1664            main: weights[..MAIN_END].to_vec().into(),
1665            aux: weights[MAIN_END..AUX_END].to_vec().into(),
1666            quot_segments: weights[AUX_END..QUOT_END].to_vec().into(),
1667            deep: weights[QUOT_END..].to_vec().into(),
1668        }
1669    }
1670
1671    fn main_and_aux(&self) -> Array1<XFieldElement> {
1672        let main = self.main.clone().into_iter();
1673        main.chain(self.aux.clone()).collect()
1674    }
1675}
1676
1677#[cfg(test)]
1678#[cfg_attr(coverage_nightly, coverage(off))]
1679pub(crate) mod tests {
1680    use std::collections::HashMap;
1681    use std::collections::HashSet;
1682    use std::fmt::Formatter;
1683
1684    use air::AIR;
1685    use air::challenge_id::ChallengeId::StandardInputIndeterminate;
1686    use air::challenge_id::ChallengeId::StandardOutputIndeterminate;
1687    use air::cross_table_argument::CrossTableArg;
1688    use air::cross_table_argument::EvalArg;
1689    use air::cross_table_argument::GrandCrossTableArg;
1690    use air::table::TableId;
1691    use air::table::cascade::CascadeTable;
1692    use air::table::hash::HashTable;
1693    use air::table::jump_stack::JumpStackTable;
1694    use air::table::lookup::LookupTable;
1695    use air::table::op_stack::OpStackTable;
1696    use air::table::processor::ProcessorTable;
1697    use air::table::program::ProgramTable;
1698    use air::table::ram;
1699    use air::table::ram::RamTable;
1700    use air::table::u32::U32Table;
1701    use air::table_column::MasterAuxColumn;
1702    use air::table_column::MasterMainColumn;
1703    use air::table_column::OpStackMainColumn;
1704    use air::table_column::ProcessorAuxColumn::InputTableEvalArg;
1705    use air::table_column::ProcessorAuxColumn::OutputTableEvalArg;
1706    use air::table_column::ProcessorMainColumn;
1707    use air::table_column::RamMainColumn;
1708    use assert2::assert;
1709    use assert2::check;
1710    use assert2::let_assert;
1711    use constraint_circuit::ConstraintCircuitBuilder;
1712    use isa::error::OpStackError;
1713    use isa::instruction::Instruction;
1714    use isa::op_stack::OpStackElement;
1715    use itertools::izip;
1716    use proptest::collection::vec;
1717    use proptest::prelude::*;
1718    use proptest::test_runner::TestCaseResult;
1719    use proptest_arbitrary_interop::arb;
1720    use rand::Rng;
1721    use rand::prelude::*;
1722    use strum::EnumCount;
1723    use strum::IntoEnumIterator;
1724    use test_strategy::proptest;
1725    use thiserror::Error;
1726    use twenty_first::math::other::random_elements;
1727
1728    use super::*;
1729    use crate::PublicInput;
1730    use crate::config::CacheDecision;
1731    use crate::error::InstructionError;
1732    use crate::shared_tests::TestableProgram;
1733    use crate::table::auxiliary_table;
1734    use crate::table::auxiliary_table::Evaluable;
1735    use crate::table::master_table::MasterAuxTable;
1736    use crate::triton_program;
1737    use crate::vm::NonDeterminism;
1738    use crate::vm::VM;
1739    use crate::vm::tests::ProgramForMerkleTreeUpdate;
1740    use crate::vm::tests::ProgramForRecurseOrReturn;
1741    use crate::vm::tests::ProgramForSpongeAndHashInstructions;
1742    use crate::vm::tests::property_based_test_program_for_and;
1743    use crate::vm::tests::property_based_test_program_for_assert_vector;
1744    use crate::vm::tests::property_based_test_program_for_div_mod;
1745    use crate::vm::tests::property_based_test_program_for_eq;
1746    use crate::vm::tests::property_based_test_program_for_is_u32;
1747    use crate::vm::tests::property_based_test_program_for_log2floor;
1748    use crate::vm::tests::property_based_test_program_for_lsb;
1749    use crate::vm::tests::property_based_test_program_for_lt;
1750    use crate::vm::tests::property_based_test_program_for_pop_count;
1751    use crate::vm::tests::property_based_test_program_for_pow;
1752    use crate::vm::tests::property_based_test_program_for_random_ram_access;
1753    use crate::vm::tests::property_based_test_program_for_split;
1754    use crate::vm::tests::property_based_test_program_for_xb_dot_step;
1755    use crate::vm::tests::property_based_test_program_for_xor;
1756    use crate::vm::tests::property_based_test_program_for_xx_dot_step;
1757    use crate::vm::tests::test_program_0_lt_0;
1758    use crate::vm::tests::test_program_claim_in_ram_corresponds_to_currently_running_program;
1759    use crate::vm::tests::test_program_for_add_mul_invert;
1760    use crate::vm::tests::test_program_for_and;
1761    use crate::vm::tests::test_program_for_assert_vector;
1762    use crate::vm::tests::test_program_for_call_recurse_return;
1763    use crate::vm::tests::test_program_for_div_mod;
1764    use crate::vm::tests::test_program_for_divine;
1765    use crate::vm::tests::test_program_for_eq;
1766    use crate::vm::tests::test_program_for_halt;
1767    use crate::vm::tests::test_program_for_hash;
1768    use crate::vm::tests::test_program_for_log2floor;
1769    use crate::vm::tests::test_program_for_lsb;
1770    use crate::vm::tests::test_program_for_lt;
1771    use crate::vm::tests::test_program_for_many_sponge_instructions;
1772    use crate::vm::tests::test_program_for_merkle_step_left_sibling;
1773    use crate::vm::tests::test_program_for_merkle_step_mem_left_sibling;
1774    use crate::vm::tests::test_program_for_merkle_step_mem_right_sibling;
1775    use crate::vm::tests::test_program_for_merkle_step_right_sibling;
1776    use crate::vm::tests::test_program_for_pop_count;
1777    use crate::vm::tests::test_program_for_pow;
1778    use crate::vm::tests::test_program_for_push_pop_dup_swap_nop;
1779    use crate::vm::tests::test_program_for_read_io_write_io;
1780    use crate::vm::tests::test_program_for_recurse_or_return;
1781    use crate::vm::tests::test_program_for_skiz;
1782    use crate::vm::tests::test_program_for_split;
1783    use crate::vm::tests::test_program_for_sponge_instructions;
1784    use crate::vm::tests::test_program_for_sponge_instructions_2;
1785    use crate::vm::tests::test_program_for_starting_with_pop_count;
1786    use crate::vm::tests::test_program_for_write_mem_read_mem;
1787    use crate::vm::tests::test_program_for_x_invert;
1788    use crate::vm::tests::test_program_for_xb_mul;
1789    use crate::vm::tests::test_program_for_xor;
1790    use crate::vm::tests::test_program_for_xx_add;
1791    use crate::vm::tests::test_program_for_xx_mul;
1792    use crate::vm::tests::test_program_hash_nop_nop_lt;
1793
1794    impl Stark {
1795        pub const LOW_SECURITY_LEVEL: usize = 32;
1796
1797        pub fn low_security() -> Self {
1798            let log_expansion_factor = 2;
1799            Stark::new(Self::LOW_SECURITY_LEVEL, log_expansion_factor)
1800        }
1801    }
1802
1803    #[proptest]
1804    fn two_default_provers_have_different_randomness_seeds() {
1805        let seed = || Prover::default().randomness_seed;
1806        prop_assert_ne!(seed(), seed());
1807    }
1808
1809    #[test]
1810    fn quotient_segments_are_independent_of_fri_table_caching() {
1811        // ensure caching _can_ happen by overwriting environment variables
1812        crate::config::overwrite_lde_trace_caching_to(CacheDecision::Cache);
1813
1814        let test_program = TestableProgram::new(triton_program!(halt));
1815        let stark = test_program.stark;
1816        let artifacts = test_program.generate_proof_artifacts();
1817        let mut main = artifacts.master_main_table;
1818        let mut aux = artifacts.master_aux_table;
1819        let ch = artifacts.challenges;
1820        let padded_height = main.trace_table().nrows();
1821        let fri_dom = stark.fri(padded_height).unwrap().domain;
1822        let max_degree = stark.max_degree(padded_height);
1823        let num_trace_randos = stark.num_trace_randomizers;
1824        let domains = ProverDomains::derive(padded_height, num_trace_randos, fri_dom, max_degree);
1825        let quot_dom = domains.quotient;
1826        let weights = StdRng::seed_from_u64(1632525295622789151)
1827            .random::<[XFieldElement; MasterAuxTable::NUM_CONSTRAINTS]>();
1828
1829        debug_assert!(main.fri_domain_table().is_none());
1830        debug_assert!(aux.fri_domain_table().is_none());
1831        let jit_segments =
1832            Prover::compute_quotient_segments(&mut main, &mut aux, quot_dom, &ch, &weights);
1833
1834        debug_assert!(main.fri_domain_table().is_none());
1835        main.maybe_low_degree_extend_all_columns();
1836        debug_assert!(main.fri_domain_table().is_some());
1837
1838        debug_assert!(aux.fri_domain_table().is_none());
1839        aux.maybe_low_degree_extend_all_columns();
1840        debug_assert!(aux.fri_domain_table().is_some());
1841
1842        let cache_segments =
1843            Prover::compute_quotient_segments(&mut main, &mut aux, quot_dom, &ch, &weights);
1844
1845        assert_eq!(jit_segments, cache_segments);
1846    }
1847
1848    /// [`Prover::compute_quotient_segments`] takes mutable references to both
1849    /// the main and the auxiliary tables. It is vital that certain
1850    /// information is _not_ mutated.
1851    #[test]
1852    fn computing_quotient_segments_does_not_change_execution_trace() {
1853        fn assert_no_trace_mutation(cache_decision: CacheDecision) {
1854            crate::config::overwrite_lde_trace_caching_to(cache_decision);
1855
1856            let test_program = TestableProgram::new(triton_program!(halt));
1857            let stark = test_program.stark;
1858            let artifacts = test_program.generate_proof_artifacts();
1859            let mut main = artifacts.master_main_table;
1860            let mut aux = artifacts.master_aux_table;
1861            let ch = artifacts.challenges;
1862
1863            let original_main_trace = main.trace_table().to_owned();
1864            let original_aux_trace = aux.trace_table().to_owned();
1865
1866            let padded_height = main.trace_table().nrows();
1867            let fri_dom = stark.fri(padded_height).unwrap().domain;
1868            let max_degree = stark.max_degree(padded_height);
1869            let num_trace_randos = stark.num_trace_randomizers;
1870            let domains =
1871                ProverDomains::derive(padded_height, num_trace_randos, fri_dom, max_degree);
1872            let quot_dom = domains.quotient;
1873
1874            if cache_decision == CacheDecision::Cache {
1875                main.maybe_low_degree_extend_all_columns();
1876                assert!(main.fri_domain_table().is_some());
1877
1878                aux.maybe_low_degree_extend_all_columns();
1879                assert!(aux.fri_domain_table().is_some());
1880            }
1881
1882            let weights = StdRng::seed_from_u64(15157673430940347283)
1883                .random::<[XFieldElement; MasterAuxTable::NUM_CONSTRAINTS]>();
1884            let _segments =
1885                Prover::compute_quotient_segments(&mut main, &mut aux, quot_dom, &ch, &weights);
1886
1887            assert_eq!(original_main_trace, main.trace_table());
1888            assert_eq!(original_aux_trace, aux.trace_table());
1889        }
1890
1891        assert_no_trace_mutation(CacheDecision::Cache);
1892        assert_no_trace_mutation(CacheDecision::NoCache);
1893    }
1894
1895    #[test]
1896    fn supplying_prover_randomness_seed_fully_derandomizes_produced_proof() {
1897        let TestableProgram {
1898            program,
1899            public_input,
1900            non_determinism,
1901            stark,
1902        } = program_executing_every_instruction();
1903
1904        let claim = Claim::about_program(&program).with_input(public_input.clone());
1905        let (aet, output) = VM::trace_execution(program, public_input, non_determinism).unwrap();
1906        let claim = claim.with_output(output);
1907
1908        let mut rng = StdRng::seed_from_u64(3351975627407608972);
1909        let proof = Prover::new(stark)
1910            .set_randomness_seed_which_may_break_zero_knowledge(rng.random())
1911            .prove(&claim, &aet)
1912            .unwrap();
1913
1914        insta::assert_snapshot!(
1915            Tip5::hash(&proof),
1916            @"17275651906185656762,\
1917              13250937299792022858,\
1918              05731754925513787901,\
1919              05512095638892086027,\
1920              08634562101877660478",
1921        );
1922    }
1923
1924    #[test]
1925    fn print_ram_table_example_for_specification() {
1926        let program = triton_program!(
1927            push 20 push 100 write_mem 1 pop 1  // write 20 to address 100
1928            push 5 push 6 push 7 push 8 push 9
1929            push 42 write_mem 5 pop 1           // write 5..=9 to addresses 42..=46
1930            push 42 read_mem 1 pop 2            // read from address 42
1931            push 45 read_mem 3 pop 4            // read from addresses 43..=45
1932            push 17 push 18 push 19
1933            push 43 write_mem 3 pop 1           // write 17..=19 to addresses 43..=45
1934            push 46 read_mem 5 pop 1 pop 5      // read from addresses 42..=46
1935            push 42 read_mem 1 pop 2            // read from address 42
1936            push 100 read_mem 1 pop 2           // read from address 100
1937            halt
1938        );
1939        let master_main_table = TestableProgram::new(program)
1940            .generate_proof_artifacts()
1941            .master_main_table;
1942
1943        println!();
1944        println!("Processor Table:\n");
1945        println!("| clk | ci  | nia | st0 | st1 | st2 | st3 | st4 | st5 |");
1946        println!("|----:|:----|:----|----:|----:|----:|----:|----:|----:|");
1947        for row in master_main_table
1948            .table(TableId::Processor)
1949            .rows()
1950            .into_iter()
1951            .take(40)
1952        {
1953            let clk = row[ProcessorMainColumn::CLK.main_index()].to_string();
1954            let st0 = row[ProcessorMainColumn::ST0.main_index()].to_string();
1955            let st1 = row[ProcessorMainColumn::ST1.main_index()].to_string();
1956            let st2 = row[ProcessorMainColumn::ST2.main_index()].to_string();
1957            let st3 = row[ProcessorMainColumn::ST3.main_index()].to_string();
1958            let st4 = row[ProcessorMainColumn::ST4.main_index()].to_string();
1959            let st5 = row[ProcessorMainColumn::ST5.main_index()].to_string();
1960
1961            let (ci, nia) = ci_and_nia_from_master_table_row(row);
1962
1963            let interesting_cols = [clk, ci, nia, st0, st1, st2, st3, st4, st5];
1964            let interesting_cols = interesting_cols
1965                .iter()
1966                .map(|ff| format!("{:>10}", format!("{ff}")))
1967                .join(" | ");
1968            println!("| {interesting_cols} |");
1969        }
1970        println!();
1971        println!("RAM Table:\n");
1972        println!("| clk | type | pointer | value | iord |");
1973        println!("|----:|:-----|--------:|------:|-----:|");
1974        for row in master_main_table
1975            .table(TableId::Ram)
1976            .rows()
1977            .into_iter()
1978            .take(25)
1979        {
1980            let clk = row[RamMainColumn::CLK.main_index()].to_string();
1981            let ramp = row[RamMainColumn::RamPointer.main_index()].to_string();
1982            let ramv = row[RamMainColumn::RamValue.main_index()].to_string();
1983            let iord = row[RamMainColumn::InverseOfRampDifference.main_index()].to_string();
1984
1985            let instruction_type = match row[RamMainColumn::InstructionType.main_index()] {
1986                ram::INSTRUCTION_TYPE_READ => "read",
1987                ram::INSTRUCTION_TYPE_WRITE => "write",
1988                ram::PADDING_INDICATOR => "pad",
1989                _ => "-",
1990            }
1991            .to_string();
1992
1993            let interesting_cols = [clk, instruction_type, ramp, ramv, iord];
1994            let interesting_cols = interesting_cols
1995                .iter()
1996                .map(|ff| format!("{:>10}", format!("{ff}")))
1997                .join(" | ");
1998            println!("| {interesting_cols} |");
1999        }
2000    }
2001
2002    #[test]
2003    fn print_op_stack_table_example_for_specification() {
2004        let num_interesting_rows = 30;
2005        let fake_op_stack_size = 4;
2006
2007        let program = triton_program! {
2008            push 42 push 43 push 44 push 45 push 46 push 47 push 48
2009            nop pop 1 pop 2 pop 1
2010            push 77 swap 3 push 78 swap 3 push 79
2011            pop 1 pop 2 pop 3
2012            halt
2013        };
2014        let master_main_table = TestableProgram::new(program)
2015            .generate_proof_artifacts()
2016            .master_main_table;
2017
2018        println!();
2019        println!("Processor Table:");
2020        println!("| clk | ci  | nia | st0 | st1 | st2 | st3 | underflow | pointer |");
2021        println!("|----:|:----|----:|----:|----:|----:|----:|:----------|--------:|");
2022        for row in master_main_table
2023            .table(TableId::Processor)
2024            .rows()
2025            .into_iter()
2026            .take(num_interesting_rows)
2027        {
2028            let clk = row[ProcessorMainColumn::CLK.main_index()].to_string();
2029            let st0 = row[ProcessorMainColumn::ST0.main_index()].to_string();
2030            let st1 = row[ProcessorMainColumn::ST1.main_index()].to_string();
2031            let st2 = row[ProcessorMainColumn::ST2.main_index()].to_string();
2032            let st3 = row[ProcessorMainColumn::ST3.main_index()].to_string();
2033            let st4 = row[ProcessorMainColumn::ST4.main_index()].to_string();
2034            let st5 = row[ProcessorMainColumn::ST5.main_index()].to_string();
2035            let st6 = row[ProcessorMainColumn::ST6.main_index()].to_string();
2036            let st7 = row[ProcessorMainColumn::ST7.main_index()].to_string();
2037            let st8 = row[ProcessorMainColumn::ST8.main_index()].to_string();
2038            let st9 = row[ProcessorMainColumn::ST9.main_index()].to_string();
2039
2040            let osp = row[ProcessorMainColumn::OpStackPointer.main_index()];
2041            let osp =
2042                (osp.value() + fake_op_stack_size).saturating_sub(OpStackElement::COUNT as u64);
2043
2044            let underflow_size = osp.saturating_sub(fake_op_stack_size);
2045            let underflow_candidates = [st4, st5, st6, st7, st8, st9];
2046            let underflow = underflow_candidates
2047                .into_iter()
2048                .take(underflow_size as usize);
2049            let underflow = underflow.map(|ff| format!("{:>2}", format!("{ff}")));
2050            let underflow = format!("[{}]", underflow.collect_vec().join(", "));
2051
2052            let osp = osp.to_string();
2053            let (ci, nia) = ci_and_nia_from_master_table_row(row);
2054
2055            let interesting_cols = [clk, ci, nia, st0, st1, st2, st3, underflow, osp];
2056            let interesting_cols = interesting_cols
2057                .map(|ff| format!("{:>10}", format!("{ff}")))
2058                .join(" | ");
2059            println!("| {interesting_cols} |");
2060        }
2061
2062        println!();
2063        println!("Op Stack Table:");
2064        println!("| clk | ib1 | pointer | value |");
2065        println!("|----:|----:|--------:|------:|");
2066        for row in master_main_table
2067            .table(TableId::OpStack)
2068            .rows()
2069            .into_iter()
2070            .take(num_interesting_rows)
2071        {
2072            let clk = row[OpStackMainColumn::CLK.main_index()].to_string();
2073            let ib1 = row[OpStackMainColumn::IB1ShrinkStack.main_index()].to_string();
2074
2075            let osp = row[OpStackMainColumn::StackPointer.main_index()];
2076            let osp =
2077                (osp.value() + fake_op_stack_size).saturating_sub(OpStackElement::COUNT as u64);
2078            let osp = osp.to_string();
2079
2080            let value = row[OpStackMainColumn::FirstUnderflowElement.main_index()].to_string();
2081
2082            let interesting_cols = [clk, ib1, osp, value];
2083            let interesting_cols = interesting_cols
2084                .map(|ff| format!("{:>10}", format!("{ff}")))
2085                .join(" | ");
2086            println!("| {interesting_cols} |");
2087        }
2088    }
2089
2090    fn ci_and_nia_from_master_table_row(row: ArrayView1<BFieldElement>) -> (String, String) {
2091        let curr_instruction = row[ProcessorMainColumn::CI.main_index()].value();
2092        let next_instruction_or_arg = row[ProcessorMainColumn::NIA.main_index()].value();
2093
2094        let curr_instruction = Instruction::try_from(curr_instruction).unwrap();
2095        let nia = curr_instruction
2096            .arg()
2097            .map(|_| next_instruction_or_arg.to_string())
2098            .unwrap_or_default();
2099        (curr_instruction.name().to_string(), nia)
2100    }
2101
2102    /// To be used with `-- --nocapture`. Serves a mainly informative purpose.
2103    #[test]
2104    fn print_all_constraint_degrees() {
2105        let padded_height = 2;
2106        let num_trace_randomizers = 2;
2107        let interpolant_degree = Stark::interpolant_degree(padded_height, num_trace_randomizers);
2108        for deg in auxiliary_table::all_degrees_with_origin(interpolant_degree, padded_height) {
2109            println!("{deg}");
2110        }
2111    }
2112
2113    #[test]
2114    fn check_io_terminals() {
2115        let read_nop_program = triton_program!(
2116            read_io 3 nop nop write_io 2 push 17 write_io 1 halt
2117        );
2118        let artifacts = TestableProgram::new(read_nop_program)
2119            .with_input(bfe_vec![3, 5, 7])
2120            .generate_proof_artifacts();
2121
2122        let processor_table = artifacts.master_aux_table.table(TableId::Processor);
2123        let processor_table_last_row = processor_table.slice(s![-1, ..]);
2124        let ptie = processor_table_last_row[InputTableEvalArg.aux_index()];
2125        let ine = EvalArg::compute_terminal(
2126            &artifacts.claim.input,
2127            EvalArg::default_initial(),
2128            artifacts.challenges[StandardInputIndeterminate],
2129        );
2130        check!(ptie == ine);
2131
2132        let ptoe = processor_table_last_row[OutputTableEvalArg.aux_index()];
2133        let oute = EvalArg::compute_terminal(
2134            &artifacts.claim.output,
2135            EvalArg::default_initial(),
2136            artifacts.challenges[StandardOutputIndeterminate],
2137        );
2138        check!(ptoe == oute);
2139    }
2140
2141    #[test]
2142    fn constraint_polynomials_use_right_number_of_variables() {
2143        let challenges = Challenges::default();
2144        let main_row = Array1::<BFieldElement>::zeros(MasterMainTable::NUM_COLUMNS);
2145        let aux_row = Array1::zeros(MasterAuxTable::NUM_COLUMNS);
2146
2147        let br = main_row.view();
2148        let er = aux_row.view();
2149
2150        MasterAuxTable::evaluate_initial_constraints(br, er, &challenges);
2151        MasterAuxTable::evaluate_consistency_constraints(br, er, &challenges);
2152        MasterAuxTable::evaluate_transition_constraints(br, er, br, er, &challenges);
2153        MasterAuxTable::evaluate_terminal_constraints(br, er, &challenges);
2154    }
2155
2156    #[test]
2157    fn print_number_of_all_constraints_per_table() {
2158        let table_names = [
2159            "program table",
2160            "processor table",
2161            "op stack table",
2162            "ram table",
2163            "jump stack table",
2164            "hash table",
2165            "cascade table",
2166            "lookup table",
2167            "u32 table",
2168            "cross-table arg",
2169        ];
2170        let circuit_builder = ConstraintCircuitBuilder::new();
2171        let all_init = [
2172            ProgramTable::initial_constraints(&circuit_builder),
2173            ProcessorTable::initial_constraints(&circuit_builder),
2174            OpStackTable::initial_constraints(&circuit_builder),
2175            RamTable::initial_constraints(&circuit_builder),
2176            JumpStackTable::initial_constraints(&circuit_builder),
2177            HashTable::initial_constraints(&circuit_builder),
2178            CascadeTable::initial_constraints(&circuit_builder),
2179            LookupTable::initial_constraints(&circuit_builder),
2180            U32Table::initial_constraints(&circuit_builder),
2181            GrandCrossTableArg::initial_constraints(&circuit_builder),
2182        ]
2183        .map(|vec| vec.len());
2184        let circuit_builder = ConstraintCircuitBuilder::new();
2185        let all_cons = [
2186            ProgramTable::consistency_constraints(&circuit_builder),
2187            ProcessorTable::consistency_constraints(&circuit_builder),
2188            OpStackTable::consistency_constraints(&circuit_builder),
2189            RamTable::consistency_constraints(&circuit_builder),
2190            JumpStackTable::consistency_constraints(&circuit_builder),
2191            HashTable::consistency_constraints(&circuit_builder),
2192            CascadeTable::consistency_constraints(&circuit_builder),
2193            LookupTable::consistency_constraints(&circuit_builder),
2194            U32Table::consistency_constraints(&circuit_builder),
2195            GrandCrossTableArg::consistency_constraints(&circuit_builder),
2196        ]
2197        .map(|vec| vec.len());
2198        let circuit_builder = ConstraintCircuitBuilder::new();
2199        let all_trans = [
2200            ProgramTable::transition_constraints(&circuit_builder),
2201            ProcessorTable::transition_constraints(&circuit_builder),
2202            OpStackTable::transition_constraints(&circuit_builder),
2203            RamTable::transition_constraints(&circuit_builder),
2204            JumpStackTable::transition_constraints(&circuit_builder),
2205            HashTable::transition_constraints(&circuit_builder),
2206            CascadeTable::transition_constraints(&circuit_builder),
2207            LookupTable::transition_constraints(&circuit_builder),
2208            U32Table::transition_constraints(&circuit_builder),
2209            GrandCrossTableArg::transition_constraints(&circuit_builder),
2210        ]
2211        .map(|vec| vec.len());
2212        let circuit_builder = ConstraintCircuitBuilder::new();
2213        let all_term = [
2214            ProgramTable::terminal_constraints(&circuit_builder),
2215            ProcessorTable::terminal_constraints(&circuit_builder),
2216            OpStackTable::terminal_constraints(&circuit_builder),
2217            RamTable::terminal_constraints(&circuit_builder),
2218            JumpStackTable::terminal_constraints(&circuit_builder),
2219            HashTable::terminal_constraints(&circuit_builder),
2220            CascadeTable::terminal_constraints(&circuit_builder),
2221            LookupTable::terminal_constraints(&circuit_builder),
2222            U32Table::terminal_constraints(&circuit_builder),
2223            GrandCrossTableArg::terminal_constraints(&circuit_builder),
2224        ]
2225        .map(|vec| vec.len());
2226
2227        let num_total_init: usize = all_init.iter().sum();
2228        let num_total_cons: usize = all_cons.iter().sum();
2229        let num_total_trans: usize = all_trans.iter().sum();
2230        let num_total_term: usize = all_term.iter().sum();
2231        let num_total = num_total_init + num_total_cons + num_total_trans + num_total_term;
2232
2233        println!();
2234        println!("| Table                |  Init |  Cons | Trans |  Term |   Sum |");
2235        println!("|:---------------------|------:|------:|------:|------:|------:|");
2236        for (name, num_init, num_cons, num_trans, num_term) in
2237            izip!(table_names, all_init, all_cons, all_trans, all_term)
2238        {
2239            let num_total = num_init + num_cons + num_trans + num_term;
2240            println!(
2241                "| {name:<20} | {num_init:>5} | {num_cons:>5} \
2242                 | {num_trans:>5} | {num_term:>5} | {num_total:>5} |",
2243            );
2244        }
2245        println!(
2246            "| {:<20} | {num_total_init:>5} | {num_total_cons:>5} \
2247             | {num_total_trans:>5} | {num_total_term:>5} | {num_total:>5} |",
2248            "Sum",
2249        );
2250    }
2251
2252    #[test]
2253    fn number_of_quotient_degree_bounds_match_number_of_constraints() {
2254        let main_row = Array1::<BFieldElement>::zeros(MasterMainTable::NUM_COLUMNS);
2255        let aux_row = Array1::zeros(MasterAuxTable::NUM_COLUMNS);
2256        let ch = Challenges::default();
2257        let padded_height = 2;
2258        let num_trace_randomizers = 2;
2259        let interpolant_degree = Stark::interpolant_degree(padded_height, num_trace_randomizers);
2260
2261        // Shorten some names for better formatting. This is just a test.
2262        let ph = padded_height;
2263        let id = interpolant_degree;
2264        let mr = main_row.view();
2265        let ar = aux_row.view();
2266
2267        let num_init_quots = MasterAuxTable::NUM_INITIAL_CONSTRAINTS;
2268        let num_cons_quots = MasterAuxTable::NUM_CONSISTENCY_CONSTRAINTS;
2269        let num_tran_quots = MasterAuxTable::NUM_TRANSITION_CONSTRAINTS;
2270        let num_term_quots = MasterAuxTable::NUM_TERMINAL_CONSTRAINTS;
2271
2272        let eval_init_consts = MasterAuxTable::evaluate_initial_constraints(mr, ar, &ch);
2273        let eval_cons_consts = MasterAuxTable::evaluate_consistency_constraints(mr, ar, &ch);
2274        let eval_tran_consts = MasterAuxTable::evaluate_transition_constraints(mr, ar, mr, ar, &ch);
2275        let eval_term_consts = MasterAuxTable::evaluate_terminal_constraints(mr, ar, &ch);
2276
2277        assert!(num_init_quots == eval_init_consts.len());
2278        assert!(num_cons_quots == eval_cons_consts.len());
2279        assert!(num_tran_quots == eval_tran_consts.len());
2280        assert!(num_term_quots == eval_term_consts.len());
2281
2282        assert!(num_init_quots == MasterAuxTable::initial_quotient_degree_bounds(id).len());
2283        assert!(num_cons_quots == MasterAuxTable::consistency_quotient_degree_bounds(id, ph).len());
2284        assert!(num_tran_quots == MasterAuxTable::transition_quotient_degree_bounds(id, ph).len());
2285        assert!(num_term_quots == MasterAuxTable::terminal_quotient_degree_bounds(id).len());
2286    }
2287
2288    type ConstraintResult = Result<(), ConstraintErrorCollection>;
2289
2290    #[derive(Debug, Clone, Eq, PartialEq)]
2291    struct ConstraintErrorCollection {
2292        table: &'static str,
2293        errors: Vec<ConstraintError>,
2294    }
2295
2296    impl ConstraintErrorCollection {
2297        fn new(table: &'static str) -> Self {
2298            let errors = Vec::new();
2299            Self { table, errors }
2300        }
2301
2302        fn record(&mut self, err: ConstraintError) {
2303            self.errors.push(err);
2304        }
2305
2306        fn into_result(self) -> Result<(), Self> {
2307            self.errors.is_empty().then_some(()).ok_or(self)
2308        }
2309    }
2310
2311    impl core::fmt::Display for ConstraintErrorCollection {
2312        fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
2313            for err in &self.errors {
2314                writeln!(f, "{table}: {err}", table = self.table)?;
2315            }
2316
2317            Ok(())
2318        }
2319    }
2320
2321    impl core::error::Error for ConstraintErrorCollection {}
2322
2323    #[derive(Debug, Copy, Clone, Eq, PartialEq, Error)]
2324    enum ConstraintError {
2325        #[error("initial constraint {idx} failed")]
2326        Initial { idx: usize },
2327
2328        #[error("consistency constraint {idx} failed on row {row_idx}")]
2329        Consistency { idx: usize, row_idx: usize },
2330
2331        #[error("transition constraint {idx} failed on row {row_idx}")]
2332        Transition { idx: usize, row_idx: usize },
2333
2334        #[error("terminal constraint {idx} failed")]
2335        Terminal { idx: usize },
2336    }
2337
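    /// Generate a function that evaluates all of `$table`'s initial,
    /// consistency, transition, and terminal constraints on the given trace
    /// tables, collecting every violated constraint into a
    /// [`ConstraintErrorCollection`].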
2338    macro_rules! check_constraints_fn {
2339        (fn $fn_name:ident for $table:ident) => {
2340            fn $fn_name(
2341                master_main_trace_table: ArrayView2<BFieldElement>,
2342                master_aux_trace_table: ArrayView2<XFieldElement>,
2343                challenges: &Challenges,
2344            ) -> ConstraintResult {
2345                assert!(master_main_trace_table.nrows() == master_aux_trace_table.nrows());
2346                let challenges = &challenges.challenges;
2347
2348                let mut errors = ConstraintErrorCollection::new(stringify!($table));
2349                let builder = ConstraintCircuitBuilder::new();
2350                for (idx, constraint) in $table::initial_constraints(&builder)
2351                    .into_iter()
2352                    .map(|constraint_monad| constraint_monad.consume())
2353                    .enumerate()
2354                {
2355                    let evaluated_constraint = constraint.evaluate(
2356                        master_main_trace_table.slice(s![..1, ..]),
2357                        master_aux_trace_table.slice(s![..1, ..]),
2358                        challenges,
2359                    );
2360                    if evaluated_constraint != xfe!(0) {
2361                        errors.record(ConstraintError::Initial { idx });
2362                    }
2363                }
2364
2365                let builder = ConstraintCircuitBuilder::new();
2366                for (idx, constraint) in $table::consistency_constraints(&builder)
2367                    .into_iter()
2368                    .map(|constraint_monad| constraint_monad.consume())
2369                    .enumerate()
2370                {
2371                    for row_idx in 0..master_main_trace_table.nrows() {
2372                        let evaluated_constraint = constraint.evaluate(
2373                            master_main_trace_table.slice(s![row_idx..=row_idx, ..]),
2374                            master_aux_trace_table.slice(s![row_idx..=row_idx, ..]),
2375                            challenges,
2376                        );
2377                        if evaluated_constraint != xfe!(0) {
2378                            errors.record(ConstraintError::Consistency { idx, row_idx });
2379                        }
2380                    }
2381                }
2382
2383                let builder = ConstraintCircuitBuilder::new();
2384                for (idx, constraint) in $table::transition_constraints(&builder)
2385                    .into_iter()
2386                    .map(|constraint_monad| constraint_monad.consume())
2387                    .enumerate()
2388                {
2389                    for row_idx in 0..master_main_trace_table.nrows() - 1 {
2390                        let evaluated_constraint = constraint.evaluate(
2391                            master_main_trace_table.slice(s![row_idx..=row_idx + 1, ..]),
2392                            master_aux_trace_table.slice(s![row_idx..=row_idx + 1, ..]),
2393                            challenges,
2394                        );
2395                        if evaluated_constraint != xfe!(0) {
2396                            errors.record(ConstraintError::Transition { idx, row_idx });
2397                        }
2398                    }
2399                }
2400
2401                let builder = ConstraintCircuitBuilder::new();
2402                for (idx, constraint) in $table::terminal_constraints(&builder)
2403                    .into_iter()
2404                    .map(|constraint_monad| constraint_monad.consume())
2405                    .enumerate()
2406                {
2407                    let evaluated_constraint = constraint.evaluate(
2408                        master_main_trace_table.slice(s![-1.., ..]),
2409                        master_aux_trace_table.slice(s![-1.., ..]),
2410                        challenges,
2411                    );
2412                    if evaluated_constraint != xfe!(0) {
2413                        errors.record(ConstraintError::Terminal { idx });
2414                    }
2415                }
2416
2417                errors.into_result()
2418            }
2419        };
2420    }
2421
2422    check_constraints_fn!(fn check_program_table_constraints for ProgramTable);
2423    check_constraints_fn!(fn check_processor_table_constraints for ProcessorTable);
2424    check_constraints_fn!(fn check_op_stack_table_constraints for OpStackTable);
2425    check_constraints_fn!(fn check_ram_table_constraints for RamTable);
2426    check_constraints_fn!(fn check_jump_stack_table_constraints for JumpStackTable);
2427    check_constraints_fn!(fn check_hash_table_constraints for HashTable);
2428    check_constraints_fn!(fn check_cascade_table_constraints for CascadeTable);
2429    check_constraints_fn!(fn check_lookup_table_constraints for LookupTable);
2430    check_constraints_fn!(fn check_u32_table_constraints for U32Table);
2431    check_constraints_fn!(fn check_cross_table_constraints for GrandCrossTableArg);
2432
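    /// Checks the AIR constraints of every table, as well as the cross-table
    /// argument, against the main and auxiliary trace of the given program.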
2433    fn triton_constraints_evaluate_to_zero(program: TestableProgram) -> ConstraintResult {
2434        let artifacts = program.generate_proof_artifacts();
2435        let mmt = artifacts.master_main_table.trace_table();
2436        let mat = artifacts.master_aux_table.trace_table();
2437        assert!(mmt.nrows() == mat.nrows());
2438
2439        let challenges = artifacts.challenges;
2440        check_program_table_constraints(mmt, mat, &challenges)?;
2441        check_processor_table_constraints(mmt, mat, &challenges)?;
2442        check_op_stack_table_constraints(mmt, mat, &challenges)?;
2443        check_ram_table_constraints(mmt, mat, &challenges)?;
2444        check_jump_stack_table_constraints(mmt, mat, &challenges)?;
2445        check_hash_table_constraints(mmt, mat, &challenges)?;
2446        check_cascade_table_constraints(mmt, mat, &challenges)?;
2447        check_lookup_table_constraints(mmt, mat, &challenges)?;
2448        check_u32_table_constraints(mmt, mat, &challenges)?;
2449        check_cross_table_constraints(mmt, mat, &challenges)?;
2450
2451        Ok(())
2452    }
2453
2454    #[test]
2455    fn constraints_evaluate_to_zero_on_fibonacci() -> ConstraintResult {
2456        let program = TestableProgram::new(crate::example_programs::FIBONACCI_SEQUENCE.clone())
2457            .with_input(bfe_array![100]);
2458        triton_constraints_evaluate_to_zero(program)
2459    }
2460
2461    #[test]
2462    fn constraints_evaluate_to_zero_on_big_mmr_snippet() -> ConstraintResult {
2463        let program = TestableProgram::new(
2464            crate::example_programs::CALCULATE_NEW_MMR_PEAKS_FROM_APPEND_WITH_SAFE_LISTS.clone(),
2465        );
2466        triton_constraints_evaluate_to_zero(program)
2467    }
2468
2469    #[test]
2470    fn constraints_evaluate_to_zero_on_program_for_halt() -> ConstraintResult {
2471        triton_constraints_evaluate_to_zero(test_program_for_halt())
2472    }
2473
2474    #[test]
2475    fn constraints_evaluate_to_zero_on_program_hash_nop_nop_lt() -> ConstraintResult {
2476        triton_constraints_evaluate_to_zero(test_program_hash_nop_nop_lt())
2477    }
2478
2479    #[test]
2480    fn constraints_evaluate_to_zero_on_program_for_push_pop_dup_swap_nop() -> ConstraintResult {
2481        triton_constraints_evaluate_to_zero(test_program_for_push_pop_dup_swap_nop())
2482    }
2483
2484    #[test]
2485    fn constraints_evaluate_to_zero_on_program_for_divine() -> ConstraintResult {
2486        triton_constraints_evaluate_to_zero(test_program_for_divine())
2487    }
2488
2489    #[test]
2490    fn constraints_evaluate_to_zero_on_program_for_skiz() -> ConstraintResult {
2491        triton_constraints_evaluate_to_zero(test_program_for_skiz())
2492    }
2493
2494    #[test]
2495    fn constraints_evaluate_to_zero_on_program_for_call_recurse_return() -> ConstraintResult {
2496        triton_constraints_evaluate_to_zero(test_program_for_call_recurse_return())
2497    }
2498
2499    #[test]
2500    fn constraints_evaluate_to_zero_on_program_for_recurse_or_return() -> ConstraintResult {
2501        triton_constraints_evaluate_to_zero(test_program_for_recurse_or_return())
2502    }
2503
2504    #[proptest(cases = 20)]
2505    fn constraints_evaluate_to_zero_on_property_based_test_program_for_recurse_or_return(
2506        program: ProgramForRecurseOrReturn,
2507    ) {
2508        triton_constraints_evaluate_to_zero(program.assemble())?;
2509    }
2510
2511    #[test]
2512    fn constraints_evaluate_to_zero_on_program_for_write_mem_read_mem() -> ConstraintResult {
2513        triton_constraints_evaluate_to_zero(test_program_for_write_mem_read_mem())
2514    }
2515
2516    #[test]
2517    fn constraints_evaluate_to_zero_on_program_for_hash() -> ConstraintResult {
2518        triton_constraints_evaluate_to_zero(test_program_for_hash())
2519    }
2520
2521    #[test]
2522    fn constraints_evaluate_to_zero_on_program_for_merkle_step_right_sibling() -> ConstraintResult {
2523        triton_constraints_evaluate_to_zero(test_program_for_merkle_step_right_sibling())
2524    }
2525
2526    #[test]
2527    fn constraints_evaluate_to_zero_on_program_for_merkle_step_left_sibling() -> ConstraintResult {
2528        triton_constraints_evaluate_to_zero(test_program_for_merkle_step_left_sibling())
2529    }
2530
2531    #[test]
2532    fn constraints_evaluate_to_zero_on_program_for_merkle_step_mem_right_sibling()
2533    -> ConstraintResult {
2534        triton_constraints_evaluate_to_zero(test_program_for_merkle_step_mem_right_sibling())
2535    }
2536
2537    // todo: https://github.com/rust-lang/rustfmt/issues/6521
2538    #[rustfmt::skip]
2539    #[test]
2540    fn constraints_evaluate_to_zero_on_program_for_merkle_step_mem_left_sibling()
2541    -> ConstraintResult {
2542        triton_constraints_evaluate_to_zero(test_program_for_merkle_step_mem_left_sibling())
2543    }
2544
2545    #[proptest(cases = 20)]
2546    fn constraints_evaluate_to_zero_on_property_based_test_program_for_merkle_tree_update(
2547        program: ProgramForMerkleTreeUpdate,
2548    ) {
2549        triton_constraints_evaluate_to_zero(program.assemble())?;
2550    }
2551
2552    #[test]
2553    fn constraints_evaluate_to_zero_on_program_for_assert_vector() -> ConstraintResult {
2554        triton_constraints_evaluate_to_zero(test_program_for_assert_vector())
2555    }
2556
2557    #[test]
2558    fn constraints_evaluate_to_zero_on_program_for_sponge_instructions() -> ConstraintResult {
2559        triton_constraints_evaluate_to_zero(test_program_for_sponge_instructions())
2560    }
2561
2562    #[test]
2563    fn constraints_evaluate_to_zero_on_program_for_sponge_instructions_2() -> ConstraintResult {
2564        triton_constraints_evaluate_to_zero(test_program_for_sponge_instructions_2())
2565    }
2566
2567    #[test]
2568    fn constraints_evaluate_to_zero_on_program_for_many_sponge_instructions() -> ConstraintResult {
2569        triton_constraints_evaluate_to_zero(test_program_for_many_sponge_instructions())
2570    }
2571
2572    #[test]
2573    fn constraints_evaluate_to_zero_on_program_for_add_mul_invert() -> ConstraintResult {
2574        triton_constraints_evaluate_to_zero(test_program_for_add_mul_invert())
2575    }
2576
2577    #[test]
2578    fn constraints_evaluate_to_zero_on_program_for_eq() -> ConstraintResult {
2579        triton_constraints_evaluate_to_zero(test_program_for_eq())
2580    }
2581
2582    #[test]
2583    fn constraints_evaluate_to_zero_on_program_for_lsb() -> ConstraintResult {
2584        triton_constraints_evaluate_to_zero(test_program_for_lsb())
2585    }
2586
2587    #[test]
2588    fn constraints_evaluate_to_zero_on_program_for_split() -> ConstraintResult {
2589        triton_constraints_evaluate_to_zero(test_program_for_split())
2590    }
2591
2592    #[test]
2593    fn constraints_evaluate_to_zero_on_program_0_lt_0() -> ConstraintResult {
2594        triton_constraints_evaluate_to_zero(test_program_0_lt_0())
2595    }
2596
2597    #[test]
2598    fn constraints_evaluate_to_zero_on_program_for_lt() -> ConstraintResult {
2599        triton_constraints_evaluate_to_zero(test_program_for_lt())
2600    }
2601
2602    #[test]
2603    fn constraints_evaluate_to_zero_on_program_for_and() -> ConstraintResult {
2604        triton_constraints_evaluate_to_zero(test_program_for_and())
2605    }
2606
2607    #[test]
2608    fn constraints_evaluate_to_zero_on_program_for_xor() -> ConstraintResult {
2609        triton_constraints_evaluate_to_zero(test_program_for_xor())
2610    }
2611
2612    #[test]
2613    fn constraints_evaluate_to_zero_on_program_for_log2floor() -> ConstraintResult {
2614        triton_constraints_evaluate_to_zero(test_program_for_log2floor())
2615    }
2616
2617    #[test]
2618    fn constraints_evaluate_to_zero_on_program_for_pow() -> ConstraintResult {
2619        triton_constraints_evaluate_to_zero(test_program_for_pow())
2620    }
2621
2622    #[test]
2623    fn constraints_evaluate_to_zero_on_program_for_div_mod() -> ConstraintResult {
2624        triton_constraints_evaluate_to_zero(test_program_for_div_mod())
2625    }
2626
2627    #[test]
2628    fn constraints_evaluate_to_zero_on_program_for_starting_with_pop_count() -> ConstraintResult {
2629        triton_constraints_evaluate_to_zero(test_program_for_starting_with_pop_count())
2630    }
2631
2632    #[test]
2633    fn constraints_evaluate_to_zero_on_program_for_pop_count() -> ConstraintResult {
2634        triton_constraints_evaluate_to_zero(test_program_for_pop_count())
2635    }
2636
2637    #[test]
2638    fn constraints_evaluate_to_zero_on_program_for_xx_add() -> ConstraintResult {
2639        triton_constraints_evaluate_to_zero(test_program_for_xx_add())
2640    }
2641
2642    #[test]
2643    fn constraints_evaluate_to_zero_on_program_for_xx_mul() -> ConstraintResult {
2644        triton_constraints_evaluate_to_zero(test_program_for_xx_mul())
2645    }
2646
2647    #[test]
2648    fn constraints_evaluate_to_zero_on_program_for_x_invert() -> ConstraintResult {
2649        triton_constraints_evaluate_to_zero(test_program_for_x_invert())
2650    }
2651
2652    #[test]
2653    fn constraints_evaluate_to_zero_on_program_for_xb_mul() -> ConstraintResult {
2654        triton_constraints_evaluate_to_zero(test_program_for_xb_mul())
2655    }
2656
2657    #[test]
2658    fn constraints_evaluate_to_zero_on_program_for_read_io_write_io() -> ConstraintResult {
2659        triton_constraints_evaluate_to_zero(test_program_for_read_io_write_io())
2660    }
2661
2662    #[test]
2663    fn constraints_evaluate_to_zero_on_property_based_test_program_for_assert_vector()
2664    -> ConstraintResult {
2665        triton_constraints_evaluate_to_zero(property_based_test_program_for_assert_vector())
2666    }
2667
2668    #[test]
2669    fn constraints_evaluate_to_zero_on_single_sponge_absorb_mem_instructions() -> ConstraintResult {
2670        let program = triton_program!(sponge_init sponge_absorb_mem halt);
2671        let program = TestableProgram::new(program);
2672        triton_constraints_evaluate_to_zero(program)
2673    }
2674
2675    #[proptest(cases = 3)]
2676    fn constraints_evaluate_to_zero_on_property_based_test_program_for_sponge_instructions(
2677        program: ProgramForSpongeAndHashInstructions,
2678    ) {
2679        triton_constraints_evaluate_to_zero(program.assemble())?;
2680    }
2681
2682    #[test]
2683    fn constraints_evaluate_to_zero_on_property_based_test_program_for_split() -> ConstraintResult {
2684        triton_constraints_evaluate_to_zero(property_based_test_program_for_split())
2685    }
2686
2687    #[test]
2688    fn constraints_evaluate_to_zero_on_property_based_test_program_for_eq() -> ConstraintResult {
2689        triton_constraints_evaluate_to_zero(property_based_test_program_for_eq())
2690    }
2691
2692    #[test]
2693    fn constraints_evaluate_to_zero_on_property_based_test_program_for_lsb() -> ConstraintResult {
2694        triton_constraints_evaluate_to_zero(property_based_test_program_for_lsb())
2695    }
2696
2697    #[test]
2698    fn constraints_evaluate_to_zero_on_property_based_test_program_for_lt() -> ConstraintResult {
2699        triton_constraints_evaluate_to_zero(property_based_test_program_for_lt())
2700    }
2701
2702    #[test]
2703    fn constraints_evaluate_to_zero_on_property_based_test_program_for_and() -> ConstraintResult {
2704        triton_constraints_evaluate_to_zero(property_based_test_program_for_and())
2705    }
2706
2707    #[test]
2708    fn constraints_evaluate_to_zero_on_property_based_test_program_for_xor() -> ConstraintResult {
2709        triton_constraints_evaluate_to_zero(property_based_test_program_for_xor())
2710    }
2711
2712    #[test]
2713    fn constraints_evaluate_to_zero_on_property_based_test_program_for_log2floor()
2714    -> ConstraintResult {
2715        triton_constraints_evaluate_to_zero(property_based_test_program_for_log2floor())
2716    }
2717
2718    #[test]
2719    fn constraints_evaluate_to_zero_on_property_based_test_program_for_pow() -> ConstraintResult {
2720        triton_constraints_evaluate_to_zero(property_based_test_program_for_pow())
2721    }
2722
2723    #[test]
2724    fn constraints_evaluate_to_zero_on_property_based_test_program_for_div_mod() -> ConstraintResult
2725    {
2726        triton_constraints_evaluate_to_zero(property_based_test_program_for_div_mod())
2727    }
2728
2729    #[test]
2730    fn constraints_evaluate_to_zero_on_property_based_test_program_for_pop_count()
2731    -> ConstraintResult {
2732        triton_constraints_evaluate_to_zero(property_based_test_program_for_pop_count())
2733    }
2734
2735    #[test]
2736    fn constraints_evaluate_to_zero_on_property_based_test_program_for_is_u32() -> ConstraintResult
2737    {
2738        triton_constraints_evaluate_to_zero(property_based_test_program_for_is_u32())
2739    }
2740
2741    #[test]
2742    fn constraints_evaluate_to_zero_on_property_based_test_program_for_random_ram_access()
2743    -> ConstraintResult {
2744        triton_constraints_evaluate_to_zero(property_based_test_program_for_random_ram_access())
2745    }
2746
2747    #[test]
2748    fn constraints_evaluate_to_zero_on_property_based_test_program_for_xx_dot_step()
2749    -> ConstraintResult {
2750        triton_constraints_evaluate_to_zero(property_based_test_program_for_xx_dot_step())
2751    }
2752
2753    #[test]
2754    fn constraints_evaluate_to_zero_on_property_based_test_program_for_xb_dot_step()
2755    -> ConstraintResult {
2756        triton_constraints_evaluate_to_zero(property_based_test_program_for_xb_dot_step())
2757    }
2758
2759    #[test]
2760    fn can_read_twice_from_same_ram_address_within_one_cycle() -> ConstraintResult {
2761        for i in 0..x_field_element::EXTENSION_DEGREE {
2762            // This program reads from the same RAM address twice within one clock
2763            // cycle, even though the stack is not set up properly for `xx_dot_step`.
2764            let program = triton_program! {
2765                dup 0
2766                addi {i}
2767                xx_dot_step
2768                halt
2769            };
2770            let program = TestableProgram::new(program);
2771            debug_assert!(program.clone().run().is_ok());
2772            triton_constraints_evaluate_to_zero(program)?;
2773        }
2774
2775        Ok(())
2776    }
2777
2778    #[test]
2779    fn claim_in_ram_corresponds_to_currently_running_program() -> ConstraintResult {
2780        triton_constraints_evaluate_to_zero(
2781            test_program_claim_in_ram_corresponds_to_currently_running_program(),
2782        )
2783    }
2784
2785    #[test]
2786    fn derived_constraints_evaluate_to_zero_on_halt() {
2787        derived_constraints_evaluate_to_zero(test_program_for_halt());
2788    }
2789
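    /// Like [`triton_constraints_evaluate_to_zero`], but exercises the derived
    /// constraint evaluators on [`MasterAuxTable`] instead of the per-table
    /// constraint circuits.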
2790    fn derived_constraints_evaluate_to_zero(program: TestableProgram) {
2791        let artifacts = program.generate_proof_artifacts();
2792        let master_main_trace_table = artifacts.master_main_table.trace_table();
2793        let master_aux_trace_table = artifacts.master_aux_table.trace_table();
2794        let challenges = artifacts.challenges;
2795
2796        let evaluated_initial_constraints = MasterAuxTable::evaluate_initial_constraints(
2797            master_main_trace_table.row(0),
2798            master_aux_trace_table.row(0),
2799            &challenges,
2800        );
2801        for (constraint_idx, evaluated_constraint) in
2802            evaluated_initial_constraints.into_iter().enumerate()
2803        {
2804            assert!(
2805                xfe!(0) == evaluated_constraint,
2806                "Initial constraint {constraint_idx} failed.",
2807            );
2808        }
2809
2810        for row_idx in 0..master_main_trace_table.nrows() {
2811            let evaluated_consistency_constraints =
2812                MasterAuxTable::evaluate_consistency_constraints(
2813                    master_main_trace_table.row(row_idx),
2814                    master_aux_trace_table.row(row_idx),
2815                    &challenges,
2816                );
2817            for (constraint_idx, evaluated_constraint) in
2818                evaluated_consistency_constraints.into_iter().enumerate()
2819            {
2820                assert!(
2821                    xfe!(0) == evaluated_constraint,
2822                    "Consistency constraint {constraint_idx} failed in row {row_idx}.",
2823                );
2824            }
2825        }
2826
2827        for curr_row_idx in 0..master_main_trace_table.nrows() - 1 {
2828            let next_row_idx = curr_row_idx + 1;
2829            let evaluated_transition_constraints = MasterAuxTable::evaluate_transition_constraints(
2830                master_main_trace_table.row(curr_row_idx),
2831                master_aux_trace_table.row(curr_row_idx),
2832                master_main_trace_table.row(next_row_idx),
2833                master_aux_trace_table.row(next_row_idx),
2834                &challenges,
2835            );
2836            for (constraint_idx, evaluated_constraint) in
2837                evaluated_transition_constraints.into_iter().enumerate()
2838            {
2839                assert!(
2840                    xfe!(0) == evaluated_constraint,
2841                    "Transition constraint {constraint_idx} failed in row {curr_row_idx}.",
2842                );
2843            }
2844        }
2845
2846        let evaluated_terminal_constraints = MasterAuxTable::evaluate_terminal_constraints(
2847            master_main_trace_table.row(master_main_trace_table.nrows() - 1),
2848            master_aux_trace_table.row(master_aux_trace_table.nrows() - 1),
2849            &challenges,
2850        );
2851        for (constraint_idx, evaluated_constraint) in
2852            evaluated_terminal_constraints.into_iter().enumerate()
2853        {
2854            assert!(
2855                xfe!(0) == evaluated_constraint,
2856                "Terminal constraint {constraint_idx} failed.",
2857            );
2858        }
2859    }
2860
2861    #[test]
2862    fn prove_and_verify_simple_program() {
2863        test_program_hash_nop_nop_lt().prove_and_verify();
2864    }
2865
2866    #[proptest(cases = 10)]
2867    fn prove_and_verify_halt_with_different_stark_parameters(#[strategy(arb())] stark: Stark) {
2868        test_program_for_halt().use_stark(stark).prove_and_verify();
2869    }
2870
2871    #[test]
2872    fn prove_and_verify_fibonacci_100() {
2873        TestableProgram::new(crate::example_programs::FIBONACCI_SEQUENCE.clone())
2874            .with_input(PublicInput::from(bfe_array![100]))
2875            .prove_and_verify();
2876    }
2877
2878    #[test]
2879    fn prove_verify_program_using_pick_and_place() {
2880        let input = bfe_vec![6, 3, 7, 5, 1, 2, 4, 4, 7, 3, 6, 1, 5, 2];
2881        let program = triton_program! {       // i: 13 12 11 10  9  8  7  6  5  4  3  2  1  0
2882            read_io 5 read_io 5 read_io 4     //  _  6  3  7  5 ›1‹ 2  4  4  7  3  6 ›1‹ 5  2
2883            pick 2 pick 9 place 13 place 13   //  _  1  1  6  3  7  5 ›2‹ 4  4  7  3  6  5 ›2‹
2884            pick 0 pick 7 place 13 place 13   //  _  2  2  1  1  6 ›3‹ 7  5  4  4  7 ›3‹ 6  5
2885            pick 2 pick 8 place 13 place 13   //  _  3  3  2  2  1  1  6  7  5 ›4‹›4‹ 7  6  5
2886            pick 3 pick 4 place 13 place 13   //  _  4  4  3  3  2  2  1  1  6  7 ›5‹ 7  6 ›5‹
2887            pick 0 pick 3 place 13 place 13   //  _  5  5  4  4  3  3  2  2  1  1 ›6‹ 7  7 ›6‹
2888            pick 0 pick 3 place 13 place 13   //  _  6  6  5  5  4  4  3  3  2  2  1  1 ›7‹›7‹
2889            pick 1 pick 1 place 13 place 13   //  _  7  7  6  6  5  5  4  4  3  3  2  2  1  1
2890            write_io 5 write_io 5 write_io 4  //  _
2891            halt
2892        };
2893
2894        let program = TestableProgram::new(program).with_input(input);
2895        let output = program.clone().run().unwrap();
2896        let expected_output = bfe_vec![1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7];
2897        assert!(expected_output == output);
2898
2899        program.prove_and_verify();
2900    }
2901
2902    #[test]
2903    fn constraints_evaluate_to_zero_on_many_u32_operations() -> ConstraintResult {
2904        let many_u32_instructions = TestableProgram::new(
2905            crate::example_programs::PROGRAM_WITH_MANY_U32_INSTRUCTIONS.clone(),
2906        );
2907        triton_constraints_evaluate_to_zero(many_u32_instructions)
2908    }
2909
2910    #[test]
2911    fn prove_verify_many_u32_operations() {
2912        TestableProgram::new(crate::example_programs::PROGRAM_WITH_MANY_U32_INSTRUCTIONS.clone())
2913            .prove_and_verify();
2914    }
2915
2916    #[proptest]
2917    fn verifying_arbitrary_proof_does_not_panic(
2918        #[strategy(arb())] stark: Stark,
2919        #[strategy(arb())] claim: Claim,
2920        #[strategy(arb())] proof: Proof,
2921    ) {
2922        let _verdict = stark.verify(&claim, &proof);
2923    }
2924
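    /// `log_2_floor` requires a u32 operand; a larger value must fail with a
    /// `FailedU32Conversion` error rather than produce a wrong result.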
2925    #[proptest]
2926    fn negative_log_2_floor(
2927        #[strategy(arb())]
2928        #[filter(#st0.value() > u64::from(u32::MAX))]
2929        st0: BFieldElement,
2930    ) {
2931        let program = triton_program!(push {st0} log_2_floor halt);
2932        let_assert!(Err(err) = VM::run(program, [].into(), [].into()));
2933        let_assert!(InstructionError::OpStackError(err) = err.source);
2934        let_assert!(OpStackError::FailedU32Conversion(element) = err);
2935        assert!(st0 == element);
2936    }
2937
2938    #[test]
2939    fn negative_log_2_floor_of_0() {
2940        let program = triton_program!(push 0 log_2_floor halt);
2941        let_assert!(Err(err) = VM::run(program, [].into(), [].into()));
2942        let_assert!(InstructionError::LogarithmOfZero = err.source);
2943    }
2944
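    // The DEEP update is, roughly, deep(X) = (f(X) - f(z)) / (X - z) for an
    // out-of-domain point z: with the correct value f(z), the degree drops by
    // one; with a bogus value, the quotient has maximal degree over the domain.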
2945    #[test]
2946    fn deep_update() {
2947        let domain_length = 1 << 10;
2948        let domain = ArithmeticDomain::of_length(domain_length).unwrap();
2949
2950        let poly_degree = rand::rng().random_range(2..20);
2951        let low_deg_poly_coeffs: Vec<XFieldElement> = random_elements(poly_degree);
2952        let low_deg_poly = Polynomial::new(low_deg_poly_coeffs.clone());
2953        let low_deg_codeword = domain.evaluate(&low_deg_poly);
2954
2955        let out_of_domain_point: XFieldElement = rand::rng().random();
2956        let out_of_domain_value = low_deg_poly.evaluate(out_of_domain_point);
2957
2958        let deep_poly = Prover::deep_codeword(
2959            &low_deg_codeword,
2960            domain,
2961            out_of_domain_point,
2962            out_of_domain_value,
2963        );
2964        let poly_of_maybe_low_degree = domain.interpolate(&deep_poly);
2965        assert!(poly_degree as isize - 2 == poly_of_maybe_low_degree.degree());
2966
2967        let bogus_out_of_domain_value = rand::rng().random();
2968        let bogus_deep_poly = Prover::deep_codeword(
2969            &low_deg_codeword,
2970            domain,
2971            out_of_domain_point,
2972            bogus_out_of_domain_value,
2973        );
2974        let poly_of_hopefully_high_degree = domain.interpolate(&bogus_deep_poly);
2975        assert!(domain_length as isize - 1 == poly_of_hopefully_high_degree.degree());
2976    }
2977
2978    /// Re-compose the segments of a polynomial and assert that the result is
2979    /// equal to the polynomial itself. Uses the Schwartz-Zippel lemma to
2980    /// test polynomial equality.
2981    fn assert_polynomial_equals_recomposed_segments<const N: usize, FF: FiniteField>(
2982        f: &Polynomial<FF>,
2983        segments: &[Polynomial<FF>; N],
2984        x: FF,
2985    ) {
2986        let x_pow_n = x.mod_pow_u32(N as u32);
2987        let evaluate_segment = |(segment_idx, segment): (_, &Polynomial<_>)| {
2988            segment.evaluate::<_, FF>(x_pow_n) * x.mod_pow_u32(segment_idx as u32)
2989        };
2990        let evaluated_segments = segments.iter().enumerate().map(evaluate_segment);
2991        let sum_of_evaluated_segments = evaluated_segments.fold(FF::zero(), |acc, x| acc + x);
2992        assert!(f.evaluate::<_, FF>(x) == sum_of_evaluated_segments);
2993    }
2994
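    /// Assert that each of the `N` segments has degree at most ⌊deg(f) / N⌋.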
2995    fn assert_segments_degrees_are_small_enough<const N: usize, FF: FiniteField>(
2996        f: &Polynomial<FF>,
2997        segments: &[Polynomial<FF>; N],
2998    ) {
2999        let max_allowed_degree = f.degree() / (N as isize);
3000        let all_degrees_are_small_enough =
3001            segments.iter().all(|s| s.degree() <= max_allowed_degree);
3002        assert!(all_degrees_are_small_enough);
3003    }
3004
3005    #[test]
3006    fn split_polynomial_into_segments_of_unequal_size() {
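        // 211 is prime, so none of the segment counts below divide the number
        // of coefficients: every split produces segments of unequal size.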
3007        let coefficients: [XFieldElement; 211] = rand::rng().random();
3008        let f = Polynomial::new(coefficients.to_vec());
3009
3010        let segments_2 = Prover::split_polynomial_into_segments::<2, _>(f.clone());
3011        let segments_3 = Prover::split_polynomial_into_segments::<3, _>(f.clone());
3012        let segments_4 = Prover::split_polynomial_into_segments::<4, _>(f.clone());
3013        let segments_7 = Prover::split_polynomial_into_segments::<7, _>(f.clone());
3014
3015        assert_segments_degrees_are_small_enough(&f, &segments_2);
3016        assert_segments_degrees_are_small_enough(&f, &segments_3);
3017        assert_segments_degrees_are_small_enough(&f, &segments_4);
3018        assert_segments_degrees_are_small_enough(&f, &segments_7);
3019
3020        let x = rand::rng().random();
3021        assert_polynomial_equals_recomposed_segments(&f, &segments_2, x);
3022        assert_polynomial_equals_recomposed_segments(&f, &segments_3, x);
3023        assert_polynomial_equals_recomposed_segments(&f, &segments_4, x);
3024        assert_polynomial_equals_recomposed_segments(&f, &segments_7, x);
3025    }
3026
3027    #[test]
3028    fn split_polynomial_into_segments_of_equal_size() {
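        // 2 · 3 · 4 · 7 = 168 coefficients are divisible by every segment count
        // used below, so every split produces segments of equal size.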
3029        let coefficients: [BFieldElement; 2 * 3 * 4 * 7] = rand::rng().random();
3030        let f = Polynomial::new(coefficients.to_vec());
3031
3032        let segments_2 = Prover::split_polynomial_into_segments::<2, _>(f.clone());
3033        let segments_3 = Prover::split_polynomial_into_segments::<3, _>(f.clone());
3034        let segments_4 = Prover::split_polynomial_into_segments::<4, _>(f.clone());
3035        let segments_7 = Prover::split_polynomial_into_segments::<7, _>(f.clone());
3036
3037        assert_segments_degrees_are_small_enough(&f, &segments_2);
3038        assert_segments_degrees_are_small_enough(&f, &segments_3);
3039        assert_segments_degrees_are_small_enough(&f, &segments_4);
3040        assert_segments_degrees_are_small_enough(&f, &segments_7);
3041
3042        let x = rand::rng().random();
3043        assert_polynomial_equals_recomposed_segments(&f, &segments_2, x);
3044        assert_polynomial_equals_recomposed_segments(&f, &segments_3, x);
3045        assert_polynomial_equals_recomposed_segments(&f, &segments_4, x);
3046        assert_polynomial_equals_recomposed_segments(&f, &segments_7, x);
3047    }
3048
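    /// Randomized input for the `segmentify` coherence test below: trace and
    /// coset parameters, a polynomial of matching length, the coset offset
    /// `psi`, and a random point at which to compare evaluations.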
3049    #[derive(Debug, Clone, Eq, PartialEq, Hash, test_strategy::Arbitrary)]
3050    struct SegmentifyProptestData {
3051        #[strategy(2_usize..8)]
3052        log_trace_length: usize,
3053
3054        #[strategy(1_usize..=#log_trace_length.min(4))]
3055        log_num_cosets: usize,
3056
3057        #[strategy(1_usize..6)]
3058        log_expansion_factor: usize,
3059
3060        #[strategy(vec(arb(), (1 << #log_num_cosets) * (1 << #log_trace_length)))]
3061        #[map(Polynomial::new)]
3062        polynomial: Polynomial<'static, XFieldElement>,
3063
3064        #[strategy(arb())]
3065        psi: BFieldElement,
3066
3067        #[strategy(arb())]
3068        random_point: XFieldElement,
3069    }
3070
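    /// Check that `Prover::segmentify` produces segment polynomials and segment
    /// codewords that are coherent with the polynomial they originate from, for
    /// every applicable number of segments.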
3071    #[proptest]
3072    fn polynomial_segments_cohere_with_originating_polynomial(test_data: SegmentifyProptestData) {
3073        fn segmentify_and_assert_coherence<const N: usize>(
3074            test_data: &SegmentifyProptestData,
3075        ) -> TestCaseResult {
3076            let num_cosets = 1 << test_data.log_num_cosets;
3077            let trace_length = 1 << test_data.log_trace_length;
3078            let expansion_factor = 1 << test_data.log_expansion_factor;
3079
3080            let iota =
3081                BFieldElement::primitive_root_of_unity((trace_length * num_cosets) as u64).unwrap();
3082            let trace_domain = ArithmeticDomain::of_length(trace_length)?;
3083            let fri_domain = ArithmeticDomain::of_length(trace_length * expansion_factor)?
3084                .with_offset(test_data.psi);
3085
3086            let coset_evaluations = (0..u32::try_from(num_cosets)?)
3087                .flat_map(|i| {
3088                    let coset = trace_domain.with_offset(iota.mod_pow_u32(i) * test_data.psi);
3089                    coset.evaluate(&test_data.polynomial)
3090                })
3091                .collect();
3092            let coset_evaluations =
3093                Array2::from_shape_vec((trace_length, num_cosets).f(), coset_evaluations)?;
3094
3095            let (actual_segment_codewords, segment_polynomials) =
3096                Prover::segmentify::<N>(coset_evaluations, test_data.psi, iota, fri_domain);
3097
3098            prop_assert_eq!(N, actual_segment_codewords.ncols());
3099            prop_assert_eq!(N, segment_polynomials.len());
3100
3101            let segments_evaluated = (0..)
3102                .zip(&segment_polynomials)
3103                .map(|(segment_index, segment_polynomial)| -> XFieldElement {
3104                    let point_to_the_seg_idx = test_data.random_point.mod_pow_u32(segment_index);
3105                    let point_to_the_num_seg = test_data.random_point.mod_pow_u32(N as u32);
3106                    let segment_in_point_to_the_num_seg =
3107                        segment_polynomial.evaluate_in_same_field(point_to_the_num_seg);
3108                    point_to_the_seg_idx * segment_in_point_to_the_num_seg
3109                })
3110                .sum::<XFieldElement>();
3111            let evaluation_in_random_point = test_data
3112                .polynomial
3113                .evaluate_in_same_field(test_data.random_point);
3114            prop_assert_eq!(segments_evaluated, evaluation_in_random_point);
3115
3116            let segments_codewords = segment_polynomials
3117                .iter()
3118                .flat_map(|polynomial| Array1::from(fri_domain.evaluate(polynomial)))
3119                .collect_vec();
3120            let segments_codewords =
3121                Array2::from_shape_vec((fri_domain.length, N).f(), segments_codewords)?;
3122            prop_assert_eq!(segments_codewords, actual_segment_codewords);
3123
3124            Ok(())
3125        }
3126
3127        let num_cosets = 1 << test_data.log_num_cosets;
3128        if num_cosets >= 1 {
3129            segmentify_and_assert_coherence::<1>(&test_data)?;
3130        }
3131        if num_cosets >= 2 {
3132            segmentify_and_assert_coherence::<2>(&test_data)?;
3133        }
3134        if num_cosets >= 4 {
3135            segmentify_and_assert_coherence::<4>(&test_data)?;
3136        }
3137        if num_cosets >= 8 {
3138            segmentify_and_assert_coherence::<8>(&test_data)?;
3139        }
3140        if num_cosets >= 16 {
3141            segmentify_and_assert_coherence::<16>(&test_data)?;
3142        }
3143    }
3144
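    /// The sampled weights must contain one entry per main column, auxiliary
    /// column, quotient segment, and DEEP codeword component.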
3145    #[proptest]
3146    fn linear_combination_weights_samples_correct_number_of_elements(
3147        #[strategy(arb())] mut proof_stream: ProofStream,
3148    ) {
3149        let weights = LinearCombinationWeights::sample(&mut proof_stream);
3150
3151        prop_assert_eq!(MasterMainTable::NUM_COLUMNS, weights.main.len());
3152        prop_assert_eq!(MasterAuxTable::NUM_COLUMNS, weights.aux.len());
3153        prop_assert_eq!(NUM_QUOTIENT_SEGMENTS, weights.quot_segments.len());
3154        prop_assert_eq!(NUM_DEEP_CODEWORD_COMPONENTS, weights.deep.len());
3155        prop_assert_eq!(
3156            MasterMainTable::NUM_COLUMNS + MasterAuxTable::NUM_COLUMNS,
3157            weights.main_and_aux().len()
3158        );
3159    }
3160
3161    /// A program that executes every instruction in the instruction set.
3162    pub fn program_executing_every_instruction() -> TestableProgram {
3163        let m_step_mem_addr = 100_000;
3164
3165        let program = triton_program! {
3166            // merkle_step using the following fake tree:
3167            //     ─── 1 ───
3168            //    ╱         ╲
3169            //   2           3
3170            //  ╱  ╲
3171            // 4    5
3172            push {m_step_mem_addr}  // _ addr (address for `merkle_step_mem`)
3173            push 0                  // _ addr 0 (spacer)
3174            push 5                  // _ addr 0 5 (node index for `merkle_step`s)
3175            read_io 5               // _ addr 0 5 [digest; 5]
3176            merkle_step             // _ addr 0 2 [digest; 5]
3177            merkle_step_mem         // _ addr 0 1 [digest; 5]
3178            divine 5                // _ addr 0 1 [digest; 5] [digest; 5]
3179            assert_vector           // _ addr 0 1 [digest; 5]
3180            pop 5                   // _ addr 0 1
3181            assert                  // _ addr 0
3182            pop 2                   // _
3183
3184            // stack manipulation
3185            push 1 push 2 push 3    // _  1  2  3
3186            place 2                 // _  3  1  2
3187            pick 1                  // _  3  2  1
3188            swap 2                  // _  1  2  3
3189            dup 2 assert            // _  1  2  3
3190            addi -2 assert          // _  1  2
3191            addi -1 assert          // _  1
3192            assert                  // _
3193
3194            // dot_step
3195            push 0 push 0 push 0    // _ [accumulator; 3]
3196            push 500                // _ [accumulator; 3] addr_0
3197            push 800                // _ [accumulator; 3] addr_0 addr_1
3198            xb_dot_step             // _ [accumulator; 3] addr_0 addr_1
3199            xx_dot_step             // _ [accumulator; 3] addr_0 addr_1
3200            write_io 5              // _
3201
3202            // extension field arithmetic
3203            push 1 push 2 push 3    // _ [xfe_0; 3]
3204            push 7 push 8 push 9    // _ [xfe_0; 3] [xfe_1; 3]
3205            dup 3 dup 3 dup 3       // _ [xfe_0; 3] [xfe_1; 3] [xfe_2; 3]
3206            xx_add                  // _ [xfe_0; 3] [xfe_1; 3]
3207            dup 4 dup 4 dup 4       // _ [xfe_0; 3] [xfe_1; 3] [xfe_2; 3]
3208            xx_mul                  // _ [xfe_0; 3] [xfe_1; 3]
3209            x_invert                // _ [xfe_0; 3] [xfe_1; 3]
3210            push 42                 // _ [xfe_0; 3] [xfe_1; 3] 42
3211            xb_mul                  // _ [xfe_0; 3] [xfe_1; 3]
3212
3213            // base field arithmetic
3214            add mul                 // _ bfe_0 bfe_1 bfe_2 bfe_3
3215            addi 0                  // _ bfe_0 bfe_1 bfe_2 bfe_3
3216            invert                  // _ bfe_0 bfe_1 bfe_2 bfe_3
3217            mul add                 // _ bfe_0 bfe_1
3218            eq                      // _ bfe_0
3219            pop 1                   // _
3220
3221            // bit-wise arithmetic
3222            push 38                 // _ 38
3223            push 2                  // _ 38 2
3224            pow                     // _ big_num
3225            push 1337               // _ big_num 1337
3226            add                     // _ big_num
3227            split                   // _ u32_0 u32_1
3228            dup 1 dup 1 lt pop 1    // _ u32_0 u32_1
3229            dup 1 and               // _ u32_0 u32_1
3230            dup 1 xor               // _ u32_0 u32_1
3231            push 9                  // _ u32_0 u32_1 9
3232            log_2_floor pop 1       // _ u32_0 u32_1
3233            div_mod                 // _ u32_0 u32_1
3234            pop_count               // _ u32_0 u32_1
3235            pop 2                   // _
3236
3237            // Sponge
3238            sponge_init             // _
3239            divine 5 divine 5       // _ [stuff; 10]
3240            sponge_absorb           // _
3241            push 42                 // _ 42
3242            sponge_absorb_mem       // _ 52
3243            pop 1                   // _
3244            sponge_squeeze          // _ [stuff; 10]
3245            hash                    // _ [stuff; 5]
3246            pop 5                   // _
3247
3248            // RAM
3249            push 300                // _ address
3250            read_mem 5              // _ [stuff; 5] address
3251            swap 6                  // _ [stuff; 5] address
3252            write_mem 5             // _ address
3253            pop 1                   // _
3254
3255            // control flow
3256            push 0 skiz nop         // _
3257            push 1 skiz nop         // _
3258            push 0 push 2           // _ 0 2
3259            push 0 push 0 push 0    // _ 0 2 [0; 3]
3260            push 0 push 0           // _ 0 2 [0; 5]
3261            call rec_or_ret         // _ 0 0 [0; 5]
3262            pop 5 pop 2             // _
3263            push 2                  // _ 2
3264            call rec                // _ 0
3265            pop 1
3266            halt
3267
3268            // BEFORE: _ n
3269            // AFTER:  _ 0
3270            rec:
3271                dup 0 push 0 eq     // _ n n==0
3272                skiz return         // _ n
3273                push -1 add         // _ n-1
3274                recurse             // _ n-1
3275
3276            // BEFORE: _ m n [_; 5]
3277            // AFTER:  _ m m [_; 5]
3278            rec_or_ret:
3279                swap 5              // _ m [_; 5] n
3280                push -1 add         // _ m [_; 5] n-1
3281                swap 5              // _ m n-1 [_; 5]
3282                recurse_or_return   // _ m n-1 [_; 5]
3283        };
3284
3285        let tree_node_5 = Digest::new(bfe_array![5; 5]);
3286        let tree_node_4 = Digest::new(bfe_array![4; 5]);
3287        let tree_node_3 = Digest::new(bfe_array![3; 5]);
3288        let tree_node_2 = Tip5::hash_pair(tree_node_4, tree_node_5);
3289        let tree_node_1 = Tip5::hash_pair(tree_node_2, tree_node_3);
3290
3291        let public_input = tree_node_5.values().to_vec();
3292
3293        let secret_input = [tree_node_1.reversed().values().to_vec(), bfe_vec![1337; 10]].concat();
3294        let mut ram = (0..)
3295            .zip(42..)
3296            .take(1_000)
3297            .map(|(l, r)| (bfe!(l), bfe!(r)))
3298            .collect::<HashMap<_, _>>();
3299        for (address, digest_element) in (m_step_mem_addr..).zip(tree_node_3.values()) {
3300            ram.insert(bfe!(address), digest_element);
3301        }
3302        let non_determinism = NonDeterminism::new(secret_input)
3303            .with_digests([tree_node_4])
3304            .with_ram(ram);
3305
3306        TestableProgram::new(program)
3307            .with_input(public_input)
3308            .with_non_determinism(non_determinism)
3309    }
3310
3311    #[test]
3312    fn program_executing_every_instruction_actually_executes_every_instruction() {
3313        let TestableProgram {
3314            program,
3315            public_input,
3316            non_determinism,
3317            ..
3318        } = program_executing_every_instruction();
3319        let (aet, _) = VM::trace_execution(program, public_input, non_determinism).unwrap();
3320        let opcodes_of_all_executed_instructions = aet
3321            .processor_trace
3322            .column(ProcessorMainColumn::CI.main_index())
3323            .iter()
3324            .copied()
3325            .collect::<HashSet<_>>();
3326
3327        let all_opcodes = Instruction::iter()
3328            .map(|instruction| instruction.opcode_b())
3329            .collect::<HashSet<_>>();
3330
3331        all_opcodes
3332            .difference(&opcodes_of_all_executed_instructions)
3333            .for_each(|&opcode| {
3334                let instruction = Instruction::try_from(opcode).unwrap();
3335                eprintln!("Instruction {instruction} was not executed.");
3336            });
3337        assert_eq!(all_opcodes, opcodes_of_all_executed_instructions);
3338    }
3339
3340    #[test]
3341    fn constraints_evaluate_to_zero_on_program_executing_every_instruction() -> ConstraintResult {
3342        triton_constraints_evaluate_to_zero(program_executing_every_instruction())
3343    }
3344}