//! plonky2/fri/recursive_verifier.rs
//! (rustdoc is configured with html-in-header `../../.cargo/katex-header.html`)

1#[cfg(not(feature = "std"))]
2use alloc::{format, vec::Vec};
3use core::ops::RangeInclusive;
4
5use itertools::Itertools;
6
7use crate::field::extension::Extendable;
8use crate::fri::proof::{
9    FriChallengesTarget, FriInitialTreeProofTarget, FriProofTarget, FriQueryRoundTarget,
10    FriQueryStepTarget,
11};
12use crate::fri::structure::{FriBatchInfoTarget, FriInstanceInfoTarget, FriOpeningsTarget};
13use crate::fri::{FriConfig, FriParams};
14use crate::gates::coset_interpolation::CosetInterpolationGate;
15use crate::gates::gate::Gate;
16use crate::gates::random_access::RandomAccessGate;
17use crate::hash::hash_types::{MerkleCapTarget, RichField};
18use crate::iop::ext_target::{flatten_target, ExtensionTarget};
19use crate::iop::target::{BoolTarget, Target};
20use crate::plonk::circuit_builder::CircuitBuilder;
21use crate::plonk::config::{AlgebraicHasher, GenericConfig};
22use crate::util::reducing::ReducingFactorTarget;
23use crate::util::{log2_strict, reverse_index_bits_in_place};
24use crate::with_context;
25
26impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
    /// Computes P'(x^arity) from {P(x*g^i)}_(i=0..arity), where g is a `arity`-th root of unity
    /// and P' is the FRI reduced polynomial.
    ///
    /// In-circuit analogue of one FRI folding step: interpolates the `arity` evaluations over
    /// the coset containing `x` and evaluates the interpolant at `beta`.
    pub(crate) fn compute_evaluation(
        &mut self,
        x: Target,
        x_index_within_coset_bits: &[BoolTarget],
        arity_bits: usize,
        evals: &[ExtensionTarget<D>],
        beta: ExtensionTarget<D>,
    ) -> ExtensionTarget<D> {
        let arity = 1 << arity_bits;
        debug_assert_eq!(evals.len(), arity);

        let g = F::primitive_root_of_unity(arity_bits);
        // Since `g` has order `arity`, g^(arity - 1) = g^-1.
        let g_inv = g.exp_u64((arity as u64) - 1);

        // The evaluation vector needs to be reordered first.
        let mut evals = evals.to_vec();
        reverse_index_bits_in_place(&mut evals);
        // Want `g^(arity - rev_x_index_within_coset)` as in the out-of-circuit version. Compute it
        // as `(g^-1)^rev_x_index_within_coset`.
        let start = self.exp_from_bits_const_base(g_inv, x_index_within_coset_bits.iter().rev());
        // Multiplying `x` by `start` maps it back to the first element of its coset.
        let coset_start = self.mul(start, x);

        // The answer is gotten by interpolating {(x*g^i, P(x*g^i))} and evaluating at beta.
        let interpolation_gate = <CosetInterpolationGate<F, D>>::with_max_degree(
            arity_bits,
            self.config.max_quotient_degree_factor,
        );
        self.interpolate_coset(interpolation_gate, coset_start, &evals, beta)
    }
58
59    /// Make sure we have enough wires and routed wires to do the FRI checks efficiently. This check
60    /// isn't required -- without it we'd get errors elsewhere in the stack -- but just gives more
61    /// helpful errors.
62    pub(crate) fn check_recursion_config(&self, max_fri_arity_bits: usize) {
63        let random_access = RandomAccessGate::<F, D>::new_from_config(
64            &self.config,
65            max_fri_arity_bits.max(self.config.fri_config.cap_height),
66        );
67        let interpolation_gate = CosetInterpolationGate::<F, D>::with_max_degree(
68            max_fri_arity_bits,
69            self.config.max_quotient_degree_factor,
70        );
71
72        let interpolation_wires = interpolation_gate.num_wires();
73        let interpolation_routed_wires = interpolation_gate.num_routed_wires();
74
75        let min_wires = random_access.num_wires().max(interpolation_wires);
76        let min_routed_wires = random_access
77            .num_routed_wires()
78            .max(interpolation_routed_wires);
79
80        assert!(
81            self.config.num_wires >= min_wires,
82            "To efficiently perform FRI checks with an arity of 2^{}, at least {} wires are needed. Consider reducing arity.",
83            max_fri_arity_bits,
84            min_wires
85        );
86
87        assert!(
88            self.config.num_routed_wires >= min_routed_wires,
89            "To efficiently perform FRI checks with an arity of 2^{}, at least {} routed wires are needed. Consider reducing arity.",
90            max_fri_arity_bits,
91            min_routed_wires
92        );
93    }
94
95    pub(crate) fn fri_verify_proof_of_work(
96        &mut self,
97        fri_pow_response: Target,
98        config: &FriConfig,
99    ) {
100        self.assert_leading_zeros(
101            fri_pow_response,
102            config.proof_of_work_bits + (64 - F::order().bits()) as u32,
103        );
104    }
105
    /// Adds gates verifying a FRI proof against the given instance, openings, challenges and
    /// initial Merkle caps.
    ///
    /// Runs the proof-of-work check, precomputes the alpha-reduced openings shared by all
    /// query rounds, then verifies each of the `params.config.num_query_rounds` query rounds.
    pub fn verify_fri_proof<C: GenericConfig<D, F = F>>(
        &mut self,
        instance: &FriInstanceInfoTarget<D>,
        openings: &FriOpeningsTarget<D>,
        challenges: &FriChallengesTarget<D>,
        initial_merkle_caps: &[MerkleCapTarget],
        proof: &FriProofTarget<D>,
        params: &FriParams,
    ) where
        C::Hasher: AlgebraicHasher<F>,
    {
        // Optional sanity check that the circuit config can handle the largest arity.
        if let Some(max_arity_bits) = params.max_arity_bits() {
            self.check_recursion_config(max_arity_bits);
        }

        debug_assert_eq!(
            params.final_poly_len(),
            proof.final_poly.len(),
            "Final polynomial has wrong degree."
        );

        // Size of the LDE domain.
        let n = params.lde_size();

        with_context!(
            self,
            "check PoW",
            self.fri_verify_proof_of_work(challenges.fri_pow_response, &params.config)
        );

        // Check that parameters are coherent.
        debug_assert_eq!(
            params.config.num_query_rounds,
            proof.query_round_proofs.len(),
            "Number of query rounds does not match config."
        );

        // Reduce all openings by alpha once, up front; every query round reuses this.
        let precomputed_reduced_evals = with_context!(
            self,
            "precompute reduced evaluations",
            PrecomputedReducedOpeningsTarget::from_os_and_alpha(
                openings,
                challenges.fri_alpha,
                self
            )
        );

        for (i, round_proof) in proof.query_round_proofs.iter().enumerate() {
            // To minimize noise in our logs, we will only record a context for a single FRI query.
            // The very first query will have some extra gates due to constants being registered, so
            // the second query is a better representative.
            let level = if i == 1 {
                log::Level::Debug
            } else {
                log::Level::Trace
            };

            let num_queries = proof.query_round_proofs.len();
            with_context!(
                self,
                level,
                &format!("verify one (of {num_queries}) query rounds"),
                self.fri_verifier_query_round::<C>(
                    instance,
                    challenges,
                    &precomputed_reduced_evals,
                    initial_merkle_caps,
                    proof,
                    challenges.fri_query_indices[i],
                    n,
                    round_proof,
                    params,
                )
            );
        }
    }
182
    /// Verifies the current FRI proof with `current_degree_bits`, which may differ from the
    /// circuit's `degree_bits` in `params`.
    /// The circuit uses random access gates to select and connect the current hash/evaluation
    /// values with those in the proof. It is designed with the maximum number of query/folding
    /// steps and final polynomial length at `degree_bits`, "skipping" steps when the actual proof
    /// has fewer.
    ///
    /// `current_degree_bits` and `degree_sub_one_bits_vec` are circuit targets encoding the
    /// actual degree; `min_degree_bits_to_support` bounds the smallest degree the circuit must
    /// handle.
    pub fn verify_fri_proof_with_multiple_degree_bits<C: GenericConfig<D, F = F>>(
        &mut self,
        instance: &FriInstanceInfoTarget<D>,
        openings: &FriOpeningsTarget<D>,
        challenges: &FriChallengesTarget<D>,
        initial_merkle_caps: &[MerkleCapTarget],
        proof: &FriProofTarget<D>,
        params: &FriParams,
        current_degree_bits: Target,
        degree_sub_one_bits_vec: &[BoolTarget],
        min_degree_bits_to_support: usize,
    ) where
        C::Hasher: AlgebraicHasher<F>,
    {
        // Optional sanity check that the circuit config can handle the largest arity.
        if let Some(max_arity_bits) = params.max_arity_bits() {
            self.check_recursion_config(max_arity_bits);
        }

        debug_assert_eq!(
            params.final_poly_len(),
            proof.final_poly.len(),
            "Final polynomial has wrong degree."
        );

        // Size of the LDE domain.
        let log_n = params.config.rate_bits + params.degree_bits;
        // The actual log-size as an in-circuit value: rate_bits + current_degree_bits.
        let mut current_log_n = self.constant(F::from_canonical_usize(params.config.rate_bits));
        current_log_n = self.add(current_log_n, current_degree_bits);
        let min_log_n_to_support = params.config.rate_bits + min_degree_bits_to_support;

        with_context!(
            self,
            "check PoW",
            self.fri_verify_proof_of_work(challenges.fri_pow_response, &params.config)
        );

        // Check that parameters are coherent.
        debug_assert_eq!(
            params.config.num_query_rounds,
            proof.query_round_proofs.len(),
            "Number of query rounds does not match config."
        );

        // Reduce all openings by alpha once, up front; every query round reuses this.
        let precomputed_reduced_evals = with_context!(
            self,
            "precompute reduced evaluations",
            PrecomputedReducedOpeningsTarget::from_os_and_alpha(
                openings,
                challenges.fri_alpha,
                self
            )
        );

        for (i, round_proof) in proof.query_round_proofs.iter().enumerate() {
            // To minimize noise in our logs, we will only record a context for a single FRI query.
            // The very first query will have some extra gates due to constants being registered, so
            // the second query is a better representative.
            let level = if i == 1 {
                log::Level::Debug
            } else {
                log::Level::Trace
            };

            let num_queries = proof.query_round_proofs.len();
            with_context!(
                self,
                level,
                &format!("verify one (of {num_queries}) query rounds"),
                self.fri_verifier_query_round_with_multiple_degree_bits::<C>(
                    instance,
                    challenges,
                    &precomputed_reduced_evals,
                    initial_merkle_caps,
                    proof,
                    challenges.fri_query_indices[i],
                    min_log_n_to_support..=log_n,
                    current_log_n,
                    degree_sub_one_bits_vec,
                    round_proof,
                    params,
                )
            );
        }
    }
273
274    fn fri_verify_initial_proof<H: AlgebraicHasher<F>>(
275        &mut self,
276        x_index_bits: &[BoolTarget],
277        proof: &FriInitialTreeProofTarget,
278        initial_merkle_caps: &[MerkleCapTarget],
279        cap_index: Target,
280    ) {
281        for (i, ((evals, merkle_proof), cap)) in proof
282            .evals_proofs
283            .iter()
284            .zip(initial_merkle_caps)
285            .enumerate()
286        {
287            with_context!(
288                self,
289                &format!("verify {i}'th initial Merkle proof"),
290                self.verify_merkle_proof_to_cap_with_cap_index::<H>(
291                    evals.clone(),
292                    x_index_bits,
293                    cap_index,
294                    cap,
295                    merkle_proof
296                )
297            );
298        }
299    }
300
    /// Like `fri_verify_initial_proof`, but for proofs whose actual LDE log-size may be any
    /// value in `log_n_range`; `n_index` selects the actual size within that range.
    fn fri_verify_initial_proof_with_multiple_degree_bits<H: AlgebraicHasher<F>>(
        &mut self,
        x_index_bits: &[BoolTarget],
        log_n_range: RangeInclusive<usize>,
        n_index: Target,
        proof: &FriInitialTreeProofTarget,
        initial_merkle_caps: &[MerkleCapTarget],
        cap_index: Target,
    ) {
        // NOTE(review): `one` is passed where the query-round loop passes `step_active.target`,
        // so it presumably marks these initial checks as unconditionally enabled — confirm
        // against `verify_merkle_proof_to_cap_with_cap_indices`.
        let one = self.one();
        for (i, ((evals, merkle_proof), cap)) in proof
            .evals_proofs
            .iter()
            .zip(initial_merkle_caps)
            .enumerate()
        {
            with_context!(
                self,
                &format!("verify {i}'th initial Merkle proof"),
                self.verify_merkle_proof_to_cap_with_cap_indices::<H>(
                    one,
                    evals.clone(),
                    x_index_bits,
                    log_n_range.clone(),
                    n_index,
                    cap_index,
                    cap,
                    merkle_proof
                )
            );
        }
    }
333
    /// Combines the initial-tree openings of one query into a single extension-field value.
    ///
    /// For each batch opened at `point`, accumulates
    /// `(reduce_alpha(evals) - reduced_opening) / (subgroup_x - point)`, shifting the running
    /// sum by the appropriate power of `alpha` between batches.
    fn fri_combine_initial(
        &mut self,
        instance: &FriInstanceInfoTarget<D>,
        proof: &FriInitialTreeProofTarget,
        alpha: ExtensionTarget<D>,
        subgroup_x: Target,
        precomputed_reduced_evals: &PrecomputedReducedOpeningsTarget<D>,
        params: &FriParams,
    ) -> ExtensionTarget<D> {
        assert!(D > 1, "Not implemented for D=1.");
        let degree_log = params.degree_bits;
        // Sanity check: the Merkle path length in the proof must be consistent with the
        // degree, cap height and rate recorded in `params`.
        debug_assert_eq!(
            degree_log,
            params.config.cap_height + proof.evals_proofs[0].1.siblings.len()
                - params.config.rate_bits
        );
        let subgroup_x = self.convert_to_ext(subgroup_x);
        let mut alpha = ReducingFactorTarget::new(alpha);
        let mut sum = self.zero_extension();

        for (batch, reduced_openings) in instance
            .batches
            .iter()
            .zip(&precomputed_reduced_evals.reduced_openings_at_point)
        {
            let FriBatchInfoTarget { point, polynomials } = batch;
            // Collect the committed evaluations of every polynomial in this batch, skipping
            // any salt appended to blinded oracles when `params.hiding` is set.
            let evals = polynomials
                .iter()
                .map(|p| {
                    let poly_blinding = instance.oracles[p.oracle_index].blinding;
                    let salted = params.hiding && poly_blinding;
                    proof.unsalted_eval(p.oracle_index, p.polynomial_index, salted)
                })
                .collect_vec();
            let reduced_evals = alpha.reduce_base(&evals, self);
            let numerator = self.sub_extension(reduced_evals, *reduced_openings);
            let denominator = self.sub_extension(subgroup_x, *point);
            sum = alpha.shift(sum, self);
            sum = self.div_add_extension(numerator, denominator, sum);
        }

        sum
    }
377
    /// Adds the constraints for a single FRI query round: checks the initial Merkle proofs,
    /// walks the reduction steps (at each step checking consistency of the previously derived
    /// evaluation with the committed values, then folding via interpolation), and finally
    /// compares against the prover's final polynomial evaluated at the reduced point.
    fn fri_verifier_query_round<C: GenericConfig<D, F = F>>(
        &mut self,
        instance: &FriInstanceInfoTarget<D>,
        challenges: &FriChallengesTarget<D>,
        precomputed_reduced_evals: &PrecomputedReducedOpeningsTarget<D>,
        initial_merkle_caps: &[MerkleCapTarget],
        proof: &FriProofTarget<D>,
        x_index: Target,
        n: usize,
        round_proof: &FriQueryRoundTarget<D>,
        params: &FriParams,
    ) where
        C::Hasher: AlgebraicHasher<F>,
    {
        let n_log = log2_strict(n);

        // Note that this `low_bits` decomposition permits non-canonical binary encodings. Here we
        // verify that this has a negligible impact on soundness error.
        Self::assert_noncanonical_indices_ok(&params.config);
        let mut x_index_bits = self.low_bits(x_index, n_log, F::BITS);

        // The top `cap_height` bits of the index select the Merkle cap entry.
        let cap_index =
            self.le_sum(x_index_bits[x_index_bits.len() - params.config.cap_height..].iter());
        with_context!(
            self,
            "check FRI initial proof",
            self.fri_verify_initial_proof::<C::Hasher>(
                &x_index_bits,
                &round_proof.initial_trees_proof,
                initial_merkle_caps,
                cap_index
            )
        );

        // `subgroup_x` is `subgroup[x_index]`, i.e., the actual field element in the domain.
        let mut subgroup_x = with_context!(self, "compute x from its index", {
            let g = self.constant(F::coset_shift());
            let phi = F::primitive_root_of_unity(n_log);
            let phi = self.exp_from_bits_const_base(phi, x_index_bits.iter().rev());
            // subgroup_x = g * phi
            self.mul(g, phi)
        });

        // old_eval is the last derived evaluation; it will be checked for consistency with its
        // committed "parent" value in the next iteration.
        let mut old_eval = with_context!(
            self,
            "combine initial oracles",
            self.fri_combine_initial(
                instance,
                &round_proof.initial_trees_proof,
                challenges.fri_alpha,
                subgroup_x,
                precomputed_reduced_evals,
                params,
            )
        );

        for (i, &arity_bits) in params.reduction_arity_bits.iter().enumerate() {
            let evals = &round_proof.steps[i].evals;

            // Split x_index into the index of the coset x is in, and the index of x within that coset.
            let coset_index_bits = x_index_bits[arity_bits..].to_vec();
            let x_index_within_coset_bits = &x_index_bits[..arity_bits];
            let x_index_within_coset = self.le_sum(x_index_within_coset_bits.iter());

            // Check consistency with our old evaluation from the previous round.
            let new_eval = self.random_access_extension(x_index_within_coset, evals.clone());
            self.connect_extension(new_eval, old_eval);

            // Infer P(y) from {P(x)}_{x^arity=y}.
            old_eval = with_context!(
                self,
                "infer evaluation using interpolation",
                self.compute_evaluation(
                    subgroup_x,
                    x_index_within_coset_bits,
                    arity_bits,
                    evals,
                    challenges.fri_betas[i],
                )
            );

            // The step's evaluations must be committed under this round's cap.
            with_context!(
                self,
                "verify FRI round Merkle proof.",
                self.verify_merkle_proof_to_cap_with_cap_index::<C::Hasher>(
                    flatten_target(evals),
                    &coset_index_bits,
                    cap_index,
                    &proof.commit_phase_merkle_caps[i],
                    &round_proof.steps[i].merkle_proof,
                )
            );

            // Update the point x to x^arity.
            subgroup_x = self.exp_power_of_2(subgroup_x, arity_bits);

            x_index_bits = coset_index_bits;
        }

        // Final check of FRI. After all the reductions, we check that the final polynomial is equal
        // to the one sent by the prover.
        let eval = with_context!(
            self,
            &format!(
                "evaluate final polynomial of length {}",
                proof.final_poly.len()
            ),
            proof.final_poly.eval_scalar(self, subgroup_x)
        );
        self.connect_extension(eval, old_eval);
    }
491
    /// Variant of `fri_verifier_query_round` for circuits supporting multiple degrees: the
    /// circuit is laid out for the maximum `log_n`, and each reduction step is conditionally
    /// enabled by a bit of `degree - 1` so that proofs of smaller degree "skip" steps.
    fn fri_verifier_query_round_with_multiple_degree_bits<C: GenericConfig<D, F = F>>(
        &mut self,
        instance: &FriInstanceInfoTarget<D>,
        challenges: &FriChallengesTarget<D>,
        precomputed_reduced_evals: &PrecomputedReducedOpeningsTarget<D>,
        initial_merkle_caps: &[MerkleCapTarget],
        proof: &FriProofTarget<D>,
        x_index: Target,
        log_n_range: RangeInclusive<usize>,
        log_n: Target,
        degree_sub_one_bits_vec: &[BoolTarget],
        round_proof: &FriQueryRoundTarget<D>,
        params: &FriParams,
    ) where
        C::Hasher: AlgebraicHasher<F>,
    {
        // Required so the `n - params.config.cap_height` slice starts computed below can
        // never underflow.
        assert!(*log_n_range.start() > params.config.cap_height);
        // Offset of the actual log-size within the supported range, as a circuit value.
        let n_index = {
            let min_log_n = self.constant(F::from_canonical_usize(*log_n_range.start()));
            self.sub(log_n, min_log_n)
        };

        // Note that this `low_bits` decomposition permits non-canonical binary encodings. Here we
        // verify that this has a negligible impact on soundness error.
        Self::assert_noncanonical_indices_ok(&params.config);
        let mut x_index_bits = self.low_bits(x_index, *log_n_range.end(), F::BITS);

        // Compute a candidate cap index for every supported log-size, then select the one
        // matching the actual size via random access.
        let cap_indices: Vec<_> = log_n_range
            .clone()
            .map(|n| {
                let slice_start = n - params.config.cap_height;
                self.le_sum(x_index_bits[slice_start..n].iter())
            })
            .collect();
        let cap_index = self.random_access(n_index, cap_indices);
        with_context!(
            self,
            "check FRI initial proof",
            self.fri_verify_initial_proof_with_multiple_degree_bits::<C::Hasher>(
                &x_index_bits,
                log_n_range.clone(),
                n_index,
                &round_proof.initial_trees_proof,
                initial_merkle_caps,
                cap_index,
            )
        );

        let g = self.constant(F::coset_shift());
        // `subgroup_x` is `subgroup[x_index]`, i.e., the actual field element in the domain.
        // As above, compute it for every supported log-size and select the right one.
        let subgroup_x_vec: Vec<_> = log_n_range
            .clone()
            .map(|n| {
                with_context!(self, "compute x from its index", {
                    let phi = F::primitive_root_of_unity(n);
                    let phi = self.exp_from_bits_const_base(phi, x_index_bits[..n].iter().rev());
                    // subgroup_x = g * phi
                    self.mul(g, phi)
                })
            })
            .collect();

        let mut subgroup_x = self.random_access(n_index, subgroup_x_vec);

        // old_eval is the last derived evaluation; it will be checked for consistency with its
        // committed "parent" value in the next iteration.
        let mut old_eval = with_context!(
            self,
            "combine initial oracles",
            self.fri_combine_initial(
                instance,
                &round_proof.initial_trees_proof,
                challenges.fri_alpha,
                subgroup_x,
                precomputed_reduced_evals,
                params,
            )
        );

        // Position in `degree_sub_one_bits_vec` of the bit gating the first reduction step;
        // the last `sum(reduction_arity_bits)` bits gate the steps, `arity_bits` bits each.
        let mut index_in_degree_sub_one_bits_vec = {
            let mut degree_bits_len = degree_sub_one_bits_vec.len();
            for arity_bits in &params.reduction_arity_bits {
                degree_bits_len -= arity_bits;
            }
            degree_bits_len
        };
        for (i, &arity_bits) in params.reduction_arity_bits.iter().enumerate() {
            let evals = &round_proof.steps[i].evals;

            // Split x_index into the index of the coset x is in, and the index of x within that coset.
            let coset_index_bits = x_index_bits[arity_bits..].to_vec();
            let x_index_within_coset_bits = &x_index_bits[..arity_bits];
            let x_index_within_coset = self.le_sum(x_index_within_coset_bits.iter());

            // Check consistency with our old evaluation from the previous round. The check is
            // only enforced when this step is active for the actual degree.
            let new_eval = self.random_access_extension(x_index_within_coset, evals.clone());
            let step_active = degree_sub_one_bits_vec[index_in_degree_sub_one_bits_vec];
            self.conditional_assert_eq_ext(step_active.target, new_eval, old_eval);

            // Infer P(y) from {P(x)}_{x^arity=y}.
            let eval = with_context!(
                self,
                "infer evaluation using interpolation",
                self.compute_evaluation(
                    subgroup_x,
                    x_index_within_coset_bits,
                    arity_bits,
                    evals,
                    challenges.fri_betas[i],
                )
            );
            // Inactive steps pass `old_eval` through unchanged.
            old_eval = self.select_ext(step_active, eval, old_eval);

            with_context!(
                self,
                "verify FRI round Merkle proof.",
                self.verify_merkle_proof_to_cap_with_cap_indices::<C::Hasher>(
                    step_active.target,
                    flatten_target(evals),
                    &coset_index_bits,
                    log_n_range.clone(),
                    n_index,
                    cap_index,
                    &proof.commit_phase_merkle_caps[i],
                    &round_proof.steps[i].merkle_proof,
                )
            );

            // Update the point x to x^arity (only when the step is active).
            let subgroup_x_cur = self.exp_power_of_2(subgroup_x, arity_bits);
            subgroup_x = self.select(step_active, subgroup_x_cur, subgroup_x);

            x_index_bits = coset_index_bits;
            index_in_degree_sub_one_bits_vec += arity_bits;
        }

        // Final check of FRI. After all the reductions, we check that the final polynomial is equal
        // to the one sent by the prover.
        let eval = with_context!(
            self,
            &format!(
                "evaluate final polynomial of length {}",
                proof.final_poly.len()
            ),
            proof.final_poly.eval_scalar(self, subgroup_x)
        );
        self.connect_extension(eval, old_eval);
    }
640
641    /// We decompose FRI query indices into bits without verifying that the decomposition given by
642    /// the prover is the canonical one. In particular, if `x_index < 2^field_bits - p`, then the
643    /// prover could supply the binary encoding of either `x_index` or `x_index + p`, since the are
644    /// congruent mod `p`. However, this only occurs with probability
645    ///     p_ambiguous = (2^field_bits - p) / p
646    /// which is small for the field that we use in practice.
647    ///
648    /// In particular, the soundness error of one FRI query is roughly the codeword rate, which
649    /// is much larger than this ambiguous-element probability given any reasonable parameters.
650    /// Thus ambiguous elements contribute a negligible amount to soundness error.
651    ///
652    /// Here we compare the probabilities as a sanity check, to verify the claim above.
653    pub(crate) fn assert_noncanonical_indices_ok(config: &FriConfig) {
654        let num_ambiguous_elems = u64::MAX - F::ORDER + 1;
655        let query_error = config.rate();
656        let p_ambiguous = (num_ambiguous_elems as f64) / (F::ORDER as f64);
657        assert!(p_ambiguous < query_error * 1e-5,
658                "A non-negligible portion of field elements are in the range that permits non-canonical encodings. Need to do more analysis or enforce canonical encodings.");
659    }
660
661    pub fn add_virtual_fri_proof(
662        &mut self,
663        num_leaves_per_oracle: &[usize],
664        params: &FriParams,
665    ) -> FriProofTarget<D> {
666        let cap_height = params.config.cap_height;
667        let num_queries = params.config.num_query_rounds;
668        let commit_phase_merkle_caps = (0..params.reduction_arity_bits.len())
669            .map(|_| self.add_virtual_cap(cap_height))
670            .collect();
671        let query_round_proofs = (0..num_queries)
672            .map(|_| self.add_virtual_fri_query(num_leaves_per_oracle, params))
673            .collect();
674        let final_poly = self.add_virtual_poly_coeff_ext(params.final_poly_len());
675        let pow_witness = self.add_virtual_target();
676        FriProofTarget {
677            commit_phase_merkle_caps,
678            query_round_proofs,
679            final_poly,
680            pow_witness,
681        }
682    }
683
684    fn add_virtual_fri_query(
685        &mut self,
686        num_leaves_per_oracle: &[usize],
687        params: &FriParams,
688    ) -> FriQueryRoundTarget<D> {
689        let cap_height = params.config.cap_height;
690        assert!(params.lde_bits() >= cap_height);
691        let mut merkle_proof_len = params.lde_bits() - cap_height;
692
693        let initial_trees_proof =
694            self.add_virtual_fri_initial_trees_proof(num_leaves_per_oracle, merkle_proof_len);
695
696        let mut steps = Vec::with_capacity(params.reduction_arity_bits.len());
697        for &arity_bits in &params.reduction_arity_bits {
698            assert!(merkle_proof_len >= arity_bits);
699            merkle_proof_len -= arity_bits;
700            steps.push(self.add_virtual_fri_query_step(arity_bits, merkle_proof_len));
701        }
702
703        FriQueryRoundTarget {
704            initial_trees_proof,
705            steps,
706        }
707    }
708
709    fn add_virtual_fri_initial_trees_proof(
710        &mut self,
711        num_leaves_per_oracle: &[usize],
712        initial_merkle_proof_len: usize,
713    ) -> FriInitialTreeProofTarget {
714        let evals_proofs = num_leaves_per_oracle
715            .iter()
716            .map(|&num_oracle_leaves| {
717                let leaves = self.add_virtual_targets(num_oracle_leaves);
718                let merkle_proof = self.add_virtual_merkle_proof(initial_merkle_proof_len);
719                (leaves, merkle_proof)
720            })
721            .collect();
722        FriInitialTreeProofTarget { evals_proofs }
723    }
724
725    fn add_virtual_fri_query_step(
726        &mut self,
727        arity_bits: usize,
728        merkle_proof_len: usize,
729    ) -> FriQueryStepTarget<D> {
730        FriQueryStepTarget {
731            evals: self.add_virtual_extension_targets(1 << arity_bits),
732            merkle_proof: self.add_virtual_merkle_proof(merkle_proof_len),
733        }
734    }
735}
736
/// For each opening point, holds the reduced (by `alpha`) evaluations of each polynomial that's
/// opened at that point.
#[derive(Clone)]
pub(crate) struct PrecomputedReducedOpeningsTarget<const D: usize> {
    // One alpha-reduced value per batch, in the order of `FriOpeningsTarget::batches`.
    pub(crate) reduced_openings_at_point: Vec<ExtensionTarget<D>>,
}
743
744impl<const D: usize> PrecomputedReducedOpeningsTarget<D> {
745    pub(crate) fn from_os_and_alpha<F: RichField + Extendable<D>>(
746        openings: &FriOpeningsTarget<D>,
747        alpha: ExtensionTarget<D>,
748        builder: &mut CircuitBuilder<F, D>,
749    ) -> Self {
750        let reduced_openings_at_point = openings
751            .batches
752            .iter()
753            .map(|batch| ReducingFactorTarget::new(alpha).reduce(&batch.values, builder))
754            .collect();
755        Self {
756            reduced_openings_at_point,
757        }
758    }
759}