use std::{
    array,
    borrow::{Borrow, BorrowMut},
    marker::PhantomData,
};

use itertools::Itertools;
use p3_baby_bear::BabyBear;
use p3_commit::TwoAdicMultiplicativeCoset;
use p3_field::{AbstractField, PrimeField32, TwoAdicField};
use sp1_core_machine::{cpu::MAX_CPU_LOG_DEGREE, riscv::RiscvAir};
use sp1_primitives::{consts::WORD_SIZE, types::RecursionProgramType};
use sp1_recursion_compiler::{
    config::InnerConfig,
    ir::{Array, Builder, Config, Ext, ExtConst, Felt, Var},
    prelude::{DslVariable, *},
};
use sp1_recursion_core::{
    air::{RecursionPublicValues, RECURSIVE_PROOF_NUM_PV_ELTS},
    runtime::{RecursionProgram, DIGEST_SIZE},
};
use sp1_stark::{
    air::{MachineAir, PublicValues, POSEIDON_NUM_WORDS, PV_DIGEST_NUM_WORDS},
    baby_bear_poseidon2::BabyBearPoseidon2,
    Com, ShardProof, StarkGenericConfig, StarkMachine, StarkVerifyingKey, Word,
};

use crate::{
    challenger::{CanObserveVariable, DuplexChallengerVariable},
    fri::TwoAdicFriPcsVariable,
    hints::Hintable,
    stark::{StarkVerifier, EMPTY},
    types::{ShardProofVariable, VerifyingKeyVariable},
    utils::{const_fri_config, felt2var, get_challenger_public_values, hash_vkey, var2felt},
};

use super::utils::{assert_complete, commit_public_values};

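/// A program for recursively verifying a batch of SP1 core shard proofs and aggregating their
/// public values into a single set of recursion public values.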
#[derive(Debug, Clone, Copy)]
pub struct SP1RecursiveVerifier<C: Config, SC: StarkGenericConfig> {
    _phantom: PhantomData<(C, SC)>,
}

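/// The inputs consumed by the recursive verification program: the verifying key, the machine, the
/// batch of shard proofs, the challenger states, and a flag indicating whether the batch is
/// complete.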
pub struct SP1RecursionMemoryLayout<'a, SC: StarkGenericConfig, A: MachineAir<SC::Val>> {
    pub vk: &'a StarkVerifyingKey<SC>,
    pub machine: &'a StarkMachine<SC, A>,
    pub shard_proofs: Vec<ShardProof<SC>>,
    pub leaf_challenger: &'a SC::Challenger,
    pub initial_reconstruct_challenger: SC::Challenger,
    pub is_complete: bool,
}

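/// The in-circuit counterpart of [SP1RecursionMemoryLayout], with each input allocated as a
/// variable in the recursion builder.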
#[derive(DslVariable, Clone)]
pub struct SP1RecursionMemoryLayoutVariable<C: Config> {
    pub vk: VerifyingKeyVariable<C>,
    pub shard_proofs: Array<C, ShardProofVariable<C>>,
    pub leaf_challenger: DuplexChallengerVariable<C>,
    pub initial_reconstruct_challenger: DuplexChallengerVariable<C>,
    pub is_complete: Var<C::N>,
}

impl SP1RecursiveVerifier<InnerConfig, BabyBearPoseidon2> {
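    /// Builds the recursion program that verifies a batch of SP1 core shard proofs.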
    pub fn build(
        machine: &StarkMachine<BabyBearPoseidon2, RiscvAir<BabyBear>>,
    ) -> RecursionProgram<BabyBear> {
        let mut builder = Builder::<InnerConfig>::new(RecursionProgramType::Core);

        let input: SP1RecursionMemoryLayoutVariable<_> = builder.uninit();
        SP1RecursionMemoryLayout::<BabyBearPoseidon2, RiscvAir<_>>::witness(&input, &mut builder);

        let pcs = TwoAdicFriPcsVariable {
            config: const_fri_config(&mut builder, machine.config().pcs().fri_config()),
        };
        SP1RecursiveVerifier::verify(&mut builder, &pcs, machine, input);

        builder.halt();

        builder.compile_program()
    }
}

impl<C: Config, SC: StarkGenericConfig> SP1RecursiveVerifier<C, SC>
where
    C::F: PrimeField32 + TwoAdicField,
    SC: StarkGenericConfig<
        Val = C::F,
        Challenge = C::EF,
        Domain = TwoAdicMultiplicativeCoset<C::F>,
    >,
    Com<SC>: Into<[SC::Val; DIGEST_SIZE]>,
{
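    /// Verify a batch of SP1 core shard proofs and aggregate their public values.
    ///
    /// For each proof in the batch, this program verifies the shard proof against the given
    /// verifying key and checks that the shard number, execution shard, program counter, memory
    /// address bits, and digests transition consistently from one shard to the next. It then
    /// commits to the aggregated recursion public values of the batch.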
    pub fn verify(
        builder: &mut Builder<C>,
        pcs: &TwoAdicFriPcsVariable<C>,
        machine: &StarkMachine<SC, RiscvAir<SC::Val>>,
        input: SP1RecursionMemoryLayoutVariable<C>,
    ) {
        let SP1RecursionMemoryLayoutVariable {
            vk,
            shard_proofs,
            leaf_challenger,
            initial_reconstruct_challenger,
            is_complete,
        } = input;

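        // Initialize the trackers for the shard number, execution shard, program counter, and
        // memory address bits. These are seeded from the first shard's public values and updated
        // as each proof is verified.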
        let initial_shard = builder.uninit();
        let current_shard = builder.uninit();

        let initial_execution_shard = builder.uninit();
        let current_execution_shard = builder.uninit();

        let start_pc = builder.uninit();
        let current_pc = builder.uninit();

        let initial_previous_init_addr_bits: [Felt<_>; 32] = array::from_fn(|_| builder.uninit());
        let initial_previous_finalize_addr_bits: [Felt<_>; 32] =
            array::from_fn(|_| builder.uninit());
        let current_init_addr_bits: [Felt<_>; 32] = array::from_fn(|_| builder.uninit());
        let current_finalize_addr_bits: [Felt<_>; 32] = array::from_fn(|_| builder.uninit());

        let exit_code: Felt<_> = builder.uninit();

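        // The committed value digest and deferred proofs digest carried across shards.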
        let committed_value_digest: [Word<Felt<_>>; PV_DIGEST_NUM_WORDS] =
            array::from_fn(|_| Word(array::from_fn(|_| builder.uninit())));

        let deferred_proofs_digest: [Felt<_>; POSEIDON_NUM_WORDS] =
            array::from_fn(|_| builder.uninit());

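        // Materialize the leaf challenger's public values and start the reconstruct challenger
        // from its witnessed initial state.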
        let leaf_challenger_public_values = get_challenger_public_values(builder, &leaf_challenger);
        let mut reconstruct_challenger: DuplexChallengerVariable<_> =
            initial_reconstruct_challenger.copy(builder);

        let cumulative_sum: Ext<_, _> = builder.eval(C::EF::zero().cons());

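        // Assert that the batch contains at least one shard proof.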
        builder.assert_usize_ne(shard_proofs.len(), 0);

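        // Verify each shard proof and enforce the transition constraints between shards.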
        builder.range(0, shard_proofs.len()).for_each(|i, builder| {
            let proof = builder.get(&shard_proofs, i);

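            // Determine whether the CPU, MemoryInit, and MemoryFinalize chips are included in the
            // shard by checking whether their sorted index is non-empty.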
            let contains_cpu: Var<_> = builder.eval(C::N::zero());
            let contains_memory_init: Var<_> = builder.eval(C::N::zero());
            let contains_memory_finalize: Var<_> = builder.eval(C::N::zero());
            for (i, chip) in machine.chips().iter().enumerate() {
                let index = builder.get(&proof.sorted_idxs, i);
                if chip.name() == "CPU" {
                    builder.if_ne(index, C::N::from_canonical_usize(EMPTY)).then(|builder| {
                        builder.assign(contains_cpu, C::N::one());
                    });
                } else if chip.name() == "MemoryInit" {
                    builder.if_ne(index, C::N::from_canonical_usize(EMPTY)).then(|builder| {
                        builder.assign(contains_memory_init, C::N::one());
                    });
                } else if chip.name() == "MemoryFinalize" {
                    builder.if_ne(index, C::N::from_canonical_usize(EMPTY)).then(|builder| {
                        builder.assign(contains_memory_finalize, C::N::one());
                    });
                }
            }

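            // Extract the shard's public values from the proof.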
            let mut pv_elements = Vec::new();
            for i in 0..machine.num_pv_elts() {
                let element = builder.get(&proof.public_values, i);
                pv_elements.push(element);
            }
            let public_values: &PublicValues<Word<Felt<_>>, Felt<_>> =
                pv_elements.as_slice().borrow();

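            // For the first proof in the batch, seed all trackers from the shard's public values.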
            builder.if_eq(i, C::N::zero()).then(|builder| {
                builder.assign(initial_shard, public_values.shard);
                builder.assign(current_shard, public_values.shard);

                builder.assign(initial_execution_shard, public_values.execution_shard);
                builder.assign(current_execution_shard, public_values.execution_shard);

                builder.assign(start_pc, public_values.start_pc);
                builder.assign(current_pc, public_values.start_pc);

                for ((bit, pub_bit), first_bit) in current_init_addr_bits
                    .iter()
                    .zip(public_values.previous_init_addr_bits.iter())
                    .zip(initial_previous_init_addr_bits.iter())
                {
                    builder.assign(*bit, *pub_bit);
                    builder.assign(*first_bit, *pub_bit);
                }
                for ((bit, pub_bit), first_bit) in current_finalize_addr_bits
                    .iter()
                    .zip(public_values.previous_finalize_addr_bits.iter())
                    .zip(initial_previous_finalize_addr_bits.iter())
                {
                    builder.assign(*bit, *pub_bit);
                    builder.assign(*first_bit, *pub_bit);
                }

                builder.assign(exit_code, public_values.exit_code);

                for (word, first_word) in committed_value_digest
                    .iter()
                    .zip_eq(public_values.committed_value_digest.iter())
                {
                    for (byte, first_byte) in word.0.iter().zip_eq(first_word.0.iter()) {
                        builder.assign(*byte, *first_byte);
                    }
                }

                for (digest, first_digest) in deferred_proofs_digest
                    .iter()
                    .zip_eq(public_values.deferred_proofs_digest.iter())
                {
                    builder.assign(*digest, *first_digest);
                }
            });

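            // If this is the first shard overall, the initial reconstruct challenger must equal a
            // fresh challenger that has observed only the verifying key's commitment and pc_start.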
            let shard = felt2var(builder, public_values.shard);
            builder.if_eq(shard, C::N::one()).then(|builder| {
                let mut first_initial_challenger = DuplexChallengerVariable::new(builder);
                first_initial_challenger.observe(builder, vk.commitment.clone());
                first_initial_challenger.observe(builder, vk.pc_start);
                initial_reconstruct_challenger.assert_eq(builder, &first_initial_challenger);
            });

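            // Verify the shard proof against a copy of the leaf challenger.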
            let mut challenger = leaf_challenger.copy(builder);
            StarkVerifier::<C, SC>::verify_shard(
                builder,
                &vk,
                pcs,
                machine,
                &mut challenger,
                &proof,
                false,
            );

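            // Assert that the first shard contains the CPU chip.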
            {
                builder.if_eq(shard, C::N::one()).then(|builder| {
                    builder.assert_var_eq(contains_cpu, C::N::one());
                });
            }

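            // If the shard contains the CPU chip, assert that its log degree is at most
            // MAX_CPU_LOG_DEGREE.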
            {
                for (i, chip) in machine.chips().iter().enumerate() {
                    if chip.name() == "CPU" {
                        builder.if_eq(contains_cpu, C::N::one()).then(|builder| {
                            let index = builder.get(&proof.sorted_idxs, i);
                            let cpu_log_degree =
                                builder.get(&proof.opened_values.chips, index).log_degree;
                            let cpu_log_degree_lt_max: Var<_> = builder.eval(C::N::zero());
                            builder.range(0, MAX_CPU_LOG_DEGREE + 1).for_each(|j, builder| {
                                builder.if_eq(j, cpu_log_degree).then(|builder| {
                                    builder.assign(cpu_log_degree_lt_max, C::N::one());
                                });
                            });
                            builder.assert_var_eq(cpu_log_degree_lt_max, C::N::one());
                        });
                    }
                }
            }

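            // Shard constraints: the shard's number must match the expected current shard, which
            // is then incremented for the next proof.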
            {
                builder.assert_felt_eq(current_shard, public_values.shard);

                builder.assign(current_shard, current_shard + C::F::one());
            }

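            // Execution shard constraints: shards that contain the CPU chip must match the
            // expected execution shard, which is incremented only for such shards.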
            let execution_shard = felt2var(builder, public_values.execution_shard);
            {
                builder.if_eq(contains_cpu, C::N::one()).then(|builder| {
                    builder.assert_felt_eq(current_execution_shard, public_values.execution_shard);
                });

                builder.if_eq(contains_cpu, C::N::one()).then(|builder| {
                    builder.assign(current_execution_shard, current_execution_shard + C::F::one());
                });
            }

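            // Program counter constraints: the first shard starts at vk.pc_start, every shard
            // starts where the previous one left off, shards without a CPU chip cannot change the
            // pc, and shards with a CPU chip cannot start at pc zero.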
            {
                builder.if_eq(shard, C::N::one()).then(|builder| {
                    builder.assert_felt_eq(public_values.start_pc, vk.pc_start);
                });

                builder.assert_felt_eq(current_pc, public_values.start_pc);

                builder.if_ne(contains_cpu, C::N::one()).then(|builder| {
                    builder.assert_felt_eq(public_values.start_pc, public_values.next_pc);
                });

                builder.if_eq(contains_cpu, C::N::one()).then(|builder| {
                    builder.assert_felt_ne(public_values.start_pc, C::F::zero());
                });

                builder.assign(current_pc, public_values.next_pc);
            }

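            // Exit code constraint: the exit code must be zero.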
            {
                builder.assert_felt_eq(exit_code, C::F::zero());
            }

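            // Memory initialize/finalize constraints: the address bits start at zero on the first
            // execution shard, match the tracked values on every shard, remain unchanged when the
            // corresponding memory chip is absent, and are then advanced to the shard's last
            // address bits.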
            {
                builder.if_eq(execution_shard, C::N::one()).then(|builder| {
                    for bit in current_init_addr_bits.iter() {
                        builder.assert_felt_eq(*bit, C::F::zero());
                    }

                    for bit in current_finalize_addr_bits.iter() {
                        builder.assert_felt_eq(*bit, C::F::zero());
                    }
                });

                for (bit, current_bit) in current_init_addr_bits
                    .iter()
                    .zip_eq(public_values.previous_init_addr_bits.iter())
                {
                    builder.assert_felt_eq(*bit, *current_bit);
                }

                for (bit, current_bit) in current_finalize_addr_bits
                    .iter()
                    .zip_eq(public_values.previous_finalize_addr_bits.iter())
                {
                    builder.assert_felt_eq(*bit, *current_bit);
                }

                builder.if_ne(contains_memory_init, C::N::one()).then(|builder| {
                    for (prev_bit, last_bit) in public_values
                        .previous_init_addr_bits
                        .iter()
                        .zip_eq(public_values.last_init_addr_bits.iter())
                    {
                        builder.assert_felt_eq(*prev_bit, *last_bit);
                    }
                });

                builder.if_ne(contains_memory_finalize, C::N::one()).then(|builder| {
                    for (prev_bit, last_bit) in public_values
                        .previous_finalize_addr_bits
                        .iter()
                        .zip_eq(public_values.last_finalize_addr_bits.iter())
                    {
                        builder.assert_felt_eq(*prev_bit, *last_bit);
                    }
                });

                for (bit, pub_bit) in
                    current_init_addr_bits.iter().zip(public_values.last_init_addr_bits.iter())
                {
                    builder.assign(*bit, *pub_bit);
                }

                for (bit, pub_bit) in current_finalize_addr_bits
                    .iter()
                    .zip(public_values.last_finalize_addr_bits.iter())
                {
                    builder.assign(*bit, *pub_bit);
                }
            }

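            // Digest constraints: once the committed value digest (or deferred proofs digest) is
            // nonzero, later shards must report the same value; shards without a CPU chip cannot
            // change it; and the tracker is then updated to the shard's value.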
            {
                let is_zero: Var<_> = builder.eval(C::N::one());
                #[allow(clippy::needless_range_loop)]
                for i in 0..committed_value_digest.len() {
                    for j in 0..WORD_SIZE {
                        let d = felt2var(builder, committed_value_digest[i][j]);
                        builder.if_ne(d, C::N::zero()).then(|builder| {
                            builder.assign(is_zero, C::N::zero());
                        });
                    }
                }
                builder.if_eq(is_zero, C::N::zero()).then(|builder| {
                    #[allow(clippy::needless_range_loop)]
                    for i in 0..committed_value_digest.len() {
                        for j in 0..WORD_SIZE {
                            builder.assert_felt_eq(
                                committed_value_digest[i][j],
                                public_values.committed_value_digest[i][j],
                            );
                        }
                    }
                });

                builder.if_ne(contains_cpu, C::N::one()).then(|builder| {
                    #[allow(clippy::needless_range_loop)]
                    for i in 0..committed_value_digest.len() {
                        for j in 0..WORD_SIZE {
                            builder.assert_felt_eq(
                                committed_value_digest[i][j],
                                public_values.committed_value_digest[i][j],
                            );
                        }
                    }
                });

                #[allow(clippy::needless_range_loop)]
                for i in 0..committed_value_digest.len() {
                    for j in 0..WORD_SIZE {
                        builder.assign(
                            committed_value_digest[i][j],
                            public_values.committed_value_digest[i][j],
                        );
                    }
                }

                let is_zero: Var<_> = builder.eval(C::N::one());
                #[allow(clippy::needless_range_loop)]
                for i in 0..deferred_proofs_digest.len() {
                    let d = felt2var(builder, deferred_proofs_digest[i]);
                    builder.if_ne(d, C::N::zero()).then(|builder| {
                        builder.assign(is_zero, C::N::zero());
                    });
                }
                builder.if_eq(is_zero, C::N::zero()).then(|builder| {
                    #[allow(clippy::needless_range_loop)]
                    for i in 0..deferred_proofs_digest.len() {
                        builder.assert_felt_eq(
                            deferred_proofs_digest[i],
                            public_values.deferred_proofs_digest[i],
                        );
                    }
                });

                builder.if_ne(contains_cpu, C::N::one()).then(|builder| {
                    #[allow(clippy::needless_range_loop)]
                    for i in 0..deferred_proofs_digest.len() {
                        builder.assert_felt_eq(
                            deferred_proofs_digest[i],
                            public_values.deferred_proofs_digest[i],
                        );
                    }
                });

                #[allow(clippy::needless_range_loop)]
                for i in 0..deferred_proofs_digest.len() {
                    builder
                        .assign(deferred_proofs_digest[i], public_values.deferred_proofs_digest[i]);
                }
            }

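            // Assert that the shard number fits in 16 bits.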
            builder.range_check_f(public_values.shard, 16);

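            // Update the reconstruct challenger with the shard's main commitment and public
            // values.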
            reconstruct_challenger.observe(builder, proof.commitment.main_commit.clone());
            for j in 0..machine.num_pv_elts() {
                let element = builder.get(&proof.public_values, j);
                reconstruct_challenger.observe(builder, element);
            }

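            // Accumulate the permutation-argument cumulative sums of all chips in the shard.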
            let opened_values = proof.opened_values.chips;
            builder.range(0, opened_values.len()).for_each(|k, builder| {
                let values = builder.get(&opened_values, k);
                let sum = values.cumulative_sum;
                builder.assign(cumulative_sum, cumulative_sum + sum);
            });
        });

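        // Collect the aggregated values into the recursion public values and commit to them.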
        {
            let vk_digest = hash_vkey(builder, &vk);
            let vk_digest: [Felt<_>; DIGEST_SIZE] = array::from_fn(|i| builder.get(&vk_digest, i));

            let initial_challenger_public_values =
                get_challenger_public_values(builder, &initial_reconstruct_challenger);
            let final_challenger_public_values =
                get_challenger_public_values(builder, &reconstruct_challenger);

            let cumulative_sum_array = builder.ext2felt(cumulative_sum);
            let cumulative_sum_array = array::from_fn(|i| builder.get(&cumulative_sum_array, i));

            let zero: Felt<_> = builder.eval(C::F::zero());
            let start_deferred_digest = [zero; POSEIDON_NUM_WORDS];
            let end_deferred_digest = [zero; POSEIDON_NUM_WORDS];

            let is_complete_felt = var2felt(builder, is_complete);

            let mut recursion_public_values_stream = [zero; RECURSIVE_PROOF_NUM_PV_ELTS];
            let recursion_public_values: &mut RecursionPublicValues<_> =
                recursion_public_values_stream.as_mut_slice().borrow_mut();
            recursion_public_values.committed_value_digest = committed_value_digest;
            recursion_public_values.deferred_proofs_digest = deferred_proofs_digest;
            recursion_public_values.start_pc = start_pc;
            recursion_public_values.next_pc = current_pc;
            recursion_public_values.start_shard = initial_shard;
            recursion_public_values.next_shard = current_shard;
            recursion_public_values.start_execution_shard = initial_execution_shard;
            recursion_public_values.next_execution_shard = current_execution_shard;
            recursion_public_values.previous_init_addr_bits = initial_previous_init_addr_bits;
            recursion_public_values.last_init_addr_bits = current_init_addr_bits;
            recursion_public_values.previous_finalize_addr_bits =
                initial_previous_finalize_addr_bits;
            recursion_public_values.last_finalize_addr_bits = current_finalize_addr_bits;
            recursion_public_values.sp1_vk_digest = vk_digest;
            recursion_public_values.leaf_challenger = leaf_challenger_public_values;
            recursion_public_values.start_reconstruct_challenger = initial_challenger_public_values;
            recursion_public_values.end_reconstruct_challenger = final_challenger_public_values;
            recursion_public_values.cumulative_sum = cumulative_sum_array;
            recursion_public_values.start_reconstruct_deferred_digest = start_deferred_digest;
            recursion_public_values.end_reconstruct_deferred_digest = end_deferred_digest;
            recursion_public_values.exit_code = exit_code;
            recursion_public_values.is_complete = is_complete_felt;

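            // If the batch is marked complete, assert the conditions required for a fully
            // verified execution.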
            builder.if_eq(is_complete, C::N::one()).then(|builder| {
                assert_complete(builder, recursion_public_values, &reconstruct_challenger)
            });

            commit_public_values(builder, recursion_public_values);
        }
    }
}