use std::{
    array,
    borrow::{Borrow, BorrowMut},
    marker::PhantomData,
};

use crate::machine::utils::assert_complete;
use itertools::{izip, Itertools};
use p3_air::Air;
use p3_baby_bear::BabyBear;
use p3_commit::TwoAdicMultiplicativeCoset;
use p3_field::{AbstractField, PrimeField32, TwoAdicField};
use serde::{Deserialize, Serialize};
use sp1_primitives::{consts::WORD_SIZE, types::RecursionProgramType};
use sp1_recursion_compiler::{
    config::InnerConfig,
    ir::{Array, Builder, Config, Felt, Var},
    prelude::DslVariable,
};
use sp1_recursion_core::{
    air::{RecursionPublicValues, RECURSIVE_PROOF_NUM_PV_ELTS},
    runtime::{RecursionProgram, D, DIGEST_SIZE},
};
use sp1_stark::{
    baby_bear_poseidon2::BabyBearPoseidon2, Com, ShardProof, StarkMachine, StarkVerifyingKey, Word,
};

use sp1_recursion_compiler::prelude::*;
use sp1_stark::{
    air::{MachineAir, POSEIDON_NUM_WORDS, PV_DIGEST_NUM_WORDS},
    StarkGenericConfig,
};

use crate::{
    challenger::{CanObserveVariable, DuplexChallengerVariable},
    fri::TwoAdicFriPcsVariable,
    hints::Hintable,
    stark::{RecursiveVerifierConstraintFolder, StarkVerifier},
    types::{ShardProofVariable, VerifyingKeyVariable},
    utils::{
        assert_challenger_eq_pv, assign_challenger_from_pv, const_fri_config, felt2var,
        get_challenger_public_values, hash_vkey,
    },
};

use super::utils::{commit_public_values, proof_data_from_vk, verify_public_values_hash};

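/// A program that recursively verifies a batch of recursive proofs and aggregates their public
/// values into a single set of reduce public values.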
#[derive(Debug, Clone, Copy)]
pub struct SP1CompressVerifier<C: Config, SC: StarkGenericConfig, A> {
    _phantom: PhantomData<(C, SC, A)>,
}

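/// The kind of program that produced each proof in the batch. It determines which verifying key
/// is used in `verify`: `Core` uses the recursive vk, `Deferred` the deferred vk, and `Reduce`
/// the compress vk itself.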
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum ReduceProgramType {
    Core = 0,
    Deferred = 1,
    Reduce = 2,
}

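/// The input layout for the compress verifier, supplied by the host as the witness.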
pub struct SP1CompressMemoryLayout<'a, SC: StarkGenericConfig, A: MachineAir<SC::Val>> {
    pub compress_vk: &'a StarkVerifyingKey<SC>,
    pub recursive_machine: &'a StarkMachine<SC, A>,
    pub shard_proofs: Vec<ShardProof<SC>>,
    pub is_complete: bool,
    pub kinds: Vec<ReduceProgramType>,
}

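/// The in-circuit counterpart of [`SP1CompressMemoryLayout`], populated from the witness stream
/// in [`SP1CompressVerifier::build`].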
#[derive(DslVariable, Clone)]
pub struct SP1CompressMemoryLayoutVariable<C: Config> {
    pub compress_vk: VerifyingKeyVariable<C>,
    pub shard_proofs: Array<C, ShardProofVariable<C>>,
    pub kinds: Array<C, Var<C::N>>,
    pub is_complete: Var<C::N>,
}

impl<A> SP1CompressVerifier<InnerConfig, BabyBearPoseidon2, A>
where
    A: MachineAir<BabyBear> + for<'a> Air<RecursiveVerifierConstraintFolder<'a, InnerConfig>>,
{
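    /// Create a program that verifies a batch of compress-layer proofs: it reads the
    /// [`SP1CompressMemoryLayoutVariable`] from the witness stream, verifies it with
    /// [`Self::verify`], and compiles the result into a [`RecursionProgram`].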
    pub fn build(
        machine: &StarkMachine<BabyBearPoseidon2, A>,
        recursive_vk: &StarkVerifyingKey<BabyBearPoseidon2>,
        deferred_vk: &StarkVerifyingKey<BabyBearPoseidon2>,
    ) -> RecursionProgram<BabyBear> {
        let mut builder = Builder::<InnerConfig>::new(RecursionProgramType::Compress);

        let input: SP1CompressMemoryLayoutVariable<_> = builder.uninit();
        SP1CompressMemoryLayout::<BabyBearPoseidon2, A>::witness(&input, &mut builder);

        let pcs = TwoAdicFriPcsVariable {
            config: const_fri_config(&mut builder, machine.config().pcs().fri_config()),
        };
        SP1CompressVerifier::verify(&mut builder, &pcs, machine, input, recursive_vk, deferred_vk);

        builder.halt();

        builder.compile_program()
    }
}

impl<C: Config, SC, A> SP1CompressVerifier<C, SC, A>
where
    C::F: PrimeField32 + TwoAdicField,
    SC: StarkGenericConfig<
        Val = C::F,
        Challenge = C::EF,
        Domain = TwoAdicMultiplicativeCoset<C::F>,
    >,
    A: MachineAir<C::F> + for<'a> Air<RecursiveVerifierConstraintFolder<'a, C>>,
    Com<SC>: Into<[SC::Val; DIGEST_SIZE]>,
{
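    /// Verify a batch of recursive proofs and aggregate their public values.
    ///
    /// For each proof, the verifier selects the verifying key implied by its
    /// [`ReduceProgramType`], verifies the shard proof, and checks that consecutive proofs chain
    /// correctly (pc, shard, address bits, challengers, and digests). The aggregated values are
    /// then committed as this program's public values.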
    pub fn verify(
        builder: &mut Builder<C>,
        pcs: &TwoAdicFriPcsVariable<C>,
        machine: &StarkMachine<SC, A>,
        input: SP1CompressMemoryLayoutVariable<C>,
        recursive_vk: &StarkVerifyingKey<SC>,
        deferred_vk: &StarkVerifyingKey<SC>,
    ) {
        let SP1CompressMemoryLayoutVariable { compress_vk, shard_proofs, kinds, is_complete } =
            input;

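        // Initialize the public values that this program commits to at the end.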
        let mut reduce_public_values_stream: Vec<Felt<_>> =
            (0..RECURSIVE_PROOF_NUM_PV_ELTS).map(|_| builder.uninit()).collect();
        let reduce_public_values: &mut RecursionPublicValues<_> =
            reduce_public_values_stream.as_mut_slice().borrow_mut();

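        // Compute the digest of compress_vk and write it into the public values.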
        let compress_vk_digest = hash_vkey(builder, &compress_vk);

        reduce_public_values.compress_vk_digest =
            array::from_fn(|i| builder.get(&compress_vk_digest, i));

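        // Assert that there is at least one proof.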
        builder.assert_usize_ne(shard_proofs.len(), 0);

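        // Assert that the number of proofs matches the number of program kinds.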
        builder.assert_usize_eq(shard_proofs.len(), kinds.len());

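        // Initialize the running values that are threaded through the loop to check consistency
        // between consecutive proofs.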
        let sp1_vk_digest: [Felt<_>; DIGEST_SIZE] = array::from_fn(|_| builder.uninit());
        let pc: Felt<_> = builder.uninit();
        let shard: Felt<_> = builder.uninit();
        let execution_shard: Felt<_> = builder.uninit();
        let mut initial_reconstruct_challenger = DuplexChallengerVariable::new(builder);
        let mut reconstruct_challenger = DuplexChallengerVariable::new(builder);
        let mut leaf_challenger = DuplexChallengerVariable::new(builder);
        let committed_value_digest: [Word<Felt<_>>; PV_DIGEST_NUM_WORDS] =
            array::from_fn(|_| Word(array::from_fn(|_| builder.uninit())));
        let deferred_proofs_digest: [Felt<_>; POSEIDON_NUM_WORDS] =
            array::from_fn(|_| builder.uninit());
        let reconstruct_deferred_digest: [Felt<_>; POSEIDON_NUM_WORDS] =
            core::array::from_fn(|_| builder.uninit());
        let cumulative_sum: [Felt<_>; D] = core::array::from_fn(|_| builder.eval(C::F::zero()));
        let init_addr_bits: [Felt<_>; 32] = core::array::from_fn(|_| builder.uninit());
        let finalize_addr_bits: [Felt<_>; 32] = core::array::from_fn(|_| builder.uninit());

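        // Convert the recursive and deferred verifying keys into in-circuit variables.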
        let recursive_vk_variable = proof_data_from_vk(builder, recursive_vk, machine);
        let deferred_vk_variable = proof_data_from_vk(builder, deferred_vk, machine);

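        // The field encodings of the three proof kinds.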
        let core_kind = C::N::from_canonical_u32(ReduceProgramType::Core as u32);
        let deferred_kind = C::N::from_canonical_u32(ReduceProgramType::Deferred as u32);
        let reduce_kind = C::N::from_canonical_u32(ReduceProgramType::Reduce as u32);

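        // Verify each proof, check pairwise consistency, and aggregate the public values.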
        builder.range(0, shard_proofs.len()).for_each(|i, builder| {
            let proof = builder.get(&shard_proofs, i);

            let kind = builder.get(&kinds, i);

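            // Select the verifying key that corresponds to the proof's kind; any other kind is
            // an error.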
            let vk: VerifyingKeyVariable<_> = builder.uninit();

            builder.if_eq(kind, core_kind).then_or_else(
                |builder| {
                    builder.assign(vk.clone(), recursive_vk_variable.clone());
                },
                |builder| {
                    builder.if_eq(kind, deferred_kind).then_or_else(
                        |builder| {
                            builder.assign(vk.clone(), deferred_vk_variable.clone());
                        },
                        |builder| {
                            builder.if_eq(kind, reduce_kind).then_or_else(
                                |builder| {
                                    builder.assign(vk.clone(), compress_vk.clone());
                                },
                                |builder| {
                                    builder.error();
                                },
                            );
                        },
                    );
                },
            );

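            // Observe the verifying key and the proof's commitment and public values with a
            // fresh challenger.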
            let mut challenger = DuplexChallengerVariable::new(builder);

            challenger.observe(builder, vk.commitment.clone());
            challenger.observe(builder, vk.pc_start);

            challenger.observe(builder, proof.commitment.main_commit.clone());
            for j in 0..machine.num_pv_elts() {
                let element = builder.get(&proof.public_values, j);
                challenger.observe(builder, element);
            }

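            // Verify the shard proof.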
            StarkVerifier::<C, SC>::verify_shard(
                builder,
                &vk,
                pcs,
                machine,
                &mut challenger,
                &proof,
                true,
            );

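            // Extract the proof's recursion public values.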
            let current_public_values_elements = (0..RECURSIVE_PROOF_NUM_PV_ELTS)
                .map(|i| builder.get(&proof.public_values, i))
                .collect::<Vec<Felt<_>>>();

            let current_public_values: &RecursionPublicValues<Felt<C::F>> =
                current_public_values_elements.as_slice().borrow();

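            // Check that the claimed hash of the public values is correct.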
            verify_public_values_hash(builder, current_public_values);

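            // For the first proof in the batch, initialize the running values (and the start
            // values of the reduce public values) from this proof's starting public values.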
            builder.if_eq(i, C::N::zero()).then(|builder| {
                for (digest, current_digest, global_digest) in izip!(
                    reconstruct_deferred_digest.iter(),
                    current_public_values.start_reconstruct_deferred_digest.iter(),
                    reduce_public_values.start_reconstruct_deferred_digest.iter()
                ) {
                    builder.assign(*digest, *current_digest);
                    builder.assign(*global_digest, *current_digest);
                }

                for (digest, first_digest) in
                    sp1_vk_digest.iter().zip(current_public_values.sp1_vk_digest)
                {
                    builder.assign(*digest, first_digest);
                }

                builder.assign(reduce_public_values.start_pc, current_public_values.start_pc);
                builder.assign(pc, current_public_values.start_pc);

                builder.assign(shard, current_public_values.start_shard);
                builder.assign(reduce_public_values.start_shard, current_public_values.start_shard);

                builder.assign(execution_shard, current_public_values.start_execution_shard);
                builder.assign(
                    reduce_public_values.start_execution_shard,
                    current_public_values.start_execution_shard,
                );

                for (bit, (first_bit, current_bit)) in init_addr_bits.iter().zip(
                    reduce_public_values
                        .previous_init_addr_bits
                        .iter()
                        .zip(current_public_values.previous_init_addr_bits.iter()),
                ) {
                    builder.assign(*bit, *current_bit);
                    builder.assign(*first_bit, *current_bit);
                }

                for (bit, (first_bit, current_bit)) in finalize_addr_bits.iter().zip(
                    reduce_public_values
                        .previous_finalize_addr_bits
                        .iter()
                        .zip(current_public_values.previous_finalize_addr_bits.iter()),
                ) {
                    builder.assign(*bit, *current_bit);
                    builder.assign(*first_bit, *current_bit);
                }

                assign_challenger_from_pv(
                    builder,
                    &mut leaf_challenger,
                    current_public_values.leaf_challenger,
                );

                assign_challenger_from_pv(
                    builder,
                    &mut initial_reconstruct_challenger,
                    current_public_values.start_reconstruct_challenger,
                );
                assign_challenger_from_pv(
                    builder,
                    &mut reconstruct_challenger,
                    current_public_values.start_reconstruct_challenger,
                );

                for (word, current_word) in committed_value_digest
                    .iter()
                    .zip_eq(current_public_values.committed_value_digest.iter())
                {
                    for (byte, current_byte) in word.0.iter().zip_eq(current_word.0.iter()) {
                        builder.assign(*byte, *current_byte);
                    }
                }

                for (digest, current_digest) in deferred_proofs_digest
                    .iter()
                    .zip_eq(current_public_values.deferred_proofs_digest.iter())
                {
                    builder.assign(*digest, *current_digest);
                }
            });

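            // Assert that the current proof starts where the previous one ended: the deferred
            // digest, vk digest, pc, shard, execution shard, address bits, and challengers must
            // all match the running values.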
            for (digest, current_digest) in reconstruct_deferred_digest
                .iter()
                .zip_eq(current_public_values.start_reconstruct_deferred_digest.iter())
            {
                builder.assert_felt_eq(*digest, *current_digest);
            }

            for (digest, current) in sp1_vk_digest.iter().zip(current_public_values.sp1_vk_digest) {
                builder.assert_felt_eq(*digest, current);
            }

            builder.assert_felt_eq(pc, current_public_values.start_pc);

            builder.assert_felt_eq(shard, current_public_values.start_shard);

            builder.assert_felt_eq(execution_shard, current_public_values.start_execution_shard);

            for (bit, current_bit) in
                init_addr_bits.iter().zip(current_public_values.previous_init_addr_bits.iter())
            {
                builder.assert_felt_eq(*bit, *current_bit);
            }

            for (bit, current_bit) in finalize_addr_bits
                .iter()
                .zip(current_public_values.previous_finalize_addr_bits.iter())
            {
                builder.assert_felt_eq(*bit, *current_bit);
            }

            assert_challenger_eq_pv(
                builder,
                &leaf_challenger,
                current_public_values.leaf_challenger,
            );
            assert_challenger_eq_pv(
                builder,
                &reconstruct_challenger,
                current_public_values.start_reconstruct_challenger,
            );

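            // If the running committed value digest (or deferred proofs digest) is already
            // nonzero, the current proof's value must equal it; in either case the running value
            // is then overwritten with the current proof's value.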
            {
                let is_zero: Var<_> = builder.eval(C::N::one());
                #[allow(clippy::needless_range_loop)]
                for i in 0..committed_value_digest.len() {
                    for j in 0..WORD_SIZE {
                        let d = felt2var(builder, committed_value_digest[i][j]);
                        builder.if_ne(d, C::N::zero()).then(|builder| {
                            builder.assign(is_zero, C::N::zero());
                        });
                    }
                }
                builder.if_eq(is_zero, C::N::zero()).then(|builder| {
                    #[allow(clippy::needless_range_loop)]
                    for i in 0..committed_value_digest.len() {
                        for j in 0..WORD_SIZE {
                            builder.assert_felt_eq(
                                committed_value_digest[i][j],
                                current_public_values.committed_value_digest[i][j],
                            );
                        }
                    }
                });

                #[allow(clippy::needless_range_loop)]
                for i in 0..committed_value_digest.len() {
                    for j in 0..WORD_SIZE {
                        builder.assign(
                            committed_value_digest[i][j],
                            current_public_values.committed_value_digest[i][j],
                        );
                    }
                }

                let is_zero: Var<_> = builder.eval(C::N::one());
                #[allow(clippy::needless_range_loop)]
                for i in 0..deferred_proofs_digest.len() {
                    let d = felt2var(builder, deferred_proofs_digest[i]);
                    builder.if_ne(d, C::N::zero()).then(|builder| {
                        builder.assign(is_zero, C::N::zero());
                    });
                }
                builder.if_eq(is_zero, C::N::zero()).then(|builder| {
                    #[allow(clippy::needless_range_loop)]
                    for i in 0..deferred_proofs_digest.len() {
                        builder.assert_felt_eq(
                            deferred_proofs_digest[i],
                            current_public_values.deferred_proofs_digest[i],
                        );
                    }
                });

                #[allow(clippy::needless_range_loop)]
                for i in 0..deferred_proofs_digest.len() {
                    builder.assign(
                        deferred_proofs_digest[i],
                        current_public_values.deferred_proofs_digest[i],
                    );
                }
            }

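            // Update the running values to this proof's ending values so the next iteration
            // checks against them.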
            for (digest, current_digest) in reconstruct_deferred_digest
                .iter()
                .zip_eq(current_public_values.end_reconstruct_deferred_digest.iter())
            {
                builder.assign(*digest, *current_digest);
            }

            builder.assign(pc, current_public_values.next_pc);

            builder.assign(shard, current_public_values.next_shard);

            builder.assign(execution_shard, current_public_values.next_execution_shard);

            for (bit, next_bit) in
                init_addr_bits.iter().zip(current_public_values.last_init_addr_bits.iter())
            {
                builder.assign(*bit, *next_bit);
            }

            for (bit, next_bit) in
                finalize_addr_bits.iter().zip(current_public_values.last_finalize_addr_bits.iter())
            {
                builder.assign(*bit, *next_bit);
            }

            assign_challenger_from_pv(
                builder,
                &mut reconstruct_challenger,
                current_public_values.end_reconstruct_challenger,
            );

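            // Accumulate this proof's cumulative sum.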
            for (sum_element, current_sum_element) in
                cumulative_sum.iter().zip_eq(current_public_values.cumulative_sum.iter())
            {
                builder.assign(*sum_element, *sum_element + *current_sum_element);
            }
        });

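        // Write the aggregated values into the reduce public values.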
        reduce_public_values.sp1_vk_digest = sp1_vk_digest;
        reduce_public_values.next_pc = pc;
        reduce_public_values.next_shard = shard;
        reduce_public_values.next_execution_shard = execution_shard;
        reduce_public_values.last_init_addr_bits = init_addr_bits;
        reduce_public_values.last_finalize_addr_bits = finalize_addr_bits;
        let values = get_challenger_public_values(builder, &leaf_challenger);
        reduce_public_values.leaf_challenger = values;
        let values = get_challenger_public_values(builder, &initial_reconstruct_challenger);
        reduce_public_values.start_reconstruct_challenger = values;
        let values = get_challenger_public_values(builder, &reconstruct_challenger);
        reduce_public_values.end_reconstruct_challenger = values;
        reduce_public_values.end_reconstruct_deferred_digest = reconstruct_deferred_digest;
        reduce_public_values.deferred_proofs_digest = deferred_proofs_digest;
        reduce_public_values.committed_value_digest = committed_value_digest;
        reduce_public_values.cumulative_sum = cumulative_sum;

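        // Set the `is_complete` flag and, if the batch is claimed complete, check the
        // completeness conditions.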
        builder.if_eq(is_complete, C::N::one()).then_or_else(
            |builder| {
                builder.assign(reduce_public_values.is_complete, C::F::one());
                assert_complete(builder, reduce_public_values, &reconstruct_challenger)
            },
            |builder| {
                builder.assert_var_eq(is_complete, C::N::zero());
                builder.assign(reduce_public_values.is_complete, C::F::zero());
            },
        );

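        // Commit the public values.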
        commit_public_values(builder, reduce_public_values);
    }
}