1use deepsize2::DeepSizeOf;
2use hashbrown::HashMap;
3use slop_air::AirBuilder;
4use slop_algebra::{AbstractField, Field, PrimeField, PrimeField32};
5use sp1_hypercube::{
6 air::{
7 AirInteraction, BaseAirBuilder, InteractionScope, MachineAir, PublicValues, SP1AirBuilder,
8 PROOF_NONCE_NUM_WORDS, PV_DIGEST_NUM_WORDS, SP1_PROOF_NUM_PV_ELTS,
9 },
10 septic_digest::SepticDigest,
11 shape::Shape,
12 InteractionKind, MachineRecord,
13};
14use std::{
15 borrow::Borrow,
16 iter::once,
17 mem::take,
18 sync::{Arc, Mutex},
19};
20
21use serde::{Deserialize, Serialize};
22
23use crate::{
24 events::{
25 AluEvent, BranchEvent, ByteLookupEvent, ByteRecord, GlobalInteractionEvent,
26 InstructionDecodeEvent, InstructionFetchEvent, JumpEvent, MemInstrEvent,
27 MemoryInitializeFinalizeEvent, MemoryLocalEvent, MemoryRecordEnum,
28 PageProtInitializeFinalizeEvent, PageProtLocalEvent, PrecompileEvent, PrecompileEvents,
29 SyscallEvent, UTypeEvent,
30 },
31 program::Program,
32 ByteOpcode, Instruction, RetainedEventsPreset, RiscvAirId, SplitOpts, SyscallCode,
33};
34
/// A record of all events that occurred during the execution of a program (or one shard of it).
///
/// Events are grouped by the chip that will consume them. The record also carries the public
/// values and sharding bookkeeping used when proofs are generated from it.
#[derive(Clone, Debug, Serialize, Deserialize, Default, DeepSizeOf)]
pub struct ExecutionRecord {
    /// The program being executed.
    pub program: Arc<Program>,
    /// The number of CPU events in this record (see `contains_cpu`).
    pub cpu_event_count: u32,
    /// ADD events, paired with their operand access records.
    pub add_events: Vec<(AluEvent, RTypeRecord)>,
    /// ADDW events (word-width add), paired with their operand access records.
    pub addw_events: Vec<(AluEvent, ALUTypeRecord)>,
    /// ADDI events (add-immediate), paired with their operand access records.
    pub addi_events: Vec<(AluEvent, ITypeRecord)>,
    /// MUL events.
    pub mul_events: Vec<(AluEvent, RTypeRecord)>,
    /// SUB events.
    pub sub_events: Vec<(AluEvent, RTypeRecord)>,
    /// SUBW events (word-width subtract).
    pub subw_events: Vec<(AluEvent, RTypeRecord)>,
    /// Bitwise (AND/OR/XOR-style) events.
    pub bitwise_events: Vec<(AluEvent, ALUTypeRecord)>,
    /// Shift-left events.
    pub shift_left_events: Vec<(AluEvent, ALUTypeRecord)>,
    /// Shift-right events.
    pub shift_right_events: Vec<(AluEvent, ALUTypeRecord)>,
    /// Division/remainder events.
    pub divrem_events: Vec<(AluEvent, RTypeRecord)>,
    /// Less-than comparison events.
    pub lt_events: Vec<(AluEvent, ALUTypeRecord)>,
    /// Byte-load memory instruction events.
    pub memory_load_byte_events: Vec<(MemInstrEvent, ITypeRecord)>,
    /// Half-word-load memory instruction events.
    pub memory_load_half_events: Vec<(MemInstrEvent, ITypeRecord)>,
    /// Word-load memory instruction events.
    pub memory_load_word_events: Vec<(MemInstrEvent, ITypeRecord)>,
    /// Load events whose destination is register x0 (tracked separately).
    pub memory_load_x0_events: Vec<(MemInstrEvent, ITypeRecord)>,
    /// Double-word-load memory instruction events.
    pub memory_load_double_events: Vec<(MemInstrEvent, ITypeRecord)>,
    /// Byte-store memory instruction events.
    pub memory_store_byte_events: Vec<(MemInstrEvent, ITypeRecord)>,
    /// Half-word-store memory instruction events.
    pub memory_store_half_events: Vec<(MemInstrEvent, ITypeRecord)>,
    /// Word-store memory instruction events.
    pub memory_store_word_events: Vec<(MemInstrEvent, ITypeRecord)>,
    /// Double-word-store memory instruction events.
    pub memory_store_double_events: Vec<(MemInstrEvent, ITypeRecord)>,
    /// U-type instruction events (e.g. LUI/AUIPC-style).
    pub utype_events: Vec<(UTypeEvent, JTypeRecord)>,
    /// Branch instruction events.
    pub branch_events: Vec<(BranchEvent, ITypeRecord)>,
    /// JAL jump events.
    pub jal_events: Vec<(JumpEvent, JTypeRecord)>,
    /// JALR jump events.
    pub jalr_events: Vec<(JumpEvent, ITypeRecord)>,
    /// Byte lookup events with their multiplicities.
    pub byte_lookups: HashMap<ByteLookupEvent, usize>,
    /// Precompile (syscall) events, keyed by syscall code.
    pub precompile_events: PrecompileEvents,
    /// Global memory initialization events.
    pub global_memory_initialize_events: Vec<MemoryInitializeFinalizeEvent>,
    /// Global memory finalization events.
    pub global_memory_finalize_events: Vec<MemoryInitializeFinalizeEvent>,
    /// Global page-protection initialization events.
    pub global_page_prot_initialize_events: Vec<PageProtInitializeFinalizeEvent>,
    /// Global page-protection finalization events.
    pub global_page_prot_finalize_events: Vec<PageProtInitializeFinalizeEvent>,
    /// Local memory accesses made by the CPU (as opposed to precompiles).
    pub cpu_local_memory_access: Vec<MemoryLocalEvent>,
    /// Local page-protection accesses made by the CPU.
    pub cpu_local_page_prot_access: Vec<PageProtLocalEvent>,
    /// Syscall events, paired with their operand access records.
    pub syscall_events: Vec<(SyscallEvent, RTypeRecord)>,
    /// Global interaction events.
    pub global_interaction_events: Vec<GlobalInteractionEvent>,
    /// Instruction-fetch events with the accompanying memory accesses.
    pub instruction_fetch_events: Vec<(InstructionFetchEvent, MemoryAccessRecord)>,
    /// Instruction-decode events.
    pub instruction_decode_events: Vec<InstructionDecodeEvent>,
    /// The global cumulative sum digest, shared across threads.
    pub global_cumulative_sum: Arc<Mutex<SepticDigest<u32>>>,
    /// The number of global interaction events.
    pub global_interaction_event_count: u32,
    /// Memory "bump" events. NOTE(review): tuple element semantics are not visible in this
    /// file — confirm against the executor.
    pub bump_memory_events: Vec<(MemoryRecordEnum, u64, bool)>,
    /// State "bump" events. NOTE(review): tuple element semantics are not visible here.
    pub bump_state_events: Vec<(u64, u64, bool, u64)>,
    /// The public values of this shard.
    pub public_values: PublicValues<u32, u64, u64, u32>,
    /// The next nonce to use.
    pub next_nonce: u64,
    /// The shape of this record, if one has been fixed.
    pub shape: Option<Shape<RiscvAirId>>,
    /// An estimate of the total trace area this record will produce.
    pub estimated_trace_area: u64,
    /// The timestamp at the start of this shard.
    pub initial_timestamp: u64,
    /// The timestamp at the end of this shard.
    pub last_timestamp: u64,
    /// The starting program counter, if known.
    pub pc_start: Option<u64>,
    /// The program counter after the last instruction of this shard.
    pub next_pc: u64,
    /// The program's exit code.
    pub exit_code: u32,
    /// Whether the global-dependencies optimization is enabled.
    pub global_dependencies_opt: bool,
}
145
impl ExecutionRecord {
    /// Creates a new [`ExecutionRecord`] for `program`, storing the proof nonce into the
    /// public values and recording the global-dependencies optimization flag.
    #[must_use]
    pub fn new(
        program: Arc<Program>,
        proof_nonce: [u32; PROOF_NONCE_NUM_WORDS],
        global_dependencies_opt: bool,
    ) -> Self {
        let mut result = Self { program, ..Default::default() };
        result.public_values.proof_nonce = proof_nonce;
        result.global_dependencies_opt = global_dependencies_opt;
        result
    }

    /// Like [`ExecutionRecord::new`], but pre-reserves `reservation_size` slots in the event
    /// vectors (and the byte-lookup map) to avoid repeated reallocation during execution.
    #[must_use]
    pub fn new_preallocated(
        program: Arc<Program>,
        proof_nonce: [u32; PROOF_NONCE_NUM_WORDS],
        global_dependencies_opt: bool,
        reservation_size: usize,
    ) -> Self {
        let mut result = Self { program, ..Default::default() };

        result.add_events.reserve(reservation_size);
        result.addi_events.reserve(reservation_size);
        result.addw_events.reserve(reservation_size);
        result.mul_events.reserve(reservation_size);
        result.sub_events.reserve(reservation_size);
        result.subw_events.reserve(reservation_size);
        result.bitwise_events.reserve(reservation_size);
        result.shift_left_events.reserve(reservation_size);
        result.shift_right_events.reserve(reservation_size);
        result.divrem_events.reserve(reservation_size);
        result.lt_events.reserve(reservation_size);
        result.branch_events.reserve(reservation_size);
        result.jal_events.reserve(reservation_size);
        result.jalr_events.reserve(reservation_size);
        result.utype_events.reserve(reservation_size);
        result.memory_load_x0_events.reserve(reservation_size);
        result.memory_load_byte_events.reserve(reservation_size);
        result.memory_load_half_events.reserve(reservation_size);
        result.memory_load_word_events.reserve(reservation_size);
        result.memory_load_double_events.reserve(reservation_size);
        result.memory_store_byte_events.reserve(reservation_size);
        result.memory_store_half_events.reserve(reservation_size);
        result.memory_store_word_events.reserve(reservation_size);
        result.memory_store_double_events.reserve(reservation_size);
        result.global_memory_initialize_events.reserve(reservation_size);
        result.global_memory_finalize_events.reserve(reservation_size);
        result.global_interaction_events.reserve(reservation_size);
        result.byte_lookups.reserve(reservation_size);

        result.public_values.proof_nonce = proof_nonce;
        result.global_dependencies_opt = global_dependencies_opt;
        result
    }

    /// Moves the deferrable events (precompile events and the global memory / page-protection
    /// init/finalize events) out of `self` into a fresh record, and returns that record.
    ///
    /// Precompile events whose syscall codes are covered by `retain_presets` are pulled back
    /// into `self` instead of being deferred.
    #[must_use]
    pub fn defer<'a>(
        &mut self,
        retain_presets: impl IntoIterator<Item = &'a RetainedEventsPreset>,
    ) -> ExecutionRecord {
        let mut execution_record = ExecutionRecord::new(
            self.program.clone(),
            self.public_values.proof_nonce,
            self.global_dependencies_opt,
        );
        // Move all precompile events into the deferred record first...
        execution_record.precompile_events = std::mem::take(&mut self.precompile_events);

        // ...then take back the retained syscall codes from the deferred record.
        self.precompile_events.events.extend(
            retain_presets.into_iter().flat_map(RetainedEventsPreset::syscall_codes).filter_map(
                |code| execution_record.precompile_events.events.remove(code).map(|x| (*code, x)),
            ),
        );

        execution_record.global_memory_initialize_events =
            std::mem::take(&mut self.global_memory_initialize_events);
        execution_record.global_memory_finalize_events =
            std::mem::take(&mut self.global_memory_finalize_events);
        execution_record.global_page_prot_initialize_events =
            std::mem::take(&mut self.global_page_prot_initialize_events);
        execution_record.global_page_prot_finalize_events =
            std::mem::take(&mut self.global_page_prot_finalize_events);
        execution_record
    }

    /// Splits the deferred events in `self` into shard-sized [`ExecutionRecord`]s.
    ///
    /// Precompile events are chunked per syscall using the thresholds in `opts`; when `done`
    /// is false the remainder is kept in `self` for a later call. When `done` is true, the
    /// global memory and page-protection init/finalize events are also sharded (packed into
    /// `last_record` instead when `can_pack_global_memory` holds and no precompile shard was
    /// produced).
    #[allow(clippy::too_many_lines)]
    pub fn split(
        &mut self,
        done: bool,
        last_record: &mut ExecutionRecord,
        can_pack_global_memory: bool,
        opts: &SplitOpts,
    ) -> Vec<ExecutionRecord> {
        let mut shards = Vec::new();

        let precompile_events = take(&mut self.precompile_events);

        for (syscall_code, events) in precompile_events.into_iter() {
            let threshold: usize = opts.syscall_threshold[syscall_code];

            let chunks = events.chunks_exact(threshold);
            if done {
                // On the final call, the sub-threshold remainder becomes its own shard.
                let remainder = chunks.remainder().to_vec();
                if !remainder.is_empty() {
                    let mut execution_record = ExecutionRecord::new(
                        self.program.clone(),
                        self.public_values.proof_nonce,
                        self.global_dependencies_opt,
                    );
                    execution_record.precompile_events.insert(syscall_code, remainder);
                    execution_record.public_values.update_initialized_state(
                        self.program.pc_start_abs,
                        self.program.enable_untrusted_programs,
                    );
                    shards.push(execution_record);
                }
            } else {
                // Not done yet: keep the remainder in `self` for the next call.
                self.precompile_events.insert(syscall_code, chunks.remainder().to_vec());
            }
            // Each full chunk becomes one shard.
            let mut event_shards = chunks
                .map(|chunk| {
                    let mut execution_record = ExecutionRecord::new(
                        self.program.clone(),
                        self.public_values.proof_nonce,
                        self.global_dependencies_opt,
                    );
                    execution_record.precompile_events.insert(syscall_code, chunk.to_vec());
                    execution_record.public_values.update_initialized_state(
                        self.program.pc_start_abs,
                        self.program.enable_untrusted_programs,
                    );
                    execution_record
                })
                .collect::<Vec<_>>();
            shards.append(&mut event_shards);
        }

        if done {
            // Pack the memory events into `last_record` only if the caller allows it and no
            // precompile shards were produced above.
            let pack_memory_events_into_last_record = can_pack_global_memory && shards.is_empty();
            let mut blank_record = ExecutionRecord::new(
                self.program.clone(),
                self.public_values.proof_nonce,
                self.global_dependencies_opt,
            );

            let last_record_public_values = last_record.public_values;

            blank_record
                .public_values
                .update_finalized_state_from_public_values(&last_record_public_values);

            // Either fill `last_record` directly, or accumulate into `blank_record` and push
            // copies into `shards` below.
            let mem_record_ref =
                if pack_memory_events_into_last_record { last_record } else { &mut blank_record };

            let mut init_page_idx = 0;
            let mut finalize_page_idx = 0;

            if !self.global_page_prot_initialize_events.is_empty()
                || !self.global_page_prot_finalize_events.is_empty()
            {
                // Events are sorted by page index so each shard covers a contiguous range and
                // the previous/last page-idx public values chain correctly between shards.
                self.global_page_prot_initialize_events.sort_by_key(|event| event.page_idx);
                self.global_page_prot_finalize_events.sort_by_key(|event| event.page_idx);

                let init_iter = self.global_page_prot_initialize_events.iter();
                let finalize_iter = self.global_page_prot_finalize_events.iter();
                let mut init_remaining = init_iter.as_slice();
                let mut finalize_remaining = finalize_iter.as_slice();

                while !init_remaining.is_empty() || !finalize_remaining.is_empty() {
                    // Each shard holds up to `2 * opts.page_prot` events total, filled with
                    // init events first and finalize events in the remaining capacity.
                    let capacity = 2 * opts.page_prot;
                    let init_to_take = init_remaining.len().min(capacity);
                    let finalize_to_take = finalize_remaining.len().min(capacity - init_to_take);

                    // NOTE(review): this rebinding is a no-op — the `max` arm recomputes the
                    // same expression as `finalize_to_take` above, and when
                    // `init_to_take == capacity` the first binding is already 0.
                    let finalize_to_take = if init_to_take < capacity {
                        finalize_to_take.max(finalize_remaining.len().min(capacity - init_to_take))
                    } else {
                        0
                    };

                    let page_prot_init_chunk = &init_remaining[..init_to_take];
                    let page_prot_finalize_chunk = &finalize_remaining[..finalize_to_take];

                    mem_record_ref
                        .global_page_prot_initialize_events
                        .extend_from_slice(page_prot_init_chunk);
                    mem_record_ref.public_values.previous_init_page_idx = init_page_idx;
                    if let Some(last_event) = page_prot_init_chunk.last() {
                        init_page_idx = last_event.page_idx;
                    }
                    mem_record_ref.public_values.last_init_page_idx = init_page_idx;

                    mem_record_ref
                        .global_page_prot_finalize_events
                        .extend_from_slice(page_prot_finalize_chunk);
                    mem_record_ref.public_values.previous_finalize_page_idx = finalize_page_idx;
                    if let Some(last_event) = page_prot_finalize_chunk.last() {
                        finalize_page_idx = last_event.page_idx;
                    }
                    mem_record_ref.public_values.last_finalize_page_idx = finalize_page_idx;

                    mem_record_ref.public_values.is_untrusted_programs_enabled = true as u32;

                    init_remaining = &init_remaining[init_to_take..];
                    finalize_remaining = &finalize_remaining[finalize_to_take..];

                    mem_record_ref.public_values.proof_nonce = self.public_values.proof_nonce;
                    mem_record_ref.global_dependencies_opt = self.global_dependencies_opt;

                    if !pack_memory_events_into_last_record {
                        // Push the filled record and reset the accumulator for the next shard.
                        shards.push(take(mem_record_ref));

                        mem_record_ref.program = self.program.clone();
                        mem_record_ref
                            .public_values
                            .update_finalized_state_from_public_values(&last_record_public_values);
                    }
                }
            }

            // Same sharding scheme for global memory init/finalize events, keyed by address.
            self.global_memory_initialize_events.sort_by_key(|event| event.addr);
            self.global_memory_finalize_events.sort_by_key(|event| event.addr);

            let mut init_addr = 0;
            let mut finalize_addr = 0;

            let mut mem_init_remaining = self.global_memory_initialize_events.as_slice();
            let mut mem_finalize_remaining = self.global_memory_finalize_events.as_slice();

            while !mem_init_remaining.is_empty() || !mem_finalize_remaining.is_empty() {
                let capacity = 2 * opts.memory;
                let init_to_take = mem_init_remaining.len().min(capacity);
                let finalize_to_take = mem_finalize_remaining.len().min(capacity - init_to_take);

                // NOTE(review): as above, this rebinding is equivalent to the first binding.
                let finalize_to_take = if init_to_take < capacity {
                    finalize_to_take.max(mem_finalize_remaining.len().min(capacity - init_to_take))
                } else {
                    0
                };

                let mem_init_chunk = &mem_init_remaining[..init_to_take];
                let mem_finalize_chunk = &mem_finalize_remaining[..finalize_to_take];

                mem_record_ref.global_memory_initialize_events.extend_from_slice(mem_init_chunk);
                mem_record_ref.public_values.previous_init_addr = init_addr;
                if let Some(last_event) = mem_init_chunk.last() {
                    init_addr = last_event.addr;
                }
                mem_record_ref.public_values.last_init_addr = init_addr;

                mem_record_ref.global_memory_finalize_events.extend_from_slice(mem_finalize_chunk);
                mem_record_ref.public_values.previous_finalize_addr = finalize_addr;
                if let Some(last_event) = mem_finalize_chunk.last() {
                    finalize_addr = last_event.addr;
                }
                mem_record_ref.public_values.last_finalize_addr = finalize_addr;

                mem_record_ref.public_values.proof_nonce = self.public_values.proof_nonce;
                mem_record_ref.global_dependencies_opt = self.global_dependencies_opt;

                mem_init_remaining = &mem_init_remaining[init_to_take..];
                mem_finalize_remaining = &mem_finalize_remaining[finalize_to_take..];

                if !pack_memory_events_into_last_record {
                    // Carry the final page-idx public values through the memory-only shards.
                    mem_record_ref.public_values.previous_init_page_idx = init_page_idx;
                    mem_record_ref.public_values.last_init_page_idx = init_page_idx;
                    mem_record_ref.public_values.previous_finalize_page_idx = finalize_page_idx;
                    mem_record_ref.public_values.last_finalize_page_idx = finalize_page_idx;

                    shards.push(take(mem_record_ref));

                    mem_record_ref.program = self.program.clone();
                    mem_record_ref
                        .public_values
                        .update_finalized_state_from_public_values(&last_record_public_values);
                }
            }
        }

        shards
    }

    /// Returns a fixed log2 row count for the given AIR. Always `None` here: this record does
    /// not pin trace heights.
    pub fn fixed_log2_rows<F: PrimeField, A: MachineAir<F>>(&self, _air: &A) -> Option<usize> {
        None
    }

    /// Returns whether this record contains any CPU events.
    #[must_use]
    pub fn contains_cpu(&self) -> bool {
        self.cpu_event_count > 0
    }

    /// Adds a precompile event (and its triggering syscall event) for the given syscall code.
    #[inline]
    pub fn add_precompile_event(
        &mut self,
        syscall_code: SyscallCode,
        syscall_event: SyscallEvent,
        event: PrecompileEvent,
    ) {
        self.precompile_events.add_event(syscall_code, syscall_event, event);
    }

    /// Returns the precompile events for the given syscall code.
    ///
    /// # Panics
    /// Panics if no events are recorded for `syscall_code`.
    #[inline]
    #[must_use]
    pub fn get_precompile_events(
        &self,
        syscall_code: SyscallCode,
    ) -> &Vec<(SyscallEvent, PrecompileEvent)> {
        self.precompile_events.get_events(syscall_code).expect("Precompile events not found")
    }

    /// Iterates over all local memory events: the precompiles' followed by the CPU's.
    #[inline]
    pub fn get_local_mem_events(&self) -> impl Iterator<Item = &MemoryLocalEvent> {
        let precompile_local_mem_events = self.precompile_events.get_local_mem_events();
        precompile_local_mem_events.chain(self.cpu_local_memory_access.iter())
    }

    /// Iterates over all local page-protection events: the precompiles' followed by the CPU's.
    #[inline]
    pub fn get_local_page_prot_events(&self) -> impl Iterator<Item = &PageProtLocalEvent> {
        let precompile_local_page_prot_events = self.precompile_events.get_local_page_prot_events();
        precompile_local_page_prot_events.chain(self.cpu_local_page_prot_access.iter())
    }

    /// Clears all events and resets the bookkeeping fields so the record can be reused,
    /// keeping the existing `program` (and vector capacities, via `truncate(0)`).
    ///
    /// NOTE(review): `cpu_event_count` and `global_dependencies_opt` are NOT reset here —
    /// confirm whether that is intentional.
    #[inline]
    pub fn reset(&mut self) {
        self.add_events.truncate(0);
        self.addw_events.truncate(0);
        self.addi_events.truncate(0);
        self.mul_events.truncate(0);
        self.sub_events.truncate(0);
        self.subw_events.truncate(0);
        self.bitwise_events.truncate(0);
        self.shift_left_events.truncate(0);
        self.shift_right_events.truncate(0);
        self.divrem_events.truncate(0);
        self.lt_events.truncate(0);
        self.memory_load_byte_events.truncate(0);
        self.memory_load_half_events.truncate(0);
        self.memory_load_word_events.truncate(0);
        self.memory_load_x0_events.truncate(0);
        self.memory_load_double_events.truncate(0);
        self.memory_store_byte_events.truncate(0);
        self.memory_store_half_events.truncate(0);
        self.memory_store_word_events.truncate(0);
        self.memory_store_double_events.truncate(0);
        self.utype_events.truncate(0);
        self.branch_events.truncate(0);
        self.jal_events.truncate(0);
        self.jalr_events.truncate(0);
        self.byte_lookups.clear();
        self.precompile_events = PrecompileEvents::default();
        self.global_memory_initialize_events.truncate(0);
        self.global_memory_finalize_events.truncate(0);
        self.global_page_prot_initialize_events.truncate(0);
        self.global_page_prot_finalize_events.truncate(0);
        self.cpu_local_memory_access.truncate(0);
        self.cpu_local_page_prot_access.truncate(0);
        self.syscall_events.truncate(0);
        self.global_interaction_events.truncate(0);
        self.instruction_fetch_events.truncate(0);
        self.instruction_decode_events.truncate(0);
        let mut cumulative_sum = self.global_cumulative_sum.lock().unwrap();
        *cumulative_sum = SepticDigest::default();
        self.global_interaction_event_count = 0;
        self.bump_memory_events.truncate(0);
        self.bump_state_events.truncate(0);
        let _ = self.public_values.reset();
        self.next_nonce = 0;
        self.shape = None;
        self.estimated_trace_area = 0;
        self.initial_timestamp = 0;
        self.last_timestamp = 0;
        self.pc_start = None;
        self.next_pc = 0;
        self.exit_code = 0;
    }
}
560
/// The memory/register access records captured while executing a single instruction.
#[derive(Debug, Copy, Clone, Default, Serialize, Deserialize, DeepSizeOf)]
pub struct MemoryAccessRecord {
    /// The access record for operand `a`, if any.
    pub a: Option<MemoryRecordEnum>,
    /// The access record for operand `b`, if any.
    pub b: Option<MemoryRecordEnum>,
    /// The access record for operand `c`, if any.
    pub c: Option<MemoryRecordEnum>,
    /// The memory access record for a load/store, if any.
    pub memory: Option<MemoryRecordEnum>,
    /// Present when the instruction came from untrusted program memory.
    /// NOTE(review): the `u32` appears to be the encoded instruction word (see
    /// `UntrustedProgramInstructionRecord::encoded_instruction`) — confirm.
    pub untrusted_instruction: Option<(MemoryRecordEnum, u32)>,
}
577
/// Operand and register-access information for an R-type instruction (all three operands
/// carry a memory record).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, DeepSizeOf)]
pub struct RTypeRecord {
    /// The `op_a` register index.
    pub op_a: u8,
    /// The access record for operand `a`.
    pub a: MemoryRecordEnum,
    /// The raw `op_b` operand value.
    pub op_b: u64,
    /// The access record for operand `b`.
    pub b: MemoryRecordEnum,
    /// The raw `op_c` operand value.
    pub op_c: u64,
    /// The access record for operand `c`.
    pub c: MemoryRecordEnum,
    /// Whether the instruction came from untrusted program memory.
    pub is_untrusted: bool,
}
596
597impl RTypeRecord {
598 pub(crate) fn new(value: &MemoryAccessRecord, instruction: &Instruction) -> Self {
599 Self {
600 op_a: instruction.op_a,
601 a: value.a.expect("expected MemoryRecord for op_a in RTypeRecord"),
602 op_b: instruction.op_b,
603 b: value.b.expect("expected MemoryRecord for op_b in RTypeRecord"),
604 op_c: instruction.op_c,
605 c: value.c.expect("expected MemoryRecord for op_c in RTypeRecord"),
606 is_untrusted: value.untrusted_instruction.is_some(),
607 }
608 }
609}
/// Operand and register-access information for an I-type instruction (`c` is an immediate,
/// so only `a` and `b` carry memory records).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, DeepSizeOf)]
pub struct ITypeRecord {
    /// The `op_a` register index.
    pub op_a: u8,
    /// The access record for operand `a`.
    pub a: MemoryRecordEnum,
    /// The raw `op_b` operand value.
    pub op_b: u64,
    /// The access record for operand `b`.
    pub b: MemoryRecordEnum,
    /// The raw `op_c` operand value (immediate).
    pub op_c: u64,
    /// Whether the instruction came from untrusted program memory.
    pub is_untrusted: bool,
}
626
627impl ITypeRecord {
628 pub(crate) fn new(value: &MemoryAccessRecord, instruction: &Instruction) -> Self {
629 debug_assert!(value.c.is_none());
630 Self {
631 op_a: instruction.op_a,
632 a: value.a.expect("expected MemoryRecord for op_a in ITypeRecord"),
633 op_b: instruction.op_b,
634 b: value.b.expect("expected MemoryRecord for op_b in ITypeRecord"),
635 op_c: instruction.op_c,
636 is_untrusted: value.untrusted_instruction.is_some(),
637 }
638 }
639}
640
/// Operand and register-access information for a J-type instruction (only operand `a`
/// carries a memory record).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, DeepSizeOf)]
pub struct JTypeRecord {
    /// The `op_a` register index.
    pub op_a: u8,
    /// The access record for operand `a`.
    pub a: MemoryRecordEnum,
    /// The raw `op_b` operand value.
    pub op_b: u64,
    /// The raw `op_c` operand value.
    pub op_c: u64,
    /// Whether the instruction came from untrusted program memory.
    pub is_untrusted: bool,
}
655
656impl JTypeRecord {
657 pub(crate) fn new(value: &MemoryAccessRecord, instruction: &Instruction) -> Self {
658 debug_assert!(value.b.is_none());
659 debug_assert!(value.c.is_none());
660 Self {
661 op_a: instruction.op_a,
662 a: value.a.expect("expected MemoryRecord for op_a in JTypeRecord"),
663 op_b: instruction.op_b,
664 op_c: instruction.op_c,
665 is_untrusted: value.untrusted_instruction.is_some(),
666 }
667 }
668}
669
/// Operand and register-access information for an ALU instruction whose third operand may be
/// either a register or an immediate.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, DeepSizeOf)]
pub struct ALUTypeRecord {
    /// The `op_a` register index.
    pub op_a: u8,
    /// The access record for operand `a`.
    pub a: MemoryRecordEnum,
    /// The raw `op_b` operand value.
    pub op_b: u64,
    /// The access record for operand `b`.
    pub b: MemoryRecordEnum,
    /// The raw `op_c` operand value.
    pub op_c: u64,
    /// The access record for operand `c`, if one was captured (absent for immediates).
    pub c: Option<MemoryRecordEnum>,
    /// Whether operand `c` is an immediate.
    pub is_imm: bool,
    /// Whether the instruction came from untrusted program memory.
    pub is_untrusted: bool,
}
690
691impl ALUTypeRecord {
692 pub(crate) fn new(value: &MemoryAccessRecord, instruction: &Instruction) -> Self {
693 Self {
694 op_a: instruction.op_a,
695 a: value.a.expect("expected MemoryRecord for op_a in ALUTypeRecord"),
696 op_b: instruction.op_b,
697 b: value.b.expect("expected MemoryRecord for op_b in ALUTypeRecord"),
698 op_c: instruction.op_c,
699 c: value.c,
700 is_imm: instruction.imm_c,
701 is_untrusted: value.untrusted_instruction.is_some(),
702 }
703 }
704}
705
/// A record of an instruction fetched from untrusted program memory.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct UntrustedProgramInstructionRecord {
    /// The memory accesses made while fetching/executing the instruction.
    pub memory_access_record: MemoryAccessRecord,
    /// The decoded instruction.
    pub instruction: Instruction,
    /// The raw encoded instruction word.
    pub encoded_instruction: u32,
}
716
717impl MachineRecord for ExecutionRecord {
718 fn stats(&self) -> HashMap<String, usize> {
719 let mut stats = HashMap::new();
720 stats.insert("cpu_events".to_string(), self.cpu_event_count as usize);
721 stats.insert("add_events".to_string(), self.add_events.len());
722 stats.insert("mul_events".to_string(), self.mul_events.len());
723 stats.insert("sub_events".to_string(), self.sub_events.len());
724 stats.insert("bitwise_events".to_string(), self.bitwise_events.len());
725 stats.insert("shift_left_events".to_string(), self.shift_left_events.len());
726 stats.insert("shift_right_events".to_string(), self.shift_right_events.len());
727 stats.insert("divrem_events".to_string(), self.divrem_events.len());
728 stats.insert("lt_events".to_string(), self.lt_events.len());
729 stats.insert("load_byte_events".to_string(), self.memory_load_byte_events.len());
730 stats.insert("load_half_events".to_string(), self.memory_load_half_events.len());
731 stats.insert("load_word_events".to_string(), self.memory_load_word_events.len());
732 stats.insert("load_x0_events".to_string(), self.memory_load_x0_events.len());
733 stats.insert("store_byte_events".to_string(), self.memory_store_byte_events.len());
734 stats.insert("store_half_events".to_string(), self.memory_store_half_events.len());
735 stats.insert("store_word_events".to_string(), self.memory_store_word_events.len());
736 stats.insert("branch_events".to_string(), self.branch_events.len());
737 stats.insert("jal_events".to_string(), self.jal_events.len());
738 stats.insert("jalr_events".to_string(), self.jalr_events.len());
739 stats.insert("utype_events".to_string(), self.utype_events.len());
740 stats.insert("instruction_decode_events".to_string(), self.instruction_decode_events.len());
741 stats.insert("instruction_fetch_events".to_string(), self.instruction_fetch_events.len());
742
743 for (syscall_code, events) in self.precompile_events.iter() {
744 stats.insert(format!("syscall {syscall_code:?}"), events.len());
745 }
746
747 stats.insert(
748 "global_memory_initialize_events".to_string(),
749 self.global_memory_initialize_events.len(),
750 );
751 stats.insert(
752 "global_memory_finalize_events".to_string(),
753 self.global_memory_finalize_events.len(),
754 );
755 stats.insert("local_memory_access_events".to_string(), self.cpu_local_memory_access.len());
756 stats.insert(
757 "local_page_prot_access_events".to_string(),
758 self.cpu_local_page_prot_access.len(),
759 );
760 if self.contains_cpu() {
761 stats.insert("byte_lookups".to_string(), self.byte_lookups.len());
762 }
763 stats.retain(|_, v| *v != 0);
765 stats
766 }
767
768 fn append(&mut self, other: &mut ExecutionRecord) {
769 self.cpu_event_count += other.cpu_event_count;
770 other.cpu_event_count = 0;
771 self.public_values.global_count += other.public_values.global_count;
772 other.public_values.global_count = 0;
773 self.public_values.global_init_count += other.public_values.global_init_count;
774 other.public_values.global_init_count = 0;
775 self.public_values.global_finalize_count += other.public_values.global_finalize_count;
776 other.public_values.global_finalize_count = 0;
777 self.public_values.global_page_prot_init_count +=
778 other.public_values.global_page_prot_init_count;
779 other.public_values.global_page_prot_init_count = 0;
780 self.public_values.global_page_prot_finalize_count +=
781 other.public_values.global_page_prot_finalize_count;
782 other.public_values.global_page_prot_finalize_count = 0;
783 self.estimated_trace_area += other.estimated_trace_area;
784 other.estimated_trace_area = 0;
785 self.add_events.append(&mut other.add_events);
786 self.sub_events.append(&mut other.sub_events);
787 self.mul_events.append(&mut other.mul_events);
788 self.bitwise_events.append(&mut other.bitwise_events);
789 self.shift_left_events.append(&mut other.shift_left_events);
790 self.shift_right_events.append(&mut other.shift_right_events);
791 self.divrem_events.append(&mut other.divrem_events);
792 self.lt_events.append(&mut other.lt_events);
793 self.memory_load_byte_events.append(&mut other.memory_load_byte_events);
794 self.memory_load_half_events.append(&mut other.memory_load_half_events);
795 self.memory_load_word_events.append(&mut other.memory_load_word_events);
796 self.memory_load_x0_events.append(&mut other.memory_load_x0_events);
797 self.memory_store_byte_events.append(&mut other.memory_store_byte_events);
798 self.memory_store_half_events.append(&mut other.memory_store_half_events);
799 self.memory_store_word_events.append(&mut other.memory_store_word_events);
800 self.branch_events.append(&mut other.branch_events);
801 self.jal_events.append(&mut other.jal_events);
802 self.jalr_events.append(&mut other.jalr_events);
803 self.utype_events.append(&mut other.utype_events);
804 self.syscall_events.append(&mut other.syscall_events);
805 self.bump_memory_events.append(&mut other.bump_memory_events);
806 self.bump_state_events.append(&mut other.bump_state_events);
807 self.precompile_events.append(&mut other.precompile_events);
808 self.instruction_fetch_events.append(&mut other.instruction_fetch_events);
809 self.instruction_decode_events.append(&mut other.instruction_decode_events);
810
811 if self.byte_lookups.is_empty() {
812 self.byte_lookups = std::mem::take(&mut other.byte_lookups);
813 } else {
814 self.add_byte_lookup_events_from_maps(vec![&other.byte_lookups]);
815 }
816
817 self.global_memory_initialize_events.append(&mut other.global_memory_initialize_events);
818 self.global_memory_finalize_events.append(&mut other.global_memory_finalize_events);
819 self.global_page_prot_initialize_events
820 .append(&mut other.global_page_prot_initialize_events);
821 self.global_page_prot_finalize_events.append(&mut other.global_page_prot_finalize_events);
822 self.cpu_local_memory_access.append(&mut other.cpu_local_memory_access);
823 self.cpu_local_page_prot_access.append(&mut other.cpu_local_page_prot_access);
824 self.global_interaction_events.append(&mut other.global_interaction_events);
825 }
826
827 fn public_values<F: AbstractField>(&self) -> Vec<F> {
829 let mut public_values = self.public_values;
830 public_values.global_cumulative_sum = *self.global_cumulative_sum.lock().unwrap();
831 public_values.to_vec()
832 }
833
834 #[allow(clippy::type_complexity)]
836 fn eval_public_values<AB: SP1AirBuilder>(builder: &mut AB) {
837 let public_values_slice: [AB::PublicVar; SP1_PROOF_NUM_PV_ELTS] =
838 core::array::from_fn(|i| builder.public_values()[i]);
839 let public_values: &PublicValues<
840 [AB::PublicVar; 4],
841 [AB::PublicVar; 3],
842 [AB::PublicVar; 4],
843 AB::PublicVar,
844 > = public_values_slice.as_slice().borrow();
845
846 for var in public_values.empty {
847 builder.assert_zero(var);
848 }
849
850 Self::eval_state(public_values, builder);
851 Self::eval_first_execution_shard(public_values, builder);
852 Self::eval_exit_code(public_values, builder);
853 Self::eval_committed_value_digest(public_values, builder);
854 Self::eval_deferred_proofs_digest(public_values, builder);
855 Self::eval_global_sum(public_values, builder);
856 Self::eval_global_memory_init(public_values, builder);
857 Self::eval_global_memory_finalize(public_values, builder);
858 Self::eval_global_page_prot_init(public_values, builder);
859 Self::eval_global_page_prot_finalize(public_values, builder);
860 }
861
862 fn interactions_in_public_values() -> Vec<InteractionKind> {
863 InteractionKind::all_kinds()
864 .iter()
865 .filter(|kind| kind.appears_in_eval_public_values())
866 .copied()
867 .collect()
868 }
869}
870
871impl ByteRecord for ExecutionRecord {
872 fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent) {
873 *self.byte_lookups.entry(blu_event).or_insert(0) += 1;
874 }
875
876 #[inline]
877 fn add_byte_lookup_events_from_maps(
878 &mut self,
879 new_events: Vec<&HashMap<ByteLookupEvent, usize>>,
880 ) {
881 for new_blu_map in new_events {
882 for (blu_event, count) in new_blu_map.iter() {
883 *self.byte_lookups.entry(*blu_event).or_insert(0) += count;
884 }
885 }
886 }
887}
888
889impl ExecutionRecord {
890 #[allow(clippy::type_complexity)]
891 fn eval_state<AB: SP1AirBuilder>(
892 public_values: &PublicValues<
893 [AB::PublicVar; 4],
894 [AB::PublicVar; 3],
895 [AB::PublicVar; 4],
896 AB::PublicVar,
897 >,
898 builder: &mut AB,
899 ) {
900 let initial_timestamp_high = public_values.initial_timestamp[1].into()
901 + public_values.initial_timestamp[0].into() * AB::Expr::from_canonical_u32(1 << 8);
902 let initial_timestamp_low = public_values.initial_timestamp[3].into()
903 + public_values.initial_timestamp[2].into() * AB::Expr::from_canonical_u32(1 << 16);
904 let last_timestamp_high = public_values.last_timestamp[1].into()
905 + public_values.last_timestamp[0].into() * AB::Expr::from_canonical_u32(1 << 8);
906 let last_timestamp_low = public_values.last_timestamp[3].into()
907 + public_values.last_timestamp[2].into() * AB::Expr::from_canonical_u32(1 << 16);
908
909 builder.send_byte(
911 AB::Expr::from_canonical_u32(ByteOpcode::Range as u32),
912 public_values.initial_timestamp[0].into(),
913 AB::Expr::from_canonical_u32(16),
914 AB::Expr::zero(),
915 AB::Expr::one(),
916 );
917 builder.send_byte(
918 AB::Expr::from_canonical_u32(ByteOpcode::Range as u32),
919 (public_values.initial_timestamp[3].into() - AB::Expr::one())
920 * AB::F::from_canonical_u8(8).inverse(),
921 AB::Expr::from_canonical_u32(13),
922 AB::Expr::zero(),
923 AB::Expr::one(),
924 );
925 builder.send_byte(
926 AB::Expr::from_canonical_u32(ByteOpcode::Range as u32),
927 public_values.last_timestamp[0].into(),
928 AB::Expr::from_canonical_u32(16),
929 AB::Expr::zero(),
930 AB::Expr::one(),
931 );
932 builder.send_byte(
933 AB::Expr::from_canonical_u32(ByteOpcode::Range as u32),
934 (public_values.last_timestamp[3].into() - AB::Expr::one())
935 * AB::F::from_canonical_u8(8).inverse(),
936 AB::Expr::from_canonical_u32(13),
937 AB::Expr::zero(),
938 AB::Expr::one(),
939 );
940 builder.send_byte(
941 AB::Expr::from_canonical_u8(ByteOpcode::U8Range as u8),
942 AB::Expr::zero(),
943 public_values.initial_timestamp[1],
944 public_values.initial_timestamp[2],
945 AB::Expr::one(),
946 );
947 builder.send_byte(
948 AB::Expr::from_canonical_u8(ByteOpcode::U8Range as u8),
949 AB::Expr::zero(),
950 public_values.last_timestamp[1],
951 public_values.last_timestamp[2],
952 AB::Expr::one(),
953 );
954
955 for i in 0..3 {
957 builder.send_byte(
958 AB::Expr::from_canonical_u32(ByteOpcode::Range as u32),
959 public_values.pc_start[i].into(),
960 AB::Expr::from_canonical_u32(16),
961 AB::Expr::zero(),
962 AB::Expr::one(),
963 );
964 builder.send_byte(
965 AB::Expr::from_canonical_u32(ByteOpcode::Range as u32),
966 public_values.next_pc[i].into(),
967 AB::Expr::from_canonical_u32(16),
968 AB::Expr::zero(),
969 AB::Expr::one(),
970 );
971 }
972
973 builder.send_state(
975 initial_timestamp_high.clone(),
976 initial_timestamp_low.clone(),
977 public_values.pc_start,
978 AB::Expr::one(),
979 );
980 builder.receive_state(
981 last_timestamp_high.clone(),
982 last_timestamp_low.clone(),
983 public_values.next_pc,
984 AB::Expr::one(),
985 );
986
987 let is_execution_shard = public_values.is_execution_shard.into();
989 builder.assert_bool(is_execution_shard.clone());
990 builder
991 .when_not(is_execution_shard.clone())
992 .assert_eq(initial_timestamp_low.clone(), last_timestamp_low.clone());
993 builder
994 .when_not(is_execution_shard.clone())
995 .assert_eq(initial_timestamp_high.clone(), last_timestamp_high.clone());
996 builder
997 .when_not(is_execution_shard.clone())
998 .assert_all_eq(public_values.pc_start, public_values.next_pc);
999
1000 builder.assert_bool(public_values.is_timestamp_high_eq);
1002 builder.assert_eq(
1004 (last_timestamp_high.clone() - initial_timestamp_high.clone())
1005 * public_values.inv_timestamp_high.into(),
1006 AB::Expr::one() - public_values.is_timestamp_high_eq.into(),
1007 );
1008 builder.assert_zero(
1010 (last_timestamp_high.clone() - initial_timestamp_high.clone())
1011 * public_values.is_timestamp_high_eq.into(),
1012 );
1013
1014 builder.assert_bool(public_values.is_timestamp_low_eq);
1016 builder.assert_eq(
1018 (last_timestamp_low.clone() - initial_timestamp_low.clone())
1019 * public_values.inv_timestamp_low.into(),
1020 AB::Expr::one() - public_values.is_timestamp_low_eq.into(),
1021 );
1022 builder.assert_zero(
1024 (last_timestamp_low.clone() - initial_timestamp_low.clone())
1025 * public_values.is_timestamp_low_eq.into(),
1026 );
1027
1028 builder.assert_eq(
1030 AB::Expr::one() - is_execution_shard.clone(),
1031 public_values.is_timestamp_high_eq.into() * public_values.is_timestamp_low_eq.into(),
1032 );
1033
1034 builder.when(is_execution_shard.clone()).assert_eq(
1038 (last_timestamp_high + last_timestamp_low - AB::Expr::one())
1039 * public_values.last_timestamp_inv.into(),
1040 AB::Expr::one(),
1041 );
1042 }
1043
1044 #[allow(clippy::type_complexity)]
1045 fn eval_first_execution_shard<AB: SP1AirBuilder>(
1046 public_values: &PublicValues<
1047 [AB::PublicVar; 4],
1048 [AB::PublicVar; 3],
1049 [AB::PublicVar; 4],
1050 AB::PublicVar,
1051 >,
1052 builder: &mut AB,
1053 ) {
1054 builder.assert_bool(public_values.is_first_execution_shard.into());
1056
1057 builder.when(public_values.is_first_execution_shard.into()).assert_all_eq(
1065 public_values.initial_timestamp,
1066 [AB::Expr::zero(), AB::Expr::zero(), AB::Expr::zero(), AB::Expr::one()],
1067 );
1068
1069 builder
1071 .when(public_values.is_first_execution_shard.into())
1072 .assert_one(public_values.is_execution_shard);
1073
1074 for i in 0..PV_DIGEST_NUM_WORDS {
1078 builder
1079 .when(public_values.is_first_execution_shard.into())
1080 .assert_all_zero(public_values.prev_committed_value_digest[i]);
1081 }
1082
1083 builder
1085 .when(public_values.is_first_execution_shard.into())
1086 .assert_all_zero(public_values.prev_deferred_proofs_digest);
1087
1088 builder
1090 .when(public_values.is_first_execution_shard.into())
1091 .assert_zero(public_values.prev_exit_code);
1092
1093 builder
1095 .when(public_values.is_first_execution_shard.into())
1096 .assert_all_zero(public_values.previous_init_addr);
1097
1098 builder
1100 .when(public_values.is_first_execution_shard.into())
1101 .assert_all_zero(public_values.previous_finalize_addr);
1102
1103 builder
1105 .when(public_values.is_first_execution_shard.into())
1106 .assert_all_zero(public_values.previous_init_page_idx);
1107
1108 builder
1110 .when(public_values.is_first_execution_shard.into())
1111 .assert_all_zero(public_values.previous_finalize_page_idx);
1112
1113 builder
1115 .when(public_values.is_first_execution_shard.into())
1116 .assert_zero(public_values.prev_commit_syscall);
1117
1118 builder
1120 .when(public_values.is_first_execution_shard.into())
1121 .assert_zero(public_values.prev_commit_deferred_syscall);
1122 }
1123
1124 #[allow(clippy::type_complexity)]
1125 fn eval_exit_code<AB: SP1AirBuilder>(
1126 public_values: &PublicValues<
1127 [AB::PublicVar; 4],
1128 [AB::PublicVar; 3],
1129 [AB::PublicVar; 4],
1130 AB::PublicVar,
1131 >,
1132 builder: &mut AB,
1133 ) {
1134 let is_execution_shard = public_values.is_execution_shard.into();
1135
1136 builder.assert_zero(
1138 public_values.prev_exit_code.into()
1139 * (public_values.exit_code.into() - public_values.prev_exit_code.into()),
1140 );
1141
1142 builder
1144 .when_not(is_execution_shard.clone())
1145 .assert_eq(public_values.prev_exit_code, public_values.exit_code);
1146 }
1147
    /// Constrains the `committed_value_digest` public values across a shard boundary.
    ///
    /// Enforces that every digest limb is a byte, that the commit-syscall flag is a boolean
    /// that can only go from 0 to 1, and that the digest may only change on an execution
    /// shard, while the previous digest is still all-zero, and before the commit syscall
    /// has been observed.
    #[allow(clippy::type_complexity)]
    fn eval_committed_value_digest<AB: SP1AirBuilder>(
        public_values: &PublicValues<
            [AB::PublicVar; 4],
            [AB::PublicVar; 3],
            [AB::PublicVar; 4],
            AB::PublicVar,
        >,
        builder: &mut AB,
    ) {
        let is_execution_shard = public_values.is_execution_shard.into();

        // Range-check every limb of both the previous and current digest to be a byte;
        // each `U8Range` send checks two limbs at once.
        for i in 0..PV_DIGEST_NUM_WORDS {
            builder.send_byte(
                AB::Expr::from_canonical_u8(ByteOpcode::U8Range as u8),
                AB::Expr::zero(),
                public_values.prev_committed_value_digest[i][0],
                public_values.prev_committed_value_digest[i][1],
                AB::Expr::one(),
            );
            builder.send_byte(
                AB::Expr::from_canonical_u8(ByteOpcode::U8Range as u8),
                AB::Expr::zero(),
                public_values.prev_committed_value_digest[i][2],
                public_values.prev_committed_value_digest[i][3],
                AB::Expr::one(),
            );
            builder.send_byte(
                AB::Expr::from_canonical_u8(ByteOpcode::U8Range as u8),
                AB::Expr::zero(),
                public_values.committed_value_digest[i][0],
                public_values.committed_value_digest[i][1],
                AB::Expr::one(),
            );
            builder.send_byte(
                AB::Expr::from_canonical_u8(ByteOpcode::U8Range as u8),
                AB::Expr::zero(),
                public_values.committed_value_digest[i][2],
                public_values.committed_value_digest[i][3],
                AB::Expr::one(),
            );
        }

        // The commit-syscall flags are booleans, and once the flag is set it stays set.
        builder.assert_bool(public_values.prev_commit_syscall);
        builder.assert_bool(public_values.commit_syscall);

        builder.when(public_values.prev_commit_syscall).assert_one(public_values.commit_syscall);

        // A non-execution shard can change neither the commit-syscall flag...
        builder
            .when_not(is_execution_shard.clone())
            .assert_eq(public_values.prev_commit_syscall, public_values.commit_syscall);

        // ...nor any word of the digest.
        for i in 0..PV_DIGEST_NUM_WORDS {
            builder.when_not(is_execution_shard.clone()).assert_all_eq(
                public_values.prev_committed_value_digest[i],
                public_values.committed_value_digest[i],
            );
        }

        // If any limb of the previous digest is nonzero, the digest was already set, so the
        // entire digest must be unchanged in this shard.
        for word in public_values.prev_committed_value_digest {
            for limb in word {
                for i in 0..PV_DIGEST_NUM_WORDS {
                    builder.when(limb).assert_all_eq(
                        public_values.prev_committed_value_digest[i],
                        public_values.committed_value_digest[i],
                    );
                }
            }
        }

        // Once the commit syscall has been observed, the digest is frozen even if it is
        // all-zero.
        for i in 0..PV_DIGEST_NUM_WORDS {
            builder.when(public_values.prev_commit_syscall).assert_all_eq(
                public_values.prev_committed_value_digest[i],
                public_values.committed_value_digest[i],
            );
        }
    }
1233
1234 #[allow(clippy::type_complexity)]
1235 fn eval_deferred_proofs_digest<AB: SP1AirBuilder>(
1236 public_values: &PublicValues<
1237 [AB::PublicVar; 4],
1238 [AB::PublicVar; 3],
1239 [AB::PublicVar; 4],
1240 AB::PublicVar,
1241 >,
1242 builder: &mut AB,
1243 ) {
1244 let is_execution_shard = public_values.is_execution_shard.into();
1245
1246 builder.assert_bool(public_values.prev_commit_deferred_syscall);
1248 builder.assert_bool(public_values.commit_deferred_syscall);
1249
1250 builder
1252 .when(public_values.prev_commit_deferred_syscall)
1253 .assert_one(public_values.commit_deferred_syscall);
1254
1255 builder.when_not(is_execution_shard.clone()).assert_eq(
1257 public_values.prev_commit_deferred_syscall,
1258 public_values.commit_deferred_syscall,
1259 );
1260
1261 builder.when_not(is_execution_shard.clone()).assert_all_eq(
1263 public_values.prev_deferred_proofs_digest,
1264 public_values.deferred_proofs_digest,
1265 );
1266
1267 for limb in public_values.prev_deferred_proofs_digest {
1270 builder.when(limb).assert_all_eq(
1271 public_values.prev_deferred_proofs_digest,
1272 public_values.deferred_proofs_digest,
1273 );
1274 }
1275
1276 builder.when(public_values.prev_commit_deferred_syscall).assert_all_eq(
1278 public_values.prev_deferred_proofs_digest,
1279 public_values.deferred_proofs_digest,
1280 );
1281 }
1282
1283 #[allow(clippy::type_complexity)]
1284 fn eval_global_sum<AB: SP1AirBuilder>(
1285 public_values: &PublicValues<
1286 [AB::PublicVar; 4],
1287 [AB::PublicVar; 3],
1288 [AB::PublicVar; 4],
1289 AB::PublicVar,
1290 >,
1291 builder: &mut AB,
1292 ) {
1293 let initial_sum = SepticDigest::<AB::F>::zero().0;
1294 builder.send(
1295 AirInteraction::new(
1296 once(AB::Expr::zero())
1297 .chain(initial_sum.x.0.into_iter().map(Into::into))
1298 .chain(initial_sum.y.0.into_iter().map(Into::into))
1299 .collect(),
1300 AB::Expr::one(),
1301 InteractionKind::GlobalAccumulation,
1302 ),
1303 InteractionScope::Local,
1304 );
1305 builder.receive(
1306 AirInteraction::new(
1307 once(public_values.global_count.into())
1308 .chain(public_values.global_cumulative_sum.0.x.0.map(Into::into))
1309 .chain(public_values.global_cumulative_sum.0.y.0.map(Into::into))
1310 .collect(),
1311 AB::Expr::one(),
1312 InteractionKind::GlobalAccumulation,
1313 ),
1314 InteractionScope::Local,
1315 );
1316 }
1317
1318 #[allow(clippy::type_complexity)]
1319 fn eval_global_memory_init<AB: SP1AirBuilder>(
1320 public_values: &PublicValues<
1321 [AB::PublicVar; 4],
1322 [AB::PublicVar; 3],
1323 [AB::PublicVar; 4],
1324 AB::PublicVar,
1325 >,
1326 builder: &mut AB,
1327 ) {
1328 for i in 0..3 {
1330 builder.send_byte(
1331 AB::Expr::from_canonical_u32(ByteOpcode::Range as u32),
1332 public_values.previous_init_addr[i].into(),
1333 AB::Expr::from_canonical_u32(16),
1334 AB::Expr::zero(),
1335 AB::Expr::one(),
1336 );
1337 builder.send_byte(
1338 AB::Expr::from_canonical_u32(ByteOpcode::Range as u32),
1339 public_values.last_init_addr[i].into(),
1340 AB::Expr::from_canonical_u32(16),
1341 AB::Expr::zero(),
1342 AB::Expr::one(),
1343 );
1344 }
1345
1346 builder.send(
1347 AirInteraction::new(
1348 once(AB::Expr::zero())
1349 .chain(public_values.previous_init_addr.into_iter().map(Into::into))
1350 .chain(once(AB::Expr::one()))
1351 .collect(),
1352 AB::Expr::one(),
1353 InteractionKind::MemoryGlobalInitControl,
1354 ),
1355 InteractionScope::Local,
1356 );
1357 builder.receive(
1358 AirInteraction::new(
1359 once(public_values.global_init_count.into())
1360 .chain(public_values.last_init_addr.into_iter().map(Into::into))
1361 .chain(once(AB::Expr::one()))
1362 .collect(),
1363 AB::Expr::one(),
1364 InteractionKind::MemoryGlobalInitControl,
1365 ),
1366 InteractionScope::Local,
1367 );
1368 }
1369
1370 #[allow(clippy::type_complexity)]
1371 fn eval_global_memory_finalize<AB: SP1AirBuilder>(
1372 public_values: &PublicValues<
1373 [AB::PublicVar; 4],
1374 [AB::PublicVar; 3],
1375 [AB::PublicVar; 4],
1376 AB::PublicVar,
1377 >,
1378 builder: &mut AB,
1379 ) {
1380 for i in 0..3 {
1382 builder.send_byte(
1383 AB::Expr::from_canonical_u32(ByteOpcode::Range as u32),
1384 public_values.previous_finalize_addr[i].into(),
1385 AB::Expr::from_canonical_u32(16),
1386 AB::Expr::zero(),
1387 AB::Expr::one(),
1388 );
1389 builder.send_byte(
1390 AB::Expr::from_canonical_u32(ByteOpcode::Range as u32),
1391 public_values.last_finalize_addr[i].into(),
1392 AB::Expr::from_canonical_u32(16),
1393 AB::Expr::zero(),
1394 AB::Expr::one(),
1395 );
1396 }
1397
1398 builder.send(
1399 AirInteraction::new(
1400 once(AB::Expr::zero())
1401 .chain(public_values.previous_finalize_addr.into_iter().map(Into::into))
1402 .chain(once(AB::Expr::one()))
1403 .collect(),
1404 AB::Expr::one(),
1405 InteractionKind::MemoryGlobalFinalizeControl,
1406 ),
1407 InteractionScope::Local,
1408 );
1409 builder.receive(
1410 AirInteraction::new(
1411 once(public_values.global_finalize_count.into())
1412 .chain(public_values.last_finalize_addr.into_iter().map(Into::into))
1413 .chain(once(AB::Expr::one()))
1414 .collect(),
1415 AB::Expr::one(),
1416 InteractionKind::MemoryGlobalFinalizeControl,
1417 ),
1418 InteractionScope::Local,
1419 );
1420 }
1421
1422 #[allow(clippy::type_complexity)]
1423 fn eval_global_page_prot_init<AB: SP1AirBuilder>(
1424 public_values: &PublicValues<
1425 [AB::PublicVar; 4],
1426 [AB::PublicVar; 3],
1427 [AB::PublicVar; 4],
1428 AB::PublicVar,
1429 >,
1430 builder: &mut AB,
1431 ) {
1432 builder.assert_bool(public_values.is_untrusted_programs_enabled.into());
1433 builder.send(
1434 AirInteraction::new(
1435 once(AB::Expr::zero())
1436 .chain(public_values.previous_init_page_idx.into_iter().map(Into::into))
1437 .chain(once(AB::Expr::one()))
1438 .collect(),
1439 public_values.is_untrusted_programs_enabled.into(),
1440 InteractionKind::PageProtGlobalInitControl,
1441 ),
1442 InteractionScope::Local,
1443 );
1444 builder.receive(
1445 AirInteraction::new(
1446 once(public_values.global_page_prot_init_count.into())
1447 .chain(public_values.last_init_page_idx.into_iter().map(Into::into))
1448 .chain(once(AB::Expr::one()))
1449 .collect(),
1450 public_values.is_untrusted_programs_enabled.into(),
1451 InteractionKind::PageProtGlobalInitControl,
1452 ),
1453 InteractionScope::Local,
1454 );
1455 }
1456
1457 #[allow(clippy::type_complexity)]
1458 fn eval_global_page_prot_finalize<AB: SP1AirBuilder>(
1459 public_values: &PublicValues<
1460 [AB::PublicVar; 4],
1461 [AB::PublicVar; 3],
1462 [AB::PublicVar; 4],
1463 AB::PublicVar,
1464 >,
1465 builder: &mut AB,
1466 ) {
1467 builder.assert_bool(public_values.is_untrusted_programs_enabled.into());
1468 builder.send(
1469 AirInteraction::new(
1470 once(AB::Expr::zero())
1471 .chain(public_values.previous_finalize_page_idx.into_iter().map(Into::into))
1472 .chain(once(AB::Expr::one()))
1473 .collect(),
1474 public_values.is_untrusted_programs_enabled.into(),
1475 InteractionKind::PageProtGlobalFinalizeControl,
1476 ),
1477 InteractionScope::Local,
1478 );
1479 builder.receive(
1480 AirInteraction::new(
1481 once(public_values.global_page_prot_finalize_count.into())
1482 .chain(public_values.last_finalize_page_idx.into_iter().map(Into::into))
1483 .chain(once(AB::Expr::one()))
1484 .collect(),
1485 public_values.is_untrusted_programs_enabled.into(),
1486 InteractionKind::PageProtGlobalFinalizeControl,
1487 ),
1488 InteractionScope::Local,
1489 );
1490 }
1491
    /// Populates the derived public values consumed by the public-value constraints.
    ///
    /// Splits the initial/last timestamps at bit 24 into a high part and a low 24-bit part,
    /// then fills in the inverse witnesses the AIR uses to prove (in)equality of those
    /// parts, plus the `is_execution_shard` / `is_first_execution_shard` flags.
    pub fn finalize_public_values<F: PrimeField32>(&mut self, is_execution_shard: bool) {
        let state = &mut self.public_values;
        state.is_execution_shard = is_execution_shard as u32;

        // Split each timestamp at bit 24: `high` is everything above bit 24, `low` the
        // bottom 24 bits.
        let initial_timestamp_high = (state.initial_timestamp >> 24) as u32;
        let initial_timestamp_low = (state.initial_timestamp & 0xFFFFFF) as u32;
        let last_timestamp_high = (state.last_timestamp >> 24) as u32;
        let last_timestamp_low = (state.last_timestamp & 0xFFFFFF) as u32;

        // Witness for `(high + low - 1)^{-1}`. When the initial timestamp is exactly 1
        // (high = 0, low = 1) the expression is zero and has no inverse, so 0 is stored.
        state.initial_timestamp_inv = if state.initial_timestamp == 1 {
            0
        } else {
            F::from_canonical_u32(initial_timestamp_high + initial_timestamp_low - 1)
                .inverse()
                .as_canonical_u32()
        };

        // Same witness for the last timestamp. NOTE(review): unlike above, there is no
        // guard for `last_timestamp == 1`, where `high + low - 1 == 0` and `inverse()` is
        // taken on zero — presumably the caller guarantees the last timestamp exceeds 1
        // here; confirm.
        state.last_timestamp_inv =
            F::from_canonical_u32(last_timestamp_high + last_timestamp_low - 1)
                .inverse()
                .as_canonical_u32();

        // Equality flag for the high parts and, when they differ, the inverse witness of
        // their (field) difference.
        if initial_timestamp_high == last_timestamp_high {
            state.is_timestamp_high_eq = 1;
        } else {
            state.is_timestamp_high_eq = 0;
            state.inv_timestamp_high = (F::from_canonical_u32(last_timestamp_high)
                - F::from_canonical_u32(initial_timestamp_high))
                .inverse()
                .as_canonical_u32();
        }

        // Same for the low 24-bit parts.
        if initial_timestamp_low == last_timestamp_low {
            state.is_timestamp_low_eq = 1;
        } else {
            state.is_timestamp_low_eq = 0;
            state.inv_timestamp_low = (F::from_canonical_u32(last_timestamp_low)
                - F::from_canonical_u32(initial_timestamp_low))
                .inverse()
                .as_canonical_u32();
        }
        // A shard whose initial timestamp is 1 is the first execution shard.
        state.is_first_execution_shard = (state.initial_timestamp == 1) as u32;
    }
1536}