//! sp1_core_machine/bytes/trace.rs — trace generation for the byte chip.

use std::mem::MaybeUninit;

use slop_algebra::PrimeField32;
use sp1_core_executor::{events::ByteRecord, ByteOpcode, ExecutionRecord, Program};
use sp1_hypercube::air::{MachineAir, PV_DIGEST_NUM_WORDS};

use super::{
    columns::{NUM_BYTE_MULT_COLS, NUM_BYTE_PREPROCESSED_COLS},
    ByteChip,
};
11
/// Height of the byte table: one row for every `(b, c)` pair of `u8` operands (2^16 rows).
pub const NUM_ROWS: usize = 1 << 16;
13
14impl<F: PrimeField32> MachineAir<F> for ByteChip<F> {
15    type Record = ExecutionRecord;
16
17    type Program = Program;
18
19    fn name(&self) -> &'static str {
20        "Byte"
21    }
22
23    fn num_rows(&self, _: &Self::Record) -> Option<usize> {
24        Some(NUM_ROWS)
25    }
26
27    fn preprocessed_width(&self) -> usize {
28        NUM_BYTE_PREPROCESSED_COLS
29    }
30
31    fn preprocessed_num_rows(&self, _program: &Self::Program) -> Option<usize> {
32        Some(NUM_ROWS)
33    }
34
35    fn preprocessed_num_rows_with_instrs_len(
36        &self,
37        _program: &Self::Program,
38        _instrs_len: usize,
39    ) -> Option<usize> {
40        Some(NUM_ROWS)
41    }
42
43    fn generate_preprocessed_trace_into(&self, _: &Self::Program, buffer: &mut [MaybeUninit<F>]) {
44        Self::trace(buffer)
45    }
46
47    fn generate_dependencies(&self, input: &ExecutionRecord, output: &mut ExecutionRecord) {
48        let initial_timestamp_1 = ((input.public_values.initial_timestamp >> 24) & 0xFF) as u8;
49        let initial_timestamp_2 = ((input.public_values.initial_timestamp >> 16) & 0xFF) as u8;
50        let last_timestamp_1 = ((input.public_values.last_timestamp >> 24) & 0xFF) as u8;
51        let last_timestamp_2 = ((input.public_values.last_timestamp >> 16) & 0xFF) as u8;
52
53        output.add_u8_range_check(initial_timestamp_1, initial_timestamp_2);
54        output.add_u8_range_check(last_timestamp_1, last_timestamp_2);
55        for i in 0..PV_DIGEST_NUM_WORDS {
56            output.add_u8_range_checks(&u32::to_le_bytes(
57                input.public_values.prev_committed_value_digest[i],
58            ));
59            output.add_u8_range_checks(&u32::to_le_bytes(
60                input.public_values.committed_value_digest[i],
61            ));
62        }
63    }
64
65    fn generate_trace_into(
66        &self,
67        input: &ExecutionRecord,
68        _output: &mut ExecutionRecord,
69        buffer: &mut [MaybeUninit<F>],
70    ) {
71        let buffer_ptr = buffer.as_mut_ptr() as *mut F;
72        let values =
73            unsafe { core::slice::from_raw_parts_mut(buffer_ptr, NUM_BYTE_MULT_COLS * NUM_ROWS) };
74        unsafe {
75            core::ptr::write_bytes(values.as_mut_ptr(), 0, NUM_BYTE_MULT_COLS * NUM_ROWS);
76        }
77
78        for (lookup, mult) in input.byte_lookups.iter() {
79            if lookup.opcode == ByteOpcode::Range {
80                continue;
81            }
82            let row = (((lookup.b as u16) << 8) + lookup.c as u16) as usize;
83            let index = lookup.opcode as usize;
84            values[row * NUM_BYTE_MULT_COLS + index] = F::from_canonical_usize(*mult);
85        }
86    }
87
88    fn included(&self, _shard: &Self::Record) -> bool {
89        true
90    }
91}