use core::borrow::Borrow;
use p3_air::{Air, BaseAir, PairBuilder};
use p3_field::{extension::BinomiallyExtendable, Field, PrimeField32};
use p3_matrix::{dense::RowMajorMatrix, Matrix};
use sp1_core_machine::utils::next_power_of_two;
use sp1_derive::AlignedBorrow;
use sp1_stark::air::{ExtensionAirBuilder, MachineAir};
use std::iter::zip;

#[cfg(feature = "sys")]
use {
    p3_baby_bear::BabyBear, p3_field::AbstractField, p3_maybe_rayon::prelude::*,
    std::borrow::BorrowMut,
};

use crate::{builder::SP1RecursionAirBuilder, *};

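/// The number of extension-field ALU operations packed into a single trace row.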
pub const NUM_EXT_ALU_ENTRIES_PER_ROW: usize = 4;

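/// A chip that performs arithmetic (add, sub, mul, div) over the degree-`D` binomial extension
/// field in the recursion VM.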
#[derive(Default)]
pub struct ExtAluChip;

pub const NUM_EXT_ALU_COLS: usize = core::mem::size_of::<ExtAluCols<u8>>();

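/// The main trace columns: one row holds the operand and result values for
/// `NUM_EXT_ALU_ENTRIES_PER_ROW` operations.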
#[derive(AlignedBorrow, Debug, Clone, Copy)]
#[repr(C)]
pub struct ExtAluCols<F: Copy> {
    pub values: [ExtAluValueCols<F>; NUM_EXT_ALU_ENTRIES_PER_ROW],
}
pub const NUM_EXT_ALU_VALUE_COLS: usize = core::mem::size_of::<ExtAluValueCols<u8>>();

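/// The value columns for a single operation: the two inputs and the output, each an
/// extension-field element stored as a `Block`.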
#[derive(AlignedBorrow, Debug, Clone, Copy)]
#[repr(C)]
pub struct ExtAluValueCols<F: Copy> {
    pub vals: ExtAluIo<Block<F>>,
}

pub const NUM_EXT_ALU_PREPROCESSED_COLS: usize = core::mem::size_of::<ExtAluPreprocessedCols<u8>>();

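/// The preprocessed trace columns: one access record per operation in the row.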
#[derive(AlignedBorrow, Debug, Clone, Copy)]
#[repr(C)]
pub struct ExtAluPreprocessedCols<F: Copy> {
    pub accesses: [ExtAluAccessCols<F>; NUM_EXT_ALU_ENTRIES_PER_ROW],
}

pub const NUM_EXT_ALU_ACCESS_COLS: usize = core::mem::size_of::<ExtAluAccessCols<u8>>();

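/// The preprocessed columns for a single operation: the operand/result addresses, one selector
/// flag per opcode, and the multiplicity with which the result is written.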
#[derive(AlignedBorrow, Debug, Clone, Copy)]
#[repr(C)]
pub struct ExtAluAccessCols<F: Copy> {
    pub addrs: ExtAluIo<Address<F>>,
    pub is_add: F,
    pub is_sub: F,
    pub is_mul: F,
    pub is_div: F,
    pub mult: F,
}

impl<F: Field> BaseAir<F> for ExtAluChip {
    fn width(&self) -> usize {
        NUM_EXT_ALU_COLS
    }
}

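// Trace generation for this chip is delegated to the FFI backend in `crate::sys`, which is only
// available with the `sys` feature; without it, the `generate_*_trace` methods panic.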
impl<F: PrimeField32 + BinomiallyExtendable<D>> MachineAir<F> for ExtAluChip {
    type Record = ExecutionRecord<F>;

    type Program = crate::RecursionProgram<F>;

    fn name(&self) -> String {
        "ExtAlu".to_string()
    }

    fn preprocessed_width(&self) -> usize {
        NUM_EXT_ALU_PREPROCESSED_COLS
    }

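    // The preprocessed height: instructions are packed `NUM_EXT_ALU_ENTRIES_PER_ROW` per row,
    // then padded up to either the program's fixed height or the next power of two.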
    fn preprocessed_num_rows(&self, program: &Self::Program, instrs_len: usize) -> Option<usize> {
        let nb_rows = instrs_len.div_ceil(NUM_EXT_ALU_ENTRIES_PER_ROW);
        let fixed_log2_rows = program.fixed_log2_rows(self);
        Some(match fixed_log2_rows {
            Some(log2_rows) => 1 << log2_rows,
            None => next_power_of_two(nb_rows, None),
        })
    }

    #[cfg(not(feature = "sys"))]
    fn generate_preprocessed_trace(&self, _program: &Self::Program) -> Option<RowMajorMatrix<F>> {
        unimplemented!("To generate traces, enable feature `sp1-recursion-core/sys`");
    }

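    // Collects the `ExtAlu` instructions from the program and fills one `ExtAluAccessCols` slot
    // per instruction via the FFI routine; the transmutes rely on the runtime check that `F` is
    // `BabyBear`.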
    #[cfg(feature = "sys")]
    fn generate_preprocessed_trace(&self, program: &Self::Program) -> Option<RowMajorMatrix<F>> {
        assert_eq!(
            std::any::TypeId::of::<F>(),
            std::any::TypeId::of::<BabyBear>(),
            "generate_preprocessed_trace only supports BabyBear field"
        );

        let instrs = unsafe {
            std::mem::transmute::<Vec<&ExtAluInstr<F>>, Vec<&ExtAluInstr<BabyBear>>>(
                program
                    .inner
                    .iter()
                    .filter_map(|instruction| match instruction {
                        Instruction::ExtAlu(x) => Some(x),
                        _ => None,
                    })
                    .collect::<Vec<_>>(),
            )
        };
        let padded_nb_rows = self.preprocessed_num_rows(program, instrs.len()).unwrap();
        let mut values = vec![BabyBear::zero(); padded_nb_rows * NUM_EXT_ALU_PREPROCESSED_COLS];

        let populate_len = instrs.len() * NUM_EXT_ALU_ACCESS_COLS;
        values[..populate_len].par_chunks_mut(NUM_EXT_ALU_ACCESS_COLS).zip_eq(instrs).for_each(
            |(row, instr)| {
                let access: &mut ExtAluAccessCols<_> = row.borrow_mut();
                unsafe {
                    crate::sys::alu_ext_instr_to_row_babybear(instr, access);
                }
            },
        );

        Some(RowMajorMatrix::new(
            unsafe { std::mem::transmute::<Vec<BabyBear>, Vec<F>>(values) },
            NUM_EXT_ALU_PREPROCESSED_COLS,
        ))
    }

    fn generate_dependencies(&self, _: &Self::Record, _: &mut Self::Record) {
        // This chip has no dependencies to generate, so this is a no-op.
    }

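    // The main-trace height: events are packed `NUM_EXT_ALU_ENTRIES_PER_ROW` per row and padded
    // to the record's fixed height, if set, or to the next power of two.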
    fn num_rows(&self, input: &Self::Record) -> Option<usize> {
        let events = &input.ext_alu_events;
        let nb_rows = events.len().div_ceil(NUM_EXT_ALU_ENTRIES_PER_ROW);
        let fixed_log2_rows = input.fixed_log2_rows(self);
        Some(match fixed_log2_rows {
            Some(log2_rows) => 1 << log2_rows,
            None => next_power_of_two(nb_rows, None),
        })
    }

    #[cfg(not(feature = "sys"))]
    fn generate_trace(&self, _input: &Self::Record, _: &mut Self::Record) -> RowMajorMatrix<F> {
        unimplemented!("To generate traces, enable feature `sp1-recursion-core/sys`");
    }

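    // Fills one `ExtAluValueCols` slot per recorded event via the FFI routine; rows beyond
    // `populate_len` stay zero-initialized as padding.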
    #[cfg(feature = "sys")]
    fn generate_trace(&self, input: &Self::Record, _: &mut Self::Record) -> RowMajorMatrix<F> {
        assert_eq!(
            std::any::TypeId::of::<F>(),
            std::any::TypeId::of::<BabyBear>(),
            "generate_trace only supports BabyBear field"
        );

        let events = unsafe {
            std::mem::transmute::<&Vec<ExtAluIo<Block<F>>>, &Vec<ExtAluIo<Block<BabyBear>>>>(
                &input.ext_alu_events,
            )
        };
        let padded_nb_rows = self.num_rows(input).unwrap();
        let mut values = vec![BabyBear::zero(); padded_nb_rows * NUM_EXT_ALU_COLS];

        let populate_len = events.len() * NUM_EXT_ALU_VALUE_COLS;
        values[..populate_len].par_chunks_mut(NUM_EXT_ALU_VALUE_COLS).zip_eq(events).for_each(
            |(row, &vals)| {
                let cols: &mut ExtAluValueCols<_> = row.borrow_mut();
                unsafe {
                    crate::sys::alu_ext_event_to_row_babybear(&vals, cols);
                }
            },
        );

        RowMajorMatrix::new(
            unsafe { std::mem::transmute::<Vec<BabyBear>, Vec<F>>(values) },
            NUM_EXT_ALU_COLS,
        )
    }

    fn included(&self, _record: &Self::Record) -> bool {
        true
    }

    fn local_only(&self) -> bool {
        true
    }
}

impl<AB> Air<AB> for ExtAluChip
where
    AB: SP1RecursionAirBuilder + PairBuilder,
{
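    // For each of the `NUM_EXT_ALU_ENTRIES_PER_ROW` (value, access) column pairs, constrain the
    // selected arithmetic relation between `in1`, `in2`, and `out`, and wire the operands and
    // result into the memory argument via `receive_block`/`send_block`.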
    fn eval(&self, builder: &mut AB) {
        let main = builder.main();
        let local = main.row_slice(0);
        let local: &ExtAluCols<AB::Var> = (*local).borrow();
        let prep = builder.preprocessed();
        let prep_local = prep.row_slice(0);
        let prep_local: &ExtAluPreprocessedCols<AB::Var> = (*prep_local).borrow();

        for (
            ExtAluValueCols { vals },
            ExtAluAccessCols { addrs, is_add, is_sub, is_mul, is_div, mult },
        ) in zip(local.values, prep_local.accesses)
        {
            let in1 = vals.in1.as_extension::<AB>();
            let in2 = vals.in2.as_extension::<AB>();
            let out = vals.out.as_extension::<AB>();

            // The sum of the operation flags must be boolean: at most one operation is selected,
            // and padding entries leave every flag at zero.
            let is_real = is_add + is_sub + is_mul + is_div;
            builder.assert_bool(is_real.clone());

            // Subtraction and division are expressed without inverses by rearranging the defining
            // equation: `in1 - in2 = out` becomes `in1 = in2 + out`, and `in1 / in2 = out` becomes
            // `in1 = in2 * out`.
            builder.when(is_add).assert_ext_eq(in1.clone() + in2.clone(), out.clone());
            builder.when(is_sub).assert_ext_eq(in1.clone(), in2.clone() + out.clone());
            builder.when(is_mul).assert_ext_eq(in1.clone() * in2.clone(), out.clone());
            builder.when(is_div).assert_ext_eq(in1, in2 * out);

            // Consume the two operands at their addresses and produce the result at the output
            // address with multiplicity `mult`.
            builder.receive_block(addrs.in1, vals.in1, is_real.clone());

            builder.receive_block(addrs.in2, vals.in2, is_real);

            builder.send_block(addrs.out, vals.out, mult);
        }
    }
}

#[cfg(all(test, feature = "sys"))]
mod tests {
    use crate::{chips::test_fixtures, runtime::instruction as instr};
    use machine::tests::test_recursion_linear_program;
    use p3_baby_bear::BabyBear;
    use p3_field::{extension::BinomialExtensionField, AbstractExtensionField, AbstractField};
    use p3_matrix::dense::RowMajorMatrix;
    use rand::{rngs::StdRng, Rng, SeedableRng};
    use sp1_stark::StarkGenericConfig;
    use stark::BabyBearPoseidon2Outer;

    use super::*;

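    // A pure-Rust reference implementation of main trace generation, used to cross-check the FFI
    // (`sys`) implementation in the test below.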
    fn generate_trace_reference(
        input: &ExecutionRecord<BabyBear>,
        _: &mut ExecutionRecord<BabyBear>,
    ) -> RowMajorMatrix<BabyBear> {
        let events = &input.ext_alu_events;
        let padded_nb_rows = ExtAluChip.num_rows(input).unwrap();
        let mut values = vec![BabyBear::zero(); padded_nb_rows * NUM_EXT_ALU_COLS];

        let populate_len = events.len() * NUM_EXT_ALU_VALUE_COLS;
        values[..populate_len].par_chunks_mut(NUM_EXT_ALU_VALUE_COLS).zip_eq(events).for_each(
            |(row, &vals)| {
                let cols: &mut ExtAluValueCols<_> = row.borrow_mut();
                *cols = ExtAluValueCols { vals };
            },
        );

        RowMajorMatrix::new(values, NUM_EXT_ALU_COLS)
    }

    #[test]
    fn generate_trace() {
        let shard = test_fixtures::shard();
        let mut execution_record = test_fixtures::default_execution_record();
        let trace = ExtAluChip.generate_trace(&shard, &mut execution_record);
        assert!(trace.height() >= test_fixtures::MIN_TEST_CASES);

        assert_eq!(trace, generate_trace_reference(&shard, &mut execution_record));
    }

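    // A pure-Rust reference implementation of preprocessed trace generation, mirroring
    // `generate_preprocessed_trace`.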
    fn generate_preprocessed_trace_reference(
        program: &RecursionProgram<BabyBear>,
    ) -> RowMajorMatrix<BabyBear> {
        type F = BabyBear;

        let instrs = program
            .inner
            .iter()
            .filter_map(|instruction| match instruction {
                Instruction::ExtAlu(x) => Some(x),
                _ => None,
            })
            .collect::<Vec<_>>();
        let padded_nb_rows = ExtAluChip.preprocessed_num_rows(program, instrs.len()).unwrap();
        let mut values = vec![F::zero(); padded_nb_rows * NUM_EXT_ALU_PREPROCESSED_COLS];

        let populate_len = instrs.len() * NUM_EXT_ALU_ACCESS_COLS;
        values[..populate_len].par_chunks_mut(NUM_EXT_ALU_ACCESS_COLS).zip_eq(instrs).for_each(
            |(row, instr)| {
                let ExtAluInstr { opcode, mult, addrs } = instr;
                let access: &mut ExtAluAccessCols<_> = row.borrow_mut();
                *access = ExtAluAccessCols {
                    addrs: addrs.to_owned(),
                    is_add: F::from_bool(false),
                    is_sub: F::from_bool(false),
                    is_mul: F::from_bool(false),
                    is_div: F::from_bool(false),
                    mult: mult.to_owned(),
                };
                let target_flag = match opcode {
                    ExtAluOpcode::AddE => &mut access.is_add,
                    ExtAluOpcode::SubE => &mut access.is_sub,
                    ExtAluOpcode::MulE => &mut access.is_mul,
                    ExtAluOpcode::DivE => &mut access.is_div,
                };
                *target_flag = F::from_bool(true);
            },
        );

        RowMajorMatrix::new(values, NUM_EXT_ALU_PREPROCESSED_COLS)
    }

    #[test]
    #[ignore = "Failing due to merge conflicts. Will be fixed shortly."]
    fn generate_preprocessed_trace() {
        let program = test_fixtures::program();
        let trace = ExtAluChip.generate_preprocessed_trace(&program).unwrap();
        assert!(trace.height() >= test_fixtures::MIN_TEST_CASES);

        assert_eq!(trace, generate_preprocessed_trace_reference(&program));
    }

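    // End-to-end check: for 1000 random operand pairs, write the operands to memory, run all four
    // extension-field ALU ops, and read back the expected results.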
    #[test]
    pub fn four_ops() {
        type SC = BabyBearPoseidon2Outer;
        type F = <SC as StarkGenericConfig>::Val;

        let mut rng = StdRng::seed_from_u64(0xDEADBEEF);
        let mut random_extfelt = move || {
            let inner: [F; 4] = core::array::from_fn(|_| rng.sample(rand::distributions::Standard));
            BinomialExtensionField::<F, D>::from_base_slice(&inner)
        };
        let mut addr = 0;

        let instructions = (0..1000)
            .flat_map(|_| {
                let quot = random_extfelt();
                let in2 = random_extfelt();
                let in1 = in2 * quot;
                let alloc_size = 6;
                let a = (0..alloc_size).map(|x| x + addr).collect::<Vec<_>>();
                addr += alloc_size;
                [
                    instr::mem_ext(MemAccessKind::Write, 4, a[0], in1),
                    instr::mem_ext(MemAccessKind::Write, 4, a[1], in2),
                    instr::ext_alu(ExtAluOpcode::AddE, 1, a[2], a[0], a[1]),
                    instr::mem_ext(MemAccessKind::Read, 1, a[2], in1 + in2),
                    instr::ext_alu(ExtAluOpcode::SubE, 1, a[3], a[0], a[1]),
                    instr::mem_ext(MemAccessKind::Read, 1, a[3], in1 - in2),
                    instr::ext_alu(ExtAluOpcode::MulE, 1, a[4], a[0], a[1]),
                    instr::mem_ext(MemAccessKind::Read, 1, a[4], in1 * in2),
                    instr::ext_alu(ExtAluOpcode::DivE, 1, a[5], a[0], a[1]),
                    instr::mem_ext(MemAccessKind::Read, 1, a[5], quot),
                ]
            })
            .collect::<Vec<Instruction<F>>>();

        test_recursion_linear_program(instructions);
    }
}