1use cfg_if::cfg_if;
3use core::fmt::Debug;
4use instruction::{
5 FieldEltType, HintAddCurveInstr, HintBitsInstr, HintExt2FeltsInstr, HintInstr, PrintInstr,
6};
7use itertools::Itertools;
8use slop_algebra::{AbstractExtensionField, AbstractField, Field, PrimeField64};
9#[cfg(feature = "debug")]
10use sp1_core_machine::utils::SpanBuilder;
11use sp1_hypercube::septic_curve::SepticCurve;
12use sp1_primitives::{SP1ExtensionField, SP1Field};
13use sp1_recursion_executor::{
14 BaseAluInstr, BaseAluOpcode, Block, RecursionPublicValues, PERMUTATION_WIDTH,
15 RECURSIVE_PROOF_NUM_PV_ELTS,
16};
17use std::{
18 borrow::{Borrow, Cow},
19 collections::HashMap,
20 mem::transmute,
21};
22use vec_map::VecMap;
23
24use sp1_recursion_executor::*;
25
26use crate::prelude::*;
27
/// Compiles DSL IR into recursion-VM instructions, tracking physical address
/// allocation and per-address usage counts ("multiplicities") that are
/// backfilled into the emitted instructions after compilation.
#[derive(Debug, Clone, Default)]
pub struct AsmCompiler {
    // Next unallocated physical address; advanced by `alloc`.
    next_addr: SP1Field,
    // Map from virtual (DSL) address to its allocated physical address.
    virtual_to_physical: VecMap<Address<SP1Field>>,
    // Map from immediate value to its allocated address and read multiplicity.
    consts: HashMap<Imm<SP1Field, SP1ExtensionField>, (Address<SP1Field>, SP1Field)>,
    // Map from physical address (as usize) to its read multiplicity.
    addr_to_mult: VecMap<SP1Field>,
}
39
40impl AsmCompiler
41where
42 SP1Field: PrimeField64,
43{
44 pub fn alloc(next_addr: &mut SP1Field) -> Address<SP1Field> {
46 let id = Address(*next_addr);
47 *next_addr += SP1Field::one();
48 if next_addr.is_zero() {
49 panic!("out of address space");
50 }
51 id
52 }
53
    /// Reads the physical address mapped to virtual address `vaddr` without
    /// bumping its multiplicity (a "ghost" read).
    pub fn read_ghost_vaddr(&mut self, vaddr: usize) -> Address<SP1Field> {
        self.read_vaddr_internal(vaddr, false)
    }

    /// Reads the physical address mapped to virtual address `vaddr`,
    /// incrementing its multiplicity by one.
    pub fn read_vaddr(&mut self, vaddr: usize) -> Address<SP1Field> {
        self.read_vaddr_internal(vaddr, true)
    }

    /// Shared implementation of the virtual-address reads.
    ///
    /// Panics if `vaddr` was never written (no `virtual_to_physical` entry),
    /// or if its physical address has no multiplicity entry when
    /// `increment_mult` is set.
    #[allow(clippy::uninlined_format_args)]
    pub fn read_vaddr_internal(&mut self, vaddr: usize, increment_mult: bool) -> Address<SP1Field> {
        use vec_map::Entry;
        match self.virtual_to_physical.entry(vaddr) {
            Entry::Vacant(_) => panic!("expected entry: virtual_physical[{vaddr:?}]"),
            Entry::Occupied(entry) => {
                if increment_mult {
                    // Record the read so the multiplicity can be backfilled later.
                    match self.addr_to_mult.get_mut(entry.get().as_usize()) {
                        Some(mult) => *mult += SP1Field::one(),
                        None => panic!("expected entry: virtual_physical[{vaddr:?}]"),
                    }
                }
                *entry.into_mut()
            }
        }
    }
85
    /// Maps virtual address `vaddr` to a freshly allocated physical address.
    ///
    /// The new address starts at multiplicity zero; subsequent reads bump it.
    /// Panics if `vaddr` was already written (single-assignment discipline) or
    /// if the fresh address unexpectedly already has a multiplicity entry.
    pub fn write_fp(&mut self, vaddr: usize) -> Address<SP1Field> {
        use vec_map::Entry;
        match self.virtual_to_physical.entry(vaddr) {
            Entry::Vacant(entry) => {
                let addr = Self::alloc(&mut self.next_addr);
                // A freshly allocated address must be new to `addr_to_mult`.
                if let Some(x) = self.addr_to_mult.insert(addr.as_usize(), SP1Field::zero()) {
                    panic!("unexpected entry in addr_to_mult: {x:?}");
                }
                *entry.insert(addr)
            }
            Entry::Occupied(entry) => {
                panic!("unexpected entry: virtual_to_physical[{:?}] = {:?}", vaddr, entry.get())
            }
        }
    }
105
    /// Reads physical address `addr`, incrementing its multiplicity.
    /// Returns a mutable reference to the multiplicity counter.
    pub fn read_addr(&mut self, addr: Address<SP1Field>) -> &mut SP1Field {
        self.read_addr_internal(addr, true)
    }

    /// "Ghost" read of physical address `addr`.
    ///
    /// NOTE(review): this passes `increment_mult = true`, unlike
    /// `read_ghost_vaddr` which passes `false` — confirm the asymmetry is
    /// intentional before relying on it.
    pub fn read_ghost_addr(&mut self, addr: Address<SP1Field>) -> &mut SP1Field {
        self.read_addr_internal(addr, true)
    }

    /// Shared implementation of the physical-address reads.
    ///
    /// Panics if `addr` has no multiplicity entry, i.e. it was never written.
    fn read_addr_internal(
        &mut self,
        addr: Address<SP1Field>,
        increment_mult: bool,
    ) -> &mut SP1Field {
        use vec_map::Entry;
        match self.addr_to_mult.entry(addr.as_usize()) {
            Entry::Vacant(_) => panic!("expected entry: addr_to_mult[{:?}]", addr.as_usize()),
            Entry::Occupied(entry) => {
                let mult = entry.into_mut();
                if increment_mult {
                    *mult += SP1Field::one();
                }
                mult
            }
        }
    }
138
    /// Registers physical address `addr` as written, with multiplicity zero,
    /// and returns a mutable reference to the new multiplicity counter.
    ///
    /// Panics if `addr` already has a multiplicity entry (double write).
    pub fn write_addr(&mut self, addr: Address<SP1Field>) -> &mut SP1Field {
        use vec_map::Entry;
        match self.addr_to_mult.entry(addr.as_usize()) {
            Entry::Vacant(entry) => entry.insert(SP1Field::zero()),
            Entry::Occupied(entry) => {
                panic!("unexpected entry: addr_to_mult[{:?}] = {:?}", addr.as_usize(), entry.get())
            }
        }
    }
151
    /// Reads the address holding constant `imm`, allocating it on first use.
    ///
    /// Each read bumps the constant's multiplicity; a newly allocated constant
    /// starts at multiplicity one (this first read).
    pub fn read_const(&mut self, imm: Imm<SP1Field, SP1ExtensionField>) -> Address<SP1Field> {
        self.consts
            .entry(imm)
            .and_modify(|(_, x)| *x += SP1Field::one())
            .or_insert_with(|| (Self::alloc(&mut self.next_addr), SP1Field::one()))
            .0
    }

    /// Reads the address holding constant `imm` without bumping its
    /// multiplicity; a newly allocated constant starts at multiplicity zero.
    pub fn read_ghost_const(&mut self, imm: Imm<SP1Field, SP1ExtensionField>) -> Address<SP1Field> {
        self.consts
            .entry(imm)
            .or_insert_with(|| (Self::alloc(&mut self.next_addr), SP1Field::zero()))
            .0
    }
172
    /// Emits a memory-write instruction that stores the immediate `src` at
    /// `dst`. The multiplicity starts at zero and is backfilled later.
    fn mem_write_const(
        &mut self,
        dst: impl Reg,
        src: Imm<SP1Field, SP1ExtensionField>,
    ) -> Instruction<SP1Field> {
        Instruction::Mem(MemInstr {
            addrs: MemIo { inner: dst.write(self) },
            vals: MemIo { inner: src.as_block() },
            mult: SP1Field::zero(),
            kind: MemAccessKind::Write,
        })
    }
185
    /// Emits a base-field ALU instruction computing `dst = lhs <opcode> rhs`.
    ///
    /// Note: `dst.write` is evaluated before the operand reads; the struct
    /// field order fixes the order of any address allocations these perform.
    fn base_alu(
        &mut self,
        opcode: BaseAluOpcode,
        dst: impl Reg,
        lhs: impl Reg,
        rhs: impl Reg,
    ) -> Instruction<SP1Field> {
        Instruction::BaseAlu(BaseAluInstr {
            opcode,
            mult: SP1Field::zero(),
            addrs: BaseAluIo { out: dst.write(self), in1: lhs.read(self), in2: rhs.read(self) },
        })
    }

    /// Emits an extension-field ALU instruction computing
    /// `dst = lhs <opcode> rhs`. Same evaluation-order caveat as `base_alu`.
    fn ext_alu(
        &mut self,
        opcode: ExtAluOpcode,
        dst: impl Reg,
        lhs: impl Reg,
        rhs: impl Reg,
    ) -> Instruction<SP1Field> {
        Instruction::ExtAlu(ExtAluInstr {
            opcode,
            mult: SP1Field::zero(),
            addrs: ExtAluIo { out: dst.write(self), in1: lhs.read(self), in2: rhs.read(self) },
        })
    }
213
    /// Asserts `lhs == rhs` over the base field.
    ///
    /// Emits `diff = lhs - rhs` then `out = diff / 0`.
    /// NOTE(review): the zero divisor appears to rely on the DivF constraint
    /// being satisfiable only when the dividend is zero — confirm against the
    /// executor/AIR division semantics.
    fn base_assert_eq(
        &mut self,
        lhs: impl Reg,
        rhs: impl Reg,
        mut f: impl FnMut(Instruction<SP1Field>),
    ) {
        use BaseAluOpcode::*;
        let [diff, out] = core::array::from_fn(|_| Self::alloc(&mut self.next_addr));
        f(self.base_alu(SubF, diff, lhs, rhs));
        f(self.base_alu(DivF, out, diff, Imm::F(SP1Field::zero())));
    }

    /// Asserts `lhs != rhs` over the base field.
    ///
    /// Emits `diff = lhs - rhs` then `out = 1 / diff`; the division is only
    /// satisfiable when `diff` is invertible, i.e. nonzero.
    fn base_assert_ne(
        &mut self,
        lhs: impl Reg,
        rhs: impl Reg,
        mut f: impl FnMut(Instruction<SP1Field>),
    ) {
        use BaseAluOpcode::*;
        let [diff, out] = core::array::from_fn(|_| Self::alloc(&mut self.next_addr));

        f(self.base_alu(SubF, diff, lhs, rhs));
        f(self.base_alu(DivF, out, Imm::F(SP1Field::one()), diff));
    }

    /// Asserts `lhs == rhs` over the extension field; see `base_assert_eq`
    /// for the divide-by-zero encoding.
    fn ext_assert_eq(
        &mut self,
        lhs: impl Reg,
        rhs: impl Reg,
        mut f: impl FnMut(Instruction<SP1Field>),
    ) {
        use ExtAluOpcode::*;
        let [diff, out] = core::array::from_fn(|_| Self::alloc(&mut self.next_addr));

        f(self.ext_alu(SubE, diff, lhs, rhs));
        f(self.ext_alu(DivE, out, diff, Imm::EF(SP1ExtensionField::zero())));
    }

    /// Asserts `lhs != rhs` over the extension field; see `base_assert_ne`.
    fn ext_assert_ne(
        &mut self,
        lhs: impl Reg,
        rhs: impl Reg,
        mut f: impl FnMut(Instruction<SP1Field>),
    ) {
        use ExtAluOpcode::*;
        let [diff, out] = core::array::from_fn(|_| Self::alloc(&mut self.next_addr));

        f(self.ext_alu(SubE, diff, lhs, rhs));
        f(self.ext_alu(DivE, out, Imm::EF(SP1ExtensionField::one()), diff));
    }
264
    /// Emits a chip instruction decomposing the extension element at `src`
    /// into its `D` base-field limbs `dst`. The address layout is
    /// `[src, dst[0], .., dst[D-1]]` with one multiplicity slot per address.
    #[inline(always)]
    fn ext2felt_chip(&mut self, dst: [impl Reg; D], src: impl Reg) -> Instruction<SP1Field> {
        Instruction::ExtFelt(ExtFeltInstr {
            addrs: [
                src.read(self),
                dst[0].write(self),
                dst[1].write(self),
                dst[2].write(self),
                dst[3].write(self),
            ],
            mults: [SP1Field::zero(); D + 1],
            ext2felt: true,
        })
    }

    /// Emits the inverse chip instruction: packs the `D` base-field limbs
    /// `src` into the extension element at `dst`. The address layout is
    /// `[dst, src[0], .., src[D-1]]`.
    #[inline(always)]
    fn felt2ext_chip(&mut self, dst: impl Reg, src: [impl Reg; D]) -> Instruction<SP1Field> {
        Instruction::ExtFelt(ExtFeltInstr {
            addrs: [
                dst.write(self),
                src[0].read(self),
                src[1].read(self),
                src[2].read(self),
                src[3].read(self),
            ],
            mults: [SP1Field::zero(); D + 1],
            ext2felt: false,
        })
    }
294
    /// Emits a full Poseidon2 permutation instruction: reads the
    /// `PERMUTATION_WIDTH` inputs from `src` and writes the permuted state to
    /// `dst`.
    #[inline(always)]
    fn poseidon2_permute(
        &mut self,
        dst: [impl Reg; PERMUTATION_WIDTH],
        src: [impl Reg; PERMUTATION_WIDTH],
    ) -> Instruction<SP1Field> {
        Instruction::Poseidon2(Box::new(Poseidon2Instr {
            addrs: Poseidon2Io {
                input: src.map(|r| r.read(self)),
                output: dst.map(|r| r.write(self)),
            },
            mults: [SP1Field::zero(); PERMUTATION_WIDTH],
        }))
    }

    /// Emits the external linear-layer instruction of the Poseidon2
    /// permutation (operates on `PERMUTATION_WIDTH / D` packed elements).
    #[inline(always)]
    fn poseidon2_external_linear_layer(
        &mut self,
        dst: [impl Reg; PERMUTATION_WIDTH / D],
        src: [impl Reg; PERMUTATION_WIDTH / D],
    ) -> Instruction<SP1Field> {
        Instruction::Poseidon2LinearLayer(Box::new(Poseidon2LinearLayerInstr {
            addrs: Poseidon2LinearLayerIo {
                input: src.map(|r| r.read(self)),
                output: dst.map(|r| r.write(self)),
            },
            mults: [SP1Field::zero(); PERMUTATION_WIDTH / D],
            external: true,
        }))
    }

    /// Emits the internal linear-layer instruction of the Poseidon2
    /// permutation; identical to the external variant except for the flag.
    #[inline(always)]
    fn poseidon2_internal_linear_layer(
        &mut self,
        dst: [impl Reg; PERMUTATION_WIDTH / D],
        src: [impl Reg; PERMUTATION_WIDTH / D],
    ) -> Instruction<SP1Field> {
        Instruction::Poseidon2LinearLayer(Box::new(Poseidon2LinearLayerInstr {
            addrs: Poseidon2LinearLayerIo {
                input: src.map(|r| r.read(self)),
                output: dst.map(|r| r.write(self)),
            },
            mults: [SP1Field::zero(); PERMUTATION_WIDTH / D],
            external: false,
        }))
    }
341
    /// Emits the external-round S-box instruction of the Poseidon2
    /// permutation for a single element.
    #[inline(always)]
    fn poseidon2_external_sbox(&mut self, dst: impl Reg, src: impl Reg) -> Instruction<SP1Field> {
        Instruction::Poseidon2SBox(Poseidon2SBoxInstr {
            addrs: Poseidon2SBoxIo { input: src.read(self), output: dst.write(self) },
            mults: SP1Field::zero(),
            external: true,
        })
    }

    /// Emits the internal-round S-box instruction; identical to the external
    /// variant except for the flag.
    #[inline(always)]
    fn poseidon2_internal_sbox(&mut self, dst: impl Reg, src: impl Reg) -> Instruction<SP1Field> {
        Instruction::Poseidon2SBox(Poseidon2SBoxInstr {
            addrs: Poseidon2SBoxIo { input: src.read(self), output: dst.write(self) },
            mults: SP1Field::zero(),
            external: false,
        })
    }
359
    /// Emits a select instruction: based on `bit`, routes `lhs`/`rhs` to the
    /// two outputs `dst1`/`dst2`. Each output has its own multiplicity.
    #[inline(always)]
    fn select(
        &mut self,
        bit: impl Reg,
        dst1: impl Reg,
        dst2: impl Reg,
        lhs: impl Reg,
        rhs: impl Reg,
    ) -> Instruction<SP1Field> {
        Instruction::Select(SelectInstr {
            addrs: SelectIo {
                bit: bit.read(self),
                out1: dst1.write(self),
                out2: dst2.write(self),
                in1: lhs.read(self),
                in2: rhs.read(self),
            },
            mult1: SP1Field::zero(),
            mult2: SP1Field::zero(),
        })
    }
381
    /// Emits a hint instruction that bit-decomposes `value` into the `output`
    /// registers. The input is ghost-read (the decomposition is supplied as a
    /// hint, not constrained here); output multiplicities start at zero.
    fn hint_bit_decomposition(
        &mut self,
        value: impl Reg,
        output: impl IntoIterator<Item = impl Reg>,
    ) -> Instruction<SP1Field> {
        Instruction::HintBits(HintBitsInstr {
            output_addrs_mults: output
                .into_iter()
                .map(|r| (r.write(self), SP1Field::zero()))
                .collect(),
            input_addr: value.read_ghost(self),
        })
    }
395
    /// Emits a hint instruction for septic-curve point addition:
    /// `output = input1 + input2`. Output coordinates are written (mults start
    /// at zero); both inputs' coordinates are ghost-read.
    fn add_curve(
        &mut self,
        output: SepticCurve<Felt<SP1Field>>,
        input1: SepticCurve<Felt<SP1Field>>,
        input2: SepticCurve<Felt<SP1Field>>,
    ) -> Instruction<SP1Field> {
        Instruction::HintAddCurve(Box::new(HintAddCurveInstr {
            output_x_addrs_mults: output
                .x
                .0
                .into_iter()
                .map(|r| (r.write(self), SP1Field::zero()))
                .collect(),
            output_y_addrs_mults: output
                .y
                .0
                .into_iter()
                .map(|r| (r.write(self), SP1Field::zero()))
                .collect(),
            input1_x_addrs: input1.x.0.into_iter().map(|value| value.read_ghost(self)).collect(),
            input1_y_addrs: input1.y.0.into_iter().map(|value| value.read_ghost(self)).collect(),
            input2_x_addrs: input2.x.0.into_iter().map(|value| value.read_ghost(self)).collect(),
            input2_y_addrs: input2.y.0.into_iter().map(|value| value.read_ghost(self)).collect(),
        }))
    }
421
    /// Emits a prefix-sum-checks instruction.
    ///
    /// All accumulators are registered as writes; additionally every
    /// accumulator except the last is read once (the discarded `read` calls
    /// below exist purely to bump multiplicities for the chained uses).
    fn prefix_sum_checks(
        &mut self,
        zero: Felt<SP1Field>,
        one: Ext<SP1Field, SP1ExtensionField>,
        accs: Vec<Ext<SP1Field, SP1ExtensionField>>,
        field_accs: Vec<Felt<SP1Field>>,
        x1: Vec<Felt<SP1Field>>,
        x2: Vec<Ext<SP1Field, SP1ExtensionField>>,
    ) -> Instruction<SP1Field> {
        let acc_write_addrs: Vec<_> = accs.clone().into_iter().map(|r| r.write(self)).collect();
        let field_acc_write_addrs = field_accs.clone().into_iter().map(|r| r.write(self)).collect();
        // Multiplicity-only reads: each intermediate accumulator is consumed
        // by the next link in the chain.
        let _: Vec<_> = accs.iter().take(accs.len() - 1).map(|r| r.read(self)).collect();
        let _: Vec<_> =
            field_accs.iter().take(field_accs.len() - 1).map(|r| r.read(self)).collect();
        Instruction::PrefixSumChecks(Box::new(PrefixSumChecksInstr {
            addrs: PrefixSumChecksIo {
                zero: zero.read(self),
                one: one.read(self),
                x1: x1.into_iter().map(|r| r.read(self)).collect(),
                x2: x2.into_iter().map(|r| r.read(self)).collect(),
                accs: acc_write_addrs,
                field_accs: field_acc_write_addrs,
            },
            acc_mults: vec![SP1Field::zero(); accs.len()],
            field_acc_mults: vec![SP1Field::zero(); field_accs.len()],
        }))
    }
451
    /// Emits the commit-public-values instruction.
    ///
    /// The digest felts get a real (multiplicity-bumping) read; every public
    /// value is then ghost-read to obtain its address.
    fn commit_public_values(
        &mut self,
        public_values: &RecursionPublicValues<Felt<SP1Field>>,
    ) -> Instruction<SP1Field> {
        public_values.digest.iter().for_each(|x| {
            let _ = x.read(self);
        });
        // SAFETY(review): assumes `RecursionPublicValues<Felt<SP1Field>>` has
        // the same layout as `[Felt<SP1Field>; RECURSIVE_PROOF_NUM_PV_ELTS]`
        // — TODO confirm the type is `#[repr(C)]` (or equivalent) upstream.
        let pv_addrs = unsafe {
            transmute::<
                RecursionPublicValues<Felt<SP1Field>>,
                [Felt<SP1Field>; RECURSIVE_PROOF_NUM_PV_ELTS],
            >(*public_values)
        }
        .map(|pv| pv.read_ghost(self));

        // Reinterpret the flat address array back as the structured PV type.
        let public_values_a: &RecursionPublicValues<Address<SP1Field>> =
            pv_addrs.as_slice().borrow();
        Instruction::CommitPublicValues(Box::new(CommitPublicValuesInstr {
            pv_addrs: *public_values_a,
        }))
    }
473
474 fn print_f(&mut self, addr: impl Reg) -> Instruction<SP1Field> {
475 Instruction::Print(PrintInstr {
476 field_elt_type: FieldEltType::Base,
477 addr: addr.read_ghost(self),
478 })
479 }
480
481 fn print_e(&mut self, addr: impl Reg) -> Instruction<SP1Field> {
482 Instruction::Print(PrintInstr {
483 field_elt_type: FieldEltType::Extension,
484 addr: addr.read_ghost(self),
485 })
486 }
487
    /// Emits a hint instruction decomposing the extension element at `ext`
    /// into `D` felts. The input is ghost-read; output mults start at zero.
    fn ext2felts(&mut self, felts: [impl Reg; D], ext: impl Reg) -> Instruction<SP1Field> {
        Instruction::HintExt2Felts(HintExt2FeltsInstr {
            output_addrs_mults: felts.map(|r| (r.write(self), SP1Field::zero())),
            input_addr: ext.read_ghost(self),
        })
    }
494
495 fn hint(&mut self, output: impl Reg, len: usize) -> Instruction<SP1Field> {
496 let zero = SP1Field::zero();
497 Instruction::Hint(HintInstr {
498 output_addrs_mults: output
499 .write_many(self, len)
500 .into_iter()
501 .map(|a| (a, zero))
502 .collect(),
503 })
504 }
    /// Compiles a single DSL IR instruction, passing each emitted VM
    /// instruction (or a non-fatal signal such as cycle-tracker markers) to
    /// `consumer`.
    ///
    /// Panics on `DslIr::Parallel` (handled by `compile_raw_program`);
    /// unsupported instructions are reported via `CompileOneErr::Unsupported`.
    pub fn compile_one<C: Config<N = SP1Field>>(
        &mut self,
        ir_instr: DslIr<C>,
        mut consumer: impl FnMut(Result<Instruction<SP1Field>, CompileOneErr<C>>),
    ) {
        use BaseAluOpcode::*;
        use ExtAluOpcode::*;

        // Shorthand for the common success path.
        let mut f = |instr| consumer(Ok(instr));
        match ir_instr {
            // Immediates.
            DslIr::ImmV(dst, src) => f(self.mem_write_const(dst, Imm::F(src))),
            DslIr::ImmF(dst, src) => f(self.mem_write_const(dst, Imm::F(src))),
            DslIr::ImmE(dst, src) => f(self.mem_write_const(dst, Imm::EF(src))),

            // Addition.
            DslIr::AddV(dst, lhs, rhs) => f(self.base_alu(AddF, dst, lhs, rhs)),
            DslIr::AddVI(dst, lhs, rhs) => f(self.base_alu(AddF, dst, lhs, Imm::F(rhs))),
            DslIr::AddF(dst, lhs, rhs) => f(self.base_alu(AddF, dst, lhs, rhs)),
            DslIr::AddFI(dst, lhs, rhs) => f(self.base_alu(AddF, dst, lhs, Imm::F(rhs))),
            DslIr::AddE(dst, lhs, rhs) => f(self.ext_alu(AddE, dst, lhs, rhs)),
            DslIr::AddEI(dst, lhs, rhs) => f(self.ext_alu(AddE, dst, lhs, Imm::EF(rhs))),
            DslIr::AddEF(dst, lhs, rhs) => f(self.ext_alu(AddE, dst, lhs, rhs)),
            DslIr::AddEFFI(dst, lhs, rhs) => f(self.ext_alu(AddE, dst, lhs, Imm::EF(rhs))),

            // Subtraction (the *IN variants put the immediate on the left).
            DslIr::SubV(dst, lhs, rhs) => f(self.base_alu(SubF, dst, lhs, rhs)),
            DslIr::SubVI(dst, lhs, rhs) => f(self.base_alu(SubF, dst, lhs, Imm::F(rhs))),
            DslIr::SubVIN(dst, lhs, rhs) => f(self.base_alu(SubF, dst, Imm::F(lhs), rhs)),
            DslIr::SubF(dst, lhs, rhs) => f(self.base_alu(SubF, dst, lhs, rhs)),
            DslIr::SubFI(dst, lhs, rhs) => f(self.base_alu(SubF, dst, lhs, Imm::F(rhs))),
            DslIr::SubFIN(dst, lhs, rhs) => f(self.base_alu(SubF, dst, Imm::F(lhs), rhs)),
            DslIr::SubE(dst, lhs, rhs) => f(self.ext_alu(SubE, dst, lhs, rhs)),
            DslIr::SubEI(dst, lhs, rhs) => f(self.ext_alu(SubE, dst, lhs, Imm::EF(rhs))),
            DslIr::SubEIN(dst, lhs, rhs) => f(self.ext_alu(SubE, dst, Imm::EF(lhs), rhs)),
            DslIr::SubEF(dst, lhs, rhs) => f(self.ext_alu(SubE, dst, lhs, rhs)),

            // Multiplication.
            DslIr::MulV(dst, lhs, rhs) => f(self.base_alu(MulF, dst, lhs, rhs)),
            DslIr::MulVI(dst, lhs, rhs) => f(self.base_alu(MulF, dst, lhs, Imm::F(rhs))),
            DslIr::MulF(dst, lhs, rhs) => f(self.base_alu(MulF, dst, lhs, rhs)),
            DslIr::MulFI(dst, lhs, rhs) => f(self.base_alu(MulF, dst, lhs, Imm::F(rhs))),
            DslIr::MulE(dst, lhs, rhs) => f(self.ext_alu(MulE, dst, lhs, rhs)),
            DslIr::MulEI(dst, lhs, rhs) => f(self.ext_alu(MulE, dst, lhs, Imm::EF(rhs))),
            DslIr::MulEF(dst, lhs, rhs) => f(self.ext_alu(MulE, dst, lhs, rhs)),

            // Division.
            DslIr::DivF(dst, lhs, rhs) => f(self.base_alu(DivF, dst, lhs, rhs)),
            DslIr::DivFI(dst, lhs, rhs) => f(self.base_alu(DivF, dst, lhs, Imm::F(rhs))),
            DslIr::DivFIN(dst, lhs, rhs) => f(self.base_alu(DivF, dst, Imm::F(lhs), rhs)),
            DslIr::DivE(dst, lhs, rhs) => f(self.ext_alu(DivE, dst, lhs, rhs)),
            DslIr::DivEI(dst, lhs, rhs) => f(self.ext_alu(DivE, dst, lhs, Imm::EF(rhs))),
            DslIr::DivEIN(dst, lhs, rhs) => f(self.ext_alu(DivE, dst, Imm::EF(lhs), rhs)),
            DslIr::DivEF(dst, lhs, rhs) => f(self.ext_alu(DivE, dst, lhs, rhs)),

            // Negation and inversion, encoded as `0 - x` and `1 / x`.
            DslIr::NegV(dst, src) => f(self.base_alu(SubF, dst, Imm::F(SP1Field::zero()), src)),
            DslIr::NegF(dst, src) => f(self.base_alu(SubF, dst, Imm::F(SP1Field::zero()), src)),
            DslIr::NegE(dst, src) => {
                f(self.ext_alu(SubE, dst, Imm::EF(SP1ExtensionField::zero()), src))
            }
            DslIr::InvV(dst, src) => f(self.base_alu(DivF, dst, Imm::F(SP1Field::one()), src)),
            DslIr::InvF(dst, src) => f(self.base_alu(DivF, dst, Imm::F(SP1Field::one()), src)),
            DslIr::InvE(dst, src) => f(self.ext_alu(DivE, dst, Imm::F(SP1Field::one()), src)),

            DslIr::Select(bit, dst1, dst2, lhs, rhs) => f(self.select(bit, dst1, dst2, lhs, rhs)),

            // Equality / inequality assertions.
            DslIr::AssertEqV(lhs, rhs) => self.base_assert_eq(lhs, rhs, f),
            DslIr::AssertEqF(lhs, rhs) => self.base_assert_eq(lhs, rhs, f),
            DslIr::AssertEqE(lhs, rhs) => self.ext_assert_eq(lhs, rhs, f),
            DslIr::AssertEqVI(lhs, rhs) => self.base_assert_eq(lhs, Imm::F(rhs), f),
            DslIr::AssertEqFI(lhs, rhs) => self.base_assert_eq(lhs, Imm::F(rhs), f),
            DslIr::AssertEqEI(lhs, rhs) => self.ext_assert_eq(lhs, Imm::EF(rhs), f),

            DslIr::AssertNeV(lhs, rhs) => self.base_assert_ne(lhs, rhs, f),
            DslIr::AssertNeF(lhs, rhs) => self.base_assert_ne(lhs, rhs, f),
            DslIr::AssertNeE(lhs, rhs) => self.ext_assert_ne(lhs, rhs, f),
            DslIr::AssertNeVI(lhs, rhs) => self.base_assert_ne(lhs, Imm::F(rhs), f),
            DslIr::AssertNeFI(lhs, rhs) => self.base_assert_ne(lhs, Imm::F(rhs), f),
            DslIr::AssertNeEI(lhs, rhs) => self.ext_assert_ne(lhs, Imm::EF(rhs), f),

            // Chip-backed conversions and Poseidon2 building blocks.
            DslIr::CircuitChipExt2Felt(dst, src) => f(self.ext2felt_chip(dst, src)),
            DslIr::CircuitChipFelt2Ext(dst, src) => f(self.felt2ext_chip(dst, src)),
            DslIr::Poseidon2ExternalLinearLayer(data) => {
                f(self.poseidon2_external_linear_layer(data.0, data.1))
            }
            DslIr::Poseidon2InternalLinearLayer(data) => {
                f(self.poseidon2_internal_linear_layer(data.0, data.1))
            }
            DslIr::Poseidon2ExternalSBOX(dst, src) => f(self.poseidon2_external_sbox(dst, src)),
            DslIr::Poseidon2InternalSBOX(dst, src) => f(self.poseidon2_internal_sbox(dst, src)),

            DslIr::CircuitV2Poseidon2PermuteKoalaBear(data) => {
                f(self.poseidon2_permute(data.0, data.1))
            }
            DslIr::CircuitV2HintBitsF(output, value) => {
                f(self.hint_bit_decomposition(value, output))
            }
            DslIr::CircuitV2PrefixSumChecks(data) => {
                f(self.prefix_sum_checks(data.0, data.1, data.2, data.3, data.4, data.5))
            }
            DslIr::CircuitV2CommitPublicValues(public_values) => {
                f(self.commit_public_values(&public_values))
            }
            DslIr::CircuitV2HintAddCurve(data) => f(self.add_curve(data.0, data.1, data.2)),

            DslIr::Parallel(_) => {
                unreachable!("parallel case should have been handled by compile_raw_program")
            }

            // Debugging / hint / tracker plumbing.
            DslIr::PrintV(dst) => f(self.print_f(dst)),
            DslIr::PrintF(dst) => f(self.print_f(dst)),
            DslIr::PrintE(dst) => f(self.print_e(dst)),
            DslIr::DebugBacktrace(trace) => f(Instruction::DebugBacktrace(trace)),
            DslIr::CircuitV2HintFelts(output, len) => f(self.hint(output, len)),
            DslIr::CircuitV2HintExts(output, len) => f(self.hint(output, len)),
            DslIr::CircuitExt2Felt(felts, ext) => f(self.ext2felts(felts, ext)),
            DslIr::CycleTrackerV2Enter(name) => {
                consumer(Err(CompileOneErr::CycleTrackerEnter(name)))
            }
            DslIr::CycleTrackerV2Exit => consumer(Err(CompileOneErr::CycleTrackerExit)),
            // Reductions are a no-op at this level.
            DslIr::ReduceE(_) => {}
            instr => consumer(Err(CompileOneErr::Unsupported(instr))),
        }
    }
629
    /// Compiles a DSL block into a raw program, recursing into
    /// `DslIr::Parallel` sub-blocks and batching consecutive sequential
    /// instructions into basic blocks.
    ///
    /// `instrs_prefix` seeds the output (used by `compile_inner` to reserve a
    /// slot for constants). With the `debug` feature, cycle-tracker markers
    /// are folded into `cycle_tracker`; otherwise they are silently dropped.
    fn compile_raw_program<C: Config<N = SP1Field>>(
        &mut self,
        block: DslIrBlock<C>,
        instrs_prefix: Vec<SeqBlock<Instruction<SP1Field>>>,
        #[cfg(feature = "debug")] cycle_tracker: &mut SpanBuilder<Cow<'static, str>, &'static str>,
    ) -> RawProgram<Instruction<SP1Field>> {
        let mut seq_blocks = instrs_prefix;
        // Current run of sequential instructions, flushed on `Parallel`.
        let mut maybe_bb: Option<BasicBlock<Instruction<SP1Field>>> = None;

        for op in block.ops {
            match op {
                DslIr::Parallel(par_blocks) => {
                    // Flush the pending basic block before the parallel group.
                    seq_blocks.extend(maybe_bb.take().map(SeqBlock::Basic));
                    seq_blocks.push(SeqBlock::Parallel(
                        par_blocks
                            .into_iter()
                            .map(|b| {
                                cfg_if! {
                                    if #[cfg(feature = "debug")] {
                                        self.compile_raw_program(b, vec![], cycle_tracker)
                                    } else {
                                        self.compile_raw_program(b, vec![])
                                    }
                                }
                            })
                            .collect(),
                    ))
                }
                op => {
                    let bb = maybe_bb.get_or_insert_with(Default::default);
                    self.compile_one(op, |item| match item {
                        Ok(instr) => {
                            #[cfg(feature = "debug")]
                            {
                                cycle_tracker.item(instr_name(&instr));
                            }
                            bb.instrs.push(instr)
                        }
                        #[cfg(not(feature = "debug"))]
                        Err(
                            CompileOneErr::CycleTrackerEnter(_) | CompileOneErr::CycleTrackerExit,
                        ) => (),
                        #[cfg(feature = "debug")]
                        Err(CompileOneErr::CycleTrackerEnter(name)) => {
                            cycle_tracker.enter(name);
                        }
                        #[cfg(feature = "debug")]
                        Err(CompileOneErr::CycleTrackerExit) => {
                            cycle_tracker.exit().unwrap();
                        }
                        Err(CompileOneErr::Unsupported(instr)) => {
                            panic!("unsupported instruction: {instr:?}")
                        }
                    });
                }
            }
        }

        // Flush the trailing basic block, if any.
        seq_blocks.extend(maybe_bb.map(SeqBlock::Basic));

        RawProgram { seq_blocks }
    }
698
    /// Backfills the final multiplicity of every output address into its
    /// producing instruction, draining `addr_to_mult` in the process.
    ///
    /// Each address is removed exactly once (by the instruction that wrote
    /// it); the map must be empty afterwards, which the final debug assertion
    /// checks.
    fn backfill_all<'a>(&mut self, instrs: impl Iterator<Item = &'a mut Instruction<SP1Field>>) {
        // Moves the accumulated count for `addr` into the instruction's mult
        // slot; panics if the address has no entry (i.e. was never written or
        // was already backfilled).
        let mut backfill = |(mult, addr): (&mut SP1Field, &Address<SP1Field>)| {
            *mult = self.addr_to_mult.remove(addr.as_usize()).unwrap()
        };

        for asm_instr in instrs {
            // Only output (written) addresses carry a multiplicity to backfill.
            match asm_instr {
                Instruction::BaseAlu(BaseAluInstr {
                    mult,
                    addrs: BaseAluIo { out: ref addr, .. },
                    ..
                }) => backfill((mult, addr)),
                Instruction::ExtAlu(ExtAluInstr {
                    mult,
                    addrs: ExtAluIo { out: ref addr, .. },
                    ..
                }) => backfill((mult, addr)),
                Instruction::Mem(MemInstr {
                    addrs: MemIo { inner: ref addr },
                    mult,
                    kind: MemAccessKind::Write,
                    ..
                }) => backfill((mult, addr)),
                Instruction::ExtFelt(ExtFeltInstr { addrs, mults, ext2felt }) => {
                    // Slot 0 is the extension element, slots 1..=4 the felts;
                    // which side is written depends on the direction.
                    if *ext2felt {
                        backfill((&mut mults[1], &addrs[1]));
                        backfill((&mut mults[2], &addrs[2]));
                        backfill((&mut mults[3], &addrs[3]));
                        backfill((&mut mults[4], &addrs[4]));
                    } else {
                        backfill((&mut mults[0], &addrs[0]));
                    }
                }
                Instruction::Poseidon2(instr) => {
                    let Poseidon2Instr { addrs: Poseidon2Io { output: ref addrs, .. }, mults } =
                        instr.as_mut();
                    mults.iter_mut().zip(addrs).for_each(&mut backfill);
                }
                Instruction::Poseidon2LinearLayer(instr) => {
                    let Poseidon2LinearLayerInstr {
                        addrs: Poseidon2LinearLayerIo { output: ref addrs, .. },
                        mults,
                        ..
                    } = instr.as_mut();
                    mults.iter_mut().zip(addrs).for_each(&mut backfill);
                }
                Instruction::Poseidon2SBox(Poseidon2SBoxInstr {
                    addrs: Poseidon2SBoxIo { output: ref addr, .. },
                    mults,
                    ..
                }) => {
                    backfill((mults, addr));
                }
                Instruction::Select(SelectInstr {
                    addrs: SelectIo { out1: ref addr1, out2: ref addr2, .. },
                    mult1,
                    mult2,
                }) => {
                    backfill((mult1, addr1));
                    backfill((mult2, addr2));
                }
                Instruction::HintBits(HintBitsInstr { output_addrs_mults, .. })
                | Instruction::Hint(HintInstr { output_addrs_mults, .. }) => {
                    output_addrs_mults.iter_mut().for_each(|(addr, mult)| backfill((mult, addr)));
                }
                Instruction::PrefixSumChecks(instr) => {
                    let PrefixSumChecksInstr {
                        addrs: PrefixSumChecksIo { accs, field_accs, .. },
                        acc_mults,
                        field_acc_mults,
                    } = instr.as_mut();
                    acc_mults.iter_mut().zip(accs).for_each(|(mult, addr)| backfill((mult, addr)));
                    field_acc_mults
                        .iter_mut()
                        .zip(field_accs)
                        .for_each(|(mult, addr)| backfill((mult, addr)));
                }
                Instruction::HintExt2Felts(HintExt2FeltsInstr { output_addrs_mults, .. }) => {
                    output_addrs_mults.iter_mut().for_each(|(addr, mult)| backfill((mult, addr)));
                }
                Instruction::HintAddCurve(instr) => {
                    let HintAddCurveInstr { output_x_addrs_mults, output_y_addrs_mults, .. } =
                        instr.as_mut();
                    output_x_addrs_mults.iter_mut().for_each(|(addr, mult)| backfill((mult, addr)));
                    output_y_addrs_mults.iter_mut().for_each(|(addr, mult)| backfill((mult, addr)));
                }
                // These instructions write no addresses, so nothing to backfill.
                Instruction::Mem(MemInstr { kind: MemAccessKind::Read, .. })
                | Instruction::CommitPublicValues(_)
                | Instruction::Print(_)
                | Instruction::DebugBacktrace(_) => (),
            }
        }

        debug_assert!(self.addr_to_mult.is_empty());
    }
796
    /// Compiles a whole DSL program into an executable recursion program.
    pub fn compile<C: Config<N = SP1Field>>(
        &mut self,
        program: DslIrProgram<C>,
    ) -> RecursionProgram<SP1Field> {
        let inner = self.compile_inner(program.into_inner());
        // SAFETY(review): skips `RecursionProgram` validation; assumes the
        // compiler output always satisfies the type's invariants — TODO
        // confirm against `new_unchecked`'s documented contract.
        unsafe { RecursionProgram::new_unchecked(inner) }
    }

    /// Compiles the root DSL block: lowers instructions, backfills
    /// multiplicities, prepends the constant-initialization writes, and
    /// analyzes the result into a `RootProgram`.
    pub fn compile_inner<C: Config<N = SP1Field>>(
        &mut self,
        root_block: DslIrBlock<C>,
    ) -> RootProgram<SP1Field> {
        // An empty basic block is reserved up front so constants can be
        // prepended after compilation (see "prepend constants" below).
        let mut program = tracing::debug_span!("compile raw program").in_scope(|| {
            cfg_if! {
                if #[cfg(feature = "debug")] {
                    let mut cycle_tracker = SpanBuilder::new(Cow::Borrowed("cycle_tracker"));
                    let program = self.compile_raw_program(
                        root_block,
                        vec![SeqBlock::Basic(BasicBlock::default())],
                        &mut cycle_tracker,
                    );
                    let cycle_tracker_root_span = cycle_tracker.finish().unwrap();
                    for line in cycle_tracker_root_span.lines() {
                        tracing::info!("{}", line);
                    }
                    program
                } else {
                    self.compile_raw_program(
                        root_block,
                        vec![SeqBlock::Basic(BasicBlock::default())],
                    )
                }
            }
        });
        // Measured before backfilling drains `addr_to_mult`.
        let total_memory = self.addr_to_mult.len() + self.consts.len();
        tracing::debug_span!("backfill mult").in_scope(|| self.backfill_all(program.iter_mut()));

        tracing::debug_span!("prepend constants").in_scope(|| {
            // Fill the reserved first basic block with one write per constant,
            // ordered by allocated address for determinism.
            let Some(SeqBlock::Basic(BasicBlock { instrs: instrs_consts })) =
                program.seq_blocks.first_mut()
            else {
                unreachable!()
            };
            instrs_consts.extend(self.consts.drain().sorted_by_key(|x| x.1 .0 .0).map(
                |(imm, (addr, mult))| {
                    Instruction::Mem(MemInstr {
                        addrs: MemIo { inner: addr },
                        vals: MemIo { inner: imm.as_block() },
                        mult,
                        kind: MemAccessKind::Write,
                    })
                },
            ));
        });

        let (analyzed, counts) = program.analyze();

        RootProgram { inner: analyzed, total_memory, shape: None, event_counts: counts }
    }
868}
869
/// Returns a short static name for an instruction, used as the cycle-tracker
/// item label in debug builds.
#[cfg(feature = "debug")]
const fn instr_name<F>(instr: &Instruction<F>) -> &'static str {
    match instr {
        Instruction::BaseAlu(_) => "BaseAlu",
        Instruction::ExtAlu(_) => "ExtAlu",
        Instruction::Mem(_) => "Mem",
        Instruction::ExtFelt(_) => "ExtFelt",
        Instruction::Poseidon2(_) => "Poseidon2",
        Instruction::Poseidon2LinearLayer(_) => "Poseidon2LinearLayer",
        Instruction::Poseidon2SBox(_) => "Poseidon2SBox",
        Instruction::Select(_) => "Select",
        Instruction::HintBits(_) => "HintBits",
        Instruction::PrefixSumChecks(_) => "PrefixSumChecks",
        Instruction::Print(_) => "Print",
        Instruction::HintExt2Felts(_) => "HintExt2Felts",
        Instruction::Hint(_) => "Hint",
        Instruction::HintAddCurve(_) => "HintAddCurve",
        Instruction::CommitPublicValues(_) => "CommitPublicValues",
        Instruction::DebugBacktrace(_) => "DebugBacktrace",
    }
}
892
/// Non-`Ok` outcomes of `compile_one`, delivered through the consumer
/// callback rather than by panicking.
#[derive(Debug, Clone)]
pub enum CompileOneErr<C: Config> {
    /// The IR instruction has no lowering in this compiler.
    Unsupported(DslIr<C>),
    /// Cycle-tracker span entry marker (debug builds consume the name).
    CycleTrackerEnter(Cow<'static, str>),
    /// Cycle-tracker span exit marker.
    CycleTrackerExit,
}

/// An immediate: either a base-field or an extension-field constant.
/// Used as the key for constant deduplication in `AsmCompiler::consts`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Imm<F, EF> {
    /// Base-field immediate.
    F(F),
    /// Extension-field immediate.
    EF(EF),
}
911
912impl<F, EF> Imm<F, EF>
913where
914 F: AbstractField + Copy,
915 EF: AbstractExtensionField<F>,
916{
917 pub fn as_block(&self) -> Block<F> {
919 match self {
920 Imm::F(f) => Block::from(*f),
921 Imm::EF(ef) => ef.as_base_slice().into(),
922 }
923 }
924}
925
/// Abstraction over anything that can act as an operand of an instruction:
/// DSL variables, immediates, and raw addresses.
trait Reg {
    /// Returns the operand's address, bumping its read multiplicity.
    fn read(&self, compiler: &mut AsmCompiler) -> Address<SP1Field>;

    /// Returns the operand's address without bumping the multiplicity.
    fn read_ghost(&self, compiler: &mut AsmCompiler) -> Address<SP1Field>;

    /// Registers the operand as written and returns its address.
    fn write(&self, compiler: &mut AsmCompiler) -> Address<SP1Field>;

    /// Registers `len` consecutive writes starting at this operand.
    fn write_many(&self, compiler: &mut AsmCompiler, len: usize) -> Vec<Address<SP1Field>>;
}
939
// Implements `Reg` for DSL value types that carry a virtual address in their
// `idx` field, delegating to the compiler's virtual-address bookkeeping.
macro_rules! impl_reg_vaddr {
    ($a:ty) => {
        impl Reg for $a {
            fn read(&self, compiler: &mut AsmCompiler) -> Address<SP1Field> {
                compiler.read_vaddr(self.idx as usize)
            }
            fn read_ghost(&self, compiler: &mut AsmCompiler) -> Address<SP1Field> {
                compiler.read_ghost_vaddr(self.idx as usize)
            }
            fn write(&self, compiler: &mut AsmCompiler) -> Address<SP1Field> {
                compiler.write_fp(self.idx as usize)
            }

            fn write_many(&self, compiler: &mut AsmCompiler, len: usize) -> Vec<Address<SP1Field>> {
                // Consecutive virtual addresses starting at `idx`.
                (0..len).map(|i| compiler.write_fp((self.idx + i as u32) as usize)).collect()
            }
        }
    };
}

impl_reg_vaddr!(Var<SP1Field>);
impl_reg_vaddr!(Felt<SP1Field>);
impl_reg_vaddr!(Ext<SP1Field, SP1ExtensionField>);
964
// Immediates read through the constant pool; writing to them is a bug.
impl Reg for Imm<SP1Field, SP1ExtensionField> {
    fn read(&self, compiler: &mut AsmCompiler) -> Address<SP1Field> {
        compiler.read_const(*self)
    }

    fn read_ghost(&self, compiler: &mut AsmCompiler) -> Address<SP1Field> {
        compiler.read_ghost_const(*self)
    }

    fn write(&self, _compiler: &mut AsmCompiler) -> Address<SP1Field> {
        panic!("cannot write to immediate in register: {self:?}")
    }

    fn write_many(&self, _compiler: &mut AsmCompiler, _len: usize) -> Vec<Address<SP1Field>> {
        panic!("cannot write to immediate in register: {self:?}")
    }
}
982
// Raw physical addresses update their own multiplicity bookkeeping and
// return themselves.
impl Reg for Address<SP1Field> {
    fn read(&self, compiler: &mut AsmCompiler) -> Address<SP1Field> {
        compiler.read_addr(*self);
        *self
    }

    fn read_ghost(&self, compiler: &mut AsmCompiler) -> Address<SP1Field> {
        compiler.read_ghost_addr(*self);
        *self
    }

    fn write(&self, compiler: &mut AsmCompiler) -> Address<SP1Field> {
        compiler.write_addr(*self);
        *self
    }

    // Not needed for raw addresses so far; deliberately unimplemented.
    fn write_many(&self, _compiler: &mut AsmCompiler, _len: usize) -> Vec<Address<SP1Field>> {
        todo!()
    }
}
1003
1004#[cfg(test)]
1005mod tests {
1006 #![allow(clippy::print_stdout)]
1007
1008 use std::{collections::VecDeque, io::BufRead, iter::zip, sync::Arc};
1009
1010 use rand::{rngs::StdRng, Rng, SeedableRng};
1011 use slop_algebra::extension::BinomialExtensionField;
1012 use slop_symmetric::Permutation;
1013 use sp1_hypercube::inner_perm;
1014 use sp1_primitives::{SP1DiffusionMatrix, SP1Field};
1015
1016 use slop_algebra::PrimeField32;
1018 use sp1_core_machine::utils::setup_logger;
1019 use sp1_recursion_executor::Executor;
1020
1021 use crate::circuit::{AsmBuilder, AsmConfig, CircuitV2Builder};
1022
1023 use super::*;
1024
1025 type F = SP1Field;
1026 type EF = BinomialExtensionField<SP1Field, 4>;
    // Compiles `block` and executes it on the default recursion executor.
    fn test_block(block: DslIrBlock<AsmConfig>) {
        test_block_with_runner(block, |program| {
            let mut executor = Executor::<F, EF, SP1DiffusionMatrix>::new(program, inner_perm());
            executor.run().unwrap();
            executor.record
        });
    }

    // Compiles `block` with a fresh compiler, validates the program, and
    // hands it to the caller-supplied runner.
    fn test_block_with_runner(
        block: DslIrBlock<AsmConfig>,
        run: impl FnOnce(Arc<RecursionProgram<F>>) -> ExecutionRecord<F>,
    ) {
        let mut compiler = super::AsmCompiler::default();
        let program = Arc::new(compiler.compile_inner(block).validate().unwrap());
        let _ = run(program.clone());

    }
1062
1063 #[test]
1064 fn test_poseidon2() {
1065 setup_logger();
1066
1067 let mut builder = AsmBuilder::default();
1068 let mut rng = StdRng::seed_from_u64(0xCAFEDA7E)
1069 .sample_iter::<[F; PERMUTATION_WIDTH], _>(rand::distributions::Standard);
1070 for _ in 0..100 {
1071 let input_1: [F; PERMUTATION_WIDTH] = rng.next().unwrap();
1072 let output_1 = inner_perm().permute(input_1);
1073
1074 let input_1_felts = input_1.map(|x| builder.eval(x));
1075 let output_1_felts = builder.poseidon2_permute_v2(input_1_felts);
1076 let expected: [Felt<_>; PERMUTATION_WIDTH] = output_1.map(|x| builder.eval(x));
1077 for (lhs, rhs) in output_1_felts.into_iter().zip(expected) {
1078 builder.assert_felt_eq(lhs, rhs);
1079 }
1080 }
1081
1082 test_block(builder.into_root_block());
1083 }
1084
1085 #[test]
1086 fn test_hint_bit_decomposition() {
1087 setup_logger();
1088
1089 let mut builder = AsmBuilder::default();
1090 let mut rng =
1091 StdRng::seed_from_u64(0xC0FFEE7AB1E).sample_iter::<F, _>(rand::distributions::Standard);
1092 for _ in 0..100 {
1093 let input_f = rng.next().unwrap();
1094 let input = input_f.as_canonical_u32();
1095 let output = (0..NUM_BITS).map(|i| (input >> i) & 1).collect::<Vec<_>>();
1096
1097 let input_felt: Felt<_> = builder.eval(input_f);
1098 let output_felts = builder.num2bits_v2_f(input_felt, NUM_BITS);
1099 let expected: Vec<Felt<_>> =
1100 output.into_iter().map(|x| builder.eval(F::from_canonical_u32(x))).collect();
1101 for (lhs, rhs) in output_felts.into_iter().zip(expected) {
1102 builder.assert_felt_eq(lhs, rhs);
1103 }
1104 }
1105 test_block(builder.into_root_block());
1106 }
1107
1108 #[test]
1109 #[allow(clippy::uninlined_format_args)]
1110 fn test_print_and_cycle_tracker() {
1111 const ITERS: usize = 5;
1112
1113 setup_logger();
1114
1115 let mut builder = AsmBuilder::default();
1116
1117 let input_fs = StdRng::seed_from_u64(0xC0FFEE7AB1E)
1118 .sample_iter::<F, _>(rand::distributions::Standard)
1119 .take(ITERS)
1120 .collect::<Vec<_>>();
1121
1122 let input_efs = StdRng::seed_from_u64(0x7EA7AB1E)
1123 .sample_iter::<[F; 4], _>(rand::distributions::Standard)
1124 .take(ITERS)
1125 .collect::<Vec<_>>();
1126
1127 let mut buf = VecDeque::<u8>::new();
1128
1129 builder.cycle_tracker_v2_enter("printing felts");
1130 for (i, &input_f) in input_fs.iter().enumerate() {
1131 builder.cycle_tracker_v2_enter(format!("printing felt {i}"));
1132 let input_felt = builder.eval(input_f);
1133 builder.print_f(input_felt);
1134 builder.cycle_tracker_v2_exit();
1135 }
1136 builder.cycle_tracker_v2_exit();
1137
1138 builder.cycle_tracker_v2_enter("printing exts");
1139 for (i, input_block) in input_efs.iter().enumerate() {
1140 builder.cycle_tracker_v2_enter(format!("printing ext {i}"));
1141 let input_ext = builder.eval(EF::from_base_slice(input_block).cons());
1142 builder.print_e(input_ext);
1143 builder.cycle_tracker_v2_exit();
1144 }
1145 builder.cycle_tracker_v2_exit();
1146
1147 test_block_with_runner(builder.into_root_block(), |program| {
1148 let mut executor = Executor::<F, EF, SP1DiffusionMatrix>::new(program, inner_perm());
1149 executor.debug_stdout = Box::new(&mut buf);
1150 executor.run().unwrap();
1151 executor.record
1152 });
1153
1154 let input_str_fs = input_fs.into_iter().map(|elt| format!("{elt}"));
1155 let input_str_efs = input_efs.into_iter().map(|elt| format!("{elt:?}"));
1156 let input_strs = input_str_fs.chain(input_str_efs);
1157
1158 for (input_str, line) in zip(input_strs, buf.lines()) {
1159 let line = line.unwrap();
1160 assert!(line.contains(&input_str));
1161 }
1162 }
1163
1164 #[test]
1165 fn test_ext2felts() {
1166 setup_logger();
1167
1168 let mut builder = AsmBuilder::default();
1169 let mut rng =
1170 StdRng::seed_from_u64(0x3264).sample_iter::<[F; 4], _>(rand::distributions::Standard);
1171 let mut random_ext = move || EF::from_base_slice(&rng.next().unwrap());
1172 for _ in 0..100 {
1173 let input = random_ext();
1174 let output: &[F] = input.as_base_slice();
1175
1176 let input_ext = builder.eval(input.cons());
1177 let output_felts = builder.ext2felt_v2(input_ext);
1178 let expected: Vec<Felt<_>> = output.iter().map(|&x| builder.eval(x)).collect();
1179 for (lhs, rhs) in output_felts.into_iter().zip(expected) {
1180 builder.assert_felt_eq(lhs, rhs);
1181 }
1182 }
1183 test_block(builder.into_root_block());
1184 }
1185
1186 macro_rules! test_assert_fixture {
1187 ($assert_felt:ident, $assert_ext:ident, $should_offset:literal) => {
1188 {
1189 use std::convert::identity;
1190 let mut builder = AsmBuilder::default();
1191 test_assert_fixture!(builder, identity, F, Felt<_>, 0xDEADBEEF, $assert_felt, $should_offset);
1192 test_assert_fixture!(builder, EF::cons, EF, Ext<_, _>, 0xABADCAFE, $assert_ext, $should_offset);
1193 test_block(builder.into_root_block());
1194 }
1195 };
1196 ($builder:ident, $wrap:path, $t:ty, $u:ty, $seed:expr, $assert:ident, $should_offset:expr) => {
1197 {
1198 let mut elts = StdRng::seed_from_u64($seed)
1199 .sample_iter::<$t, _>(rand::distributions::Standard);
1200 for _ in 0..100 {
1201 let a = elts.next().unwrap();
1202 let b = elts.next().unwrap();
1203 let c = a + b;
1204 let ar: $u = $builder.eval($wrap(a));
1205 let br: $u = $builder.eval($wrap(b));
1206 let cr: $u = $builder.eval(ar + br);
1207 let cm = if $should_offset {
1208 c + elts.find(|x| !x.is_zero()).unwrap()
1209 } else {
1210 c
1211 };
1212 $builder.$assert(cr, $wrap(cm));
1213 }
1214 }
1215 };
1216 }
1217
1218 #[test]
1219 fn test_assert_eq_noop() {
1220 test_assert_fixture!(assert_felt_eq, assert_ext_eq, false);
1221 }
1222
1223 #[test]
1224 #[should_panic]
1225 fn test_assert_eq_panics() {
1226 test_assert_fixture!(assert_felt_eq, assert_ext_eq, true);
1227 }
1228
1229 #[test]
1230 fn test_assert_ne_noop() {
1231 test_assert_fixture!(assert_felt_ne, assert_ext_ne, true);
1232 }
1233
1234 #[test]
1235 #[should_panic]
1236 fn test_assert_ne_panics() {
1237 test_assert_fixture!(assert_felt_ne, assert_ext_ne, false);
1238 }
1239}