sp1_core_machine/operations/
lt.rs1use itertools::izip;
2
3use p3_air::AirBuilder;
4use p3_field::{AbstractField, PrimeField32};
5
6use sp1_core_executor::{
7 events::{ByteLookupEvent, ByteRecord},
8 ByteOpcode,
9};
10use sp1_derive::AlignedBorrow;
11use sp1_stark::air::{BaseAirBuilder, SP1AirBuilder};
12
/// Columns asserting that one little-endian byte array is strictly less than another.
///
/// Exactly one entry of `byte_flags` is set on a real row: the most-significant
/// byte position at which `a` and `b` first differ (the arrays are scanned from
/// the high end in `populate`/`eval`). The two comparison columns capture the
/// bytes of `a` and `b` at that position; `eval` sends them to the `LTU` byte
/// lookup to prove the strict inequality.
#[derive(Debug, Clone, Copy, AlignedBorrow)]
#[repr(C)]
pub struct AssertLtColsBytes<T, const N: usize> {
    // One-hot selector for the first (most-significant) differing byte position.
    pub(crate) byte_flags: [T; N],

    // Byte of `a` at the flagged position.
    pub(crate) a_comparison_byte: T,
    // Byte of `b` at the flagged position.
    pub(crate) b_comparison_byte: T,
}
23
24impl<F: PrimeField32, const N: usize> AssertLtColsBytes<F, N> {
25 pub fn populate(&mut self, record: &mut impl ByteRecord, a: &[u8], b: &[u8]) {
26 let mut byte_flags = vec![0u8; N];
27
28 for (a_byte, b_byte, flag) in
29 izip!(a.iter().rev(), b.iter().rev(), byte_flags.iter_mut().rev())
30 {
31 assert!(a_byte <= b_byte);
32 if a_byte < b_byte {
33 *flag = 1;
34 self.a_comparison_byte = F::from_canonical_u8(*a_byte);
35 self.b_comparison_byte = F::from_canonical_u8(*b_byte);
36 record.add_byte_lookup_event(ByteLookupEvent {
37 opcode: ByteOpcode::LTU,
38 a1: 1,
39 a2: 0,
40 b: *a_byte,
41 c: *b_byte,
42 });
43 break;
44 }
45 }
46
47 for (byte, flag) in izip!(byte_flags.iter(), self.byte_flags.iter_mut()) {
48 *flag = F::from_canonical_u8(*byte);
49 }
50 }
51}
52
impl<V: Copy, const N: usize> AssertLtColsBytes<V, N> {
    /// Constrain that the byte array `a` is strictly less than `b` whenever
    /// `is_real` is set.
    ///
    /// The argument arrays are little-endian; the comparison walks from the
    /// most-significant byte down. Constraints enforced on real rows:
    /// 1. `byte_flags` is boolean and sums to exactly one (one-hot).
    /// 2. All bytes above the flagged position are pairwise equal.
    /// 3. The comparison-byte columns equal the flagged bytes of `a` and `b`.
    /// 4. An `LTU` byte lookup proves `a_comparison_byte < b_comparison_byte`,
    ///    mirroring the `ByteLookupEvent` written in `populate`.
    pub fn eval<
        AB: SP1AirBuilder<Var = V>,
        Ea: Into<AB::Expr> + Clone,
        Eb: Into<AB::Expr> + Clone,
    >(
        &self,
        builder: &mut AB,
        a: &[Ea],
        b: &[Eb],
        is_real: impl Into<AB::Expr> + Clone,
    ) where
        V: Into<AB::Expr>,
    {
        // (1) Each flag is boolean and exactly one flag is set on real rows.
        let mut sum_flags: AB::Expr = AB::Expr::zero();
        for &flag in self.byte_flags.iter() {
            builder.assert_bool(flag);
            sum_flags = sum_flags.clone() + flag.into();
        }
        builder.when(is_real.clone()).assert_one(sum_flags);

        // Running prefix sum of flags in most-significant-first order: zero
        // before the flagged position, one at and after it.
        let mut is_inequality_visited = AB::Expr::zero();

        let a: [AB::Expr; N] = core::array::from_fn(|i| a[i].clone().into());
        let b: [AB::Expr; N] = core::array::from_fn(|i| b[i].clone().into());

        // Dot products with the one-hot flags select the bytes of `a` and `b`
        // at the flagged position.
        let mut first_lt_byte = AB::Expr::zero();
        let mut b_comparison_byte = AB::Expr::zero();
        for (a_byte, b_byte, &flag) in
            izip!(a.iter().rev(), b.iter().rev(), self.byte_flags.iter().rev())
        {
            is_inequality_visited = is_inequality_visited.clone() + flag.into();

            first_lt_byte = first_lt_byte.clone() + a_byte.clone() * flag;
            b_comparison_byte = b_comparison_byte.clone() + b_byte.clone() * flag;

            // (2) Before the flag is reached (prefix sum still zero), the
            // corresponding bytes must agree.
            builder
                .when_not(is_inequality_visited.clone())
                .when(is_real.clone())
                .assert_eq(a_byte.clone(), b_byte.clone());
        }

        // (3) Bind the committed comparison bytes to the selected bytes.
        builder.when(is_real.clone()).assert_eq(self.a_comparison_byte, first_lt_byte);
        builder.when(is_real.clone()).assert_eq(self.b_comparison_byte, b_comparison_byte);

        // (4) Look up LTU(a_comparison_byte, b_comparison_byte) == 1, matching
        // the event emitted by `populate` (a1 = 1).
        builder.send_byte(
            ByteOpcode::LTU.as_field::<AB::F>(),
            AB::F::one(),
            self.a_comparison_byte,
            self.b_comparison_byte,
            is_real,
        )
    }
}
130
/// Columns asserting that one little-endian bit array is strictly less than another.
///
/// Exactly one entry of `bit_flags` is set on a real row: the most-significant
/// bit position at which `a` and `b` first differ. No comparison-value columns
/// are needed here — for bits, `a_bit < b_bit` forces `a_bit = 0, b_bit = 1`,
/// which `eval` constrains directly.
#[derive(Debug, Clone, Copy, AlignedBorrow)]
#[repr(C)]
pub struct AssertLtColsBits<T, const N: usize> {
    // One-hot selector for the first (most-significant) differing bit position.
    pub(crate) bit_flags: [T; N],
}
138
139impl<F: PrimeField32, const N: usize> AssertLtColsBits<F, N> {
140 pub fn populate(&mut self, a: &[u32], b: &[u32]) {
141 let mut bit_flags = vec![0u8; N];
142
143 for (a_bit, b_bit, flag) in
144 izip!(a.iter().rev(), b.iter().rev(), bit_flags.iter_mut().rev())
145 {
146 assert!(a_bit <= b_bit);
147 debug_assert!(*a_bit == 0 || *a_bit == 1);
148 debug_assert!(*b_bit == 0 || *b_bit == 1);
149 if a_bit < b_bit {
150 *flag = 1;
151 break;
152 }
153 }
154
155 for (bit, flag) in izip!(bit_flags.iter(), self.bit_flags.iter_mut()) {
156 *flag = F::from_canonical_u8(*bit);
157 }
158 }
159}
160
impl<V: Copy, const N: usize> AssertLtColsBits<V, N> {
    /// Constrain that the bit array `a` is strictly less than `b` whenever
    /// `is_real` is set.
    ///
    /// The argument arrays are little-endian; the comparison walks from the
    /// most-significant bit down. Constraints enforced on real rows:
    /// 1. `bit_flags` is boolean and sums to exactly one (one-hot).
    /// 2. All bits above the flagged position are pairwise equal.
    /// 3. At the flagged position, `a`'s bit is 0 and `b`'s bit is 1 — which
    ///    for bits is exactly the strict inequality (no byte lookup needed).
    pub fn eval<
        AB: SP1AirBuilder<Var = V>,
        Ea: Into<AB::Expr> + Clone,
        Eb: Into<AB::Expr> + Clone,
    >(
        &self,
        builder: &mut AB,
        a: &[Ea],
        b: &[Eb],
        is_real: impl Into<AB::Expr> + Clone,
    ) where
        V: Into<AB::Expr>,
    {
        // (1) Each flag is boolean and exactly one flag is set on real rows.
        let mut sum_flags: AB::Expr = AB::Expr::zero();
        for &flag in self.bit_flags.iter() {
            builder.assert_bool(flag);
            sum_flags = sum_flags.clone() + flag.into();
        }
        builder.when(is_real.clone()).assert_one(sum_flags);

        // Running prefix sum of flags in most-significant-first order: zero
        // before the flagged position, one at and after it.
        let mut is_inequality_visited = AB::Expr::zero();

        let a: [AB::Expr; N] = core::array::from_fn(|i| a[i].clone().into());
        let b: [AB::Expr; N] = core::array::from_fn(|i| b[i].clone().into());

        // Dot products with the one-hot flags select the bits of `a` and `b`
        // at the flagged position.
        let mut a_comparison_bit = AB::Expr::zero();
        let mut b_comparison_bit = AB::Expr::zero();
        for (a_bit, b_bit, &flag) in
            izip!(a.iter().rev(), b.iter().rev(), self.bit_flags.iter().rev())
        {
            is_inequality_visited = is_inequality_visited.clone() + flag.into();

            a_comparison_bit = a_comparison_bit.clone() + a_bit.clone() * flag;
            b_comparison_bit = b_comparison_bit.clone() + b_bit.clone() * flag;

            // (2) Before the flag is reached (prefix sum still zero), the
            // corresponding bits must agree.
            builder
                .when(is_real.clone())
                .when_not(is_inequality_visited.clone())
                .assert_eq(a_bit.clone(), b_bit.clone());
        }

        // (3) At the flagged position: a_bit = 0 and b_bit = 1, i.e. a < b.
        builder.when(is_real.clone()).assert_eq(a_comparison_bit, AB::F::zero());
        builder.when(is_real.clone()).assert_eq(b_comparison_bit, AB::F::one());
    }
}
228}