//! Byte lookup events (sp1_core_executor/events/byte.rs).

use std::hash::Hash;

use deepsize2::DeepSizeOf;
use hashbrown::HashMap;
use serde::{Deserialize, Serialize};
use slop_algebra::{Field, PrimeField32};

use crate::{ByteOpcode, Opcode};
/// The number of distinct byte operations tracked by the byte table
/// (matches the length of `ByteOpcode::byte_table()`).
pub const NUM_BYTE_OPS: usize = 6;
/// A single byte-operation lookup event recorded for the byte chip.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash, DeepSizeOf)]
pub struct ByteLookupEvent {
    /// The byte opcode of this lookup.
    pub opcode: ByteOpcode,
    /// The first operand: the operation result (e.g. `b | c` for OR), or the value
    /// under a 16-bit/bit range check; 0 for `U8Range` checks.
    pub a: u16,
    /// The second operand (a byte value, or the bit width for range checks).
    pub b: u8,
    /// The third operand.
    pub c: u8,
}
28
29pub trait ByteRecord {
31 fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent);
33
34 fn add_byte_lookup_events_from_maps(
36 &mut self,
37 new_blu_events_vec: Vec<&HashMap<ByteLookupEvent, usize>>,
38 );
39
40 #[inline]
42 fn add_byte_lookup_events(&mut self, blu_events: Vec<ByteLookupEvent>) {
43 for blu_event in blu_events {
44 self.add_byte_lookup_event(blu_event);
45 }
46 }
47
48 fn add_u8_range_check(&mut self, a: u8, b: u8) {
50 self.add_byte_lookup_event(ByteLookupEvent {
51 opcode: ByteOpcode::U8Range,
52 a: 0,
53 b: a,
54 c: b,
55 });
56 }
57
58 fn add_u16_range_check(&mut self, a: u16) {
60 self.add_byte_lookup_event(ByteLookupEvent { opcode: ByteOpcode::Range, a, b: 16, c: 0 });
61 }
62
63 fn add_bit_range_check(&mut self, a: u16, b: u8) {
65 self.add_byte_lookup_event(ByteLookupEvent { opcode: ByteOpcode::Range, a, b, c: 0 });
66 }
67
68 fn add_u8_range_checks(&mut self, bytes: &[u8]) {
70 let mut index = 0;
71 while index + 1 < bytes.len() {
72 self.add_u8_range_check(bytes[index], bytes[index + 1]);
73 index += 2;
74 }
75 if index < bytes.len() {
76 self.add_u8_range_check(bytes[index], 0);
78 }
79 }
80
81 fn add_u8_range_checks_field<F: PrimeField32>(&mut self, field_values: &[F]) {
84 self.add_u8_range_checks(
85 &field_values.iter().map(|x| x.as_canonical_u32() as u8).collect::<Vec<_>>(),
86 );
87 }
88
89 fn add_u16_range_checks(&mut self, ls: &[u16]) {
91 for x in ls.iter() {
92 self.add_u16_range_check(*x);
93 }
94 }
95
96 fn add_u16_range_checks_field<F: PrimeField32>(&mut self, field_values: &[F]) {
99 for x in field_values.iter() {
100 self.add_u16_range_check(x.as_canonical_u32() as u16);
101 }
102 }
103
104 fn lookup_or(&mut self, b: u8, c: u8) {
106 self.add_byte_lookup_event(ByteLookupEvent {
107 opcode: ByteOpcode::OR,
108 a: (b | c) as u16,
109 b,
110 c,
111 });
112 }
113}
114
115impl ByteLookupEvent {
116 #[must_use]
118 pub fn new(opcode: ByteOpcode, a: u16, b: u8, c: u8) -> Self {
119 Self { opcode, a, b, c }
120 }
121}
122
123impl ByteRecord for Vec<ByteLookupEvent> {
124 fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent) {
125 self.push(blu_event);
126 }
127
128 fn add_byte_lookup_events_from_maps(&mut self, _: Vec<&HashMap<ByteLookupEvent, usize>>) {
129 unimplemented!()
130 }
131}
132
133impl ByteRecord for HashMap<ByteLookupEvent, usize> {
134 #[inline]
135 fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent) {
136 self.entry(blu_event).and_modify(|e| *e += 1).or_insert(1);
137 }
138
139 fn add_byte_lookup_events_from_maps(
140 &mut self,
141 new_events: Vec<&HashMap<ByteLookupEvent, usize>>,
142 ) {
143 for new_blu_map in new_events {
144 for (blu_event, count) in new_blu_map.iter() {
145 *self.entry(*blu_event).or_insert(0) += count;
146 }
147 }
148 }
149}
150
/// Converts an executor [`Opcode`] to its byte-table equivalent.
impl From<Opcode> for ByteOpcode {
    /// Maps the bitwise opcodes (`AND`, `OR`, `XOR`) to their byte-table variants.
    ///
    /// # Panics
    /// Panics on any other opcode — only bitwise operations are handled here.
    fn from(value: Opcode) -> Self {
        match value {
            Opcode::AND => Self::AND,
            Opcode::OR => Self::OR,
            Opcode::XOR => Self::XOR,
            _ => panic!("Invalid opcode for ByteChip: {value:?}"),
        }
    }
}
162
163impl ByteOpcode {
164 #[must_use]
166 pub fn byte_table() -> Vec<Self> {
167 let opcodes = vec![
168 ByteOpcode::AND,
169 ByteOpcode::OR,
170 ByteOpcode::XOR,
171 ByteOpcode::U8Range,
172 ByteOpcode::LTU,
173 ByteOpcode::MSB,
174 ];
175 opcodes
176 }
177
178 #[must_use]
180 pub fn as_field<F: Field>(self) -> F {
181 F::from_canonical_u8(self as u8)
182 }
183}