sp1_core_executor/events/byte.rs

use std::hash::Hash;

use hashbrown::HashMap;
use p3_field::{Field, PrimeField32};
use serde::{Deserialize, Serialize};

use crate::{ByteOpcode, Opcode};

/// The number of different byte operations.
pub const NUM_BYTE_OPS: usize = 9;

/// A byte lookup event, pairing a byte opcode with its operands.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub struct ByteLookupEvent {
    /// The opcode of the operation.
    pub opcode: ByteOpcode,
    /// The first operand.
    pub a1: u16,
    /// The second operand.
    pub a2: u8,
    /// The third operand.
    pub b: u8,
    /// The fourth operand.
    pub c: u8,
}

/// A record that can collect byte lookup events.
pub trait ByteRecord {
    /// Adds a new [`ByteLookupEvent`] to the record.
    fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent);

    /// Adds a list of maps of byte lookup events (with multiplicities) to the record.
    fn add_byte_lookup_events_from_maps(
        &mut self,
        new_blu_events_vec: Vec<&HashMap<ByteLookupEvent, usize>>,
    );

    /// Adds a list of [`ByteLookupEvent`]s to the record.
    #[inline]
    fn add_byte_lookup_events(&mut self, blu_events: Vec<ByteLookupEvent>) {
        for blu_event in blu_events {
            self.add_byte_lookup_event(blu_event);
        }
    }

    /// Adds a `ByteLookupEvent` checking that `a` and `b` are both bytes.
    fn add_u8_range_check(&mut self, a: u8, b: u8) {
        self.add_byte_lookup_event(ByteLookupEvent {
            opcode: ByteOpcode::U8Range,
            a1: 0,
            a2: 0,
            b: a,
            c: b,
        });
    }

    /// Adds a `ByteLookupEvent` checking that `a` fits in a `u16`.
    fn add_u16_range_check(&mut self, a: u16) {
        self.add_byte_lookup_event(ByteLookupEvent {
            opcode: ByteOpcode::U16Range,
            a1: a,
            a2: 0,
            b: 0,
            c: 0,
        });
    }

    /// Adds `ByteLookupEvent`s checking that every value in `bytes` is a byte. Values are
    /// range-checked in pairs, since each `U8Range` event carries two operands.
    fn add_u8_range_checks(&mut self, bytes: &[u8]) {
        let mut index = 0;
        while index + 1 < bytes.len() {
            self.add_u8_range_check(bytes[index], bytes[index + 1]);
            index += 2;
        }
        if index < bytes.len() {
            // Pair an odd trailing byte with zero so it is still range-checked.
            self.add_u8_range_check(bytes[index], 0);
        }
    }

    /// Adds `ByteLookupEvent`s checking that every field element in `field_values` is a byte.
    fn add_u8_range_checks_field<F: PrimeField32>(&mut self, field_values: &[F]) {
        self.add_u8_range_checks(
            &field_values.iter().map(|x| x.as_canonical_u32() as u8).collect::<Vec<_>>(),
        );
    }

    /// Adds `ByteLookupEvent`s checking that every value in `ls` fits in a `u16`.
    fn add_u16_range_checks(&mut self, ls: &[u16]) {
        for x in ls.iter() {
            self.add_u16_range_check(*x);
        }
    }

    /// Adds a `ByteLookupEvent` for the bitwise OR of `b` and `c`.
    fn lookup_or(&mut self, b: u8, c: u8) {
        self.add_byte_lookup_event(ByteLookupEvent {
            opcode: ByteOpcode::OR,
            a1: (b | c) as u16,
            a2: 0,
            b,
            c,
        });
    }
}
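
// A minimal usage sketch (hypothetical helper, not part of the original file):
// any `ByteRecord` can range-check a whole word through the default helpers
// of the trait above.
#[allow(dead_code)]
fn record_word_range_checks(record: &mut impl ByteRecord, word: u32) {
    // Range-checks all four little-endian bytes of `word` as two paired events.
    record.add_u8_range_checks(&word.to_le_bytes());
}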

impl ByteLookupEvent {
    /// Creates a new `ByteLookupEvent`.
    #[must_use]
    pub fn new(opcode: ByteOpcode, a1: u16, a2: u8, b: u8, c: u8) -> Self {
        Self { opcode, a1, a2, b, c }
    }
}

impl ByteRecord for Vec<ByteLookupEvent> {
    fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent) {
        self.push(blu_event);
    }

    fn add_byte_lookup_events_from_maps(&mut self, _: Vec<&HashMap<ByteLookupEvent, usize>>) {
        // A flat `Vec` keeps duplicates and has no multiplicity counts to merge into.
        unimplemented!()
    }
}

impl ByteRecord for HashMap<ByteLookupEvent, usize> {
    #[inline]
    fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent) {
        self.entry(blu_event).and_modify(|e| *e += 1).or_insert(1);
    }

    fn add_byte_lookup_events_from_maps(
        &mut self,
        new_events: Vec<&HashMap<ByteLookupEvent, usize>>,
    ) {
        for new_blu_map in new_events {
            for (blu_event, count) in new_blu_map.iter() {
                *self.entry(*blu_event).or_insert(0) += count;
            }
        }
    }
}
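
// Illustrative sketch (hypothetical helper, not part of the original file):
// `add_byte_lookup_events_from_maps` is shaped for merging several per-chunk
// multiplicity maps into one global map, e.g. as below.
#[allow(dead_code)]
fn merge_blu_maps(
    global: &mut HashMap<ByteLookupEvent, usize>,
    chunks: &[HashMap<ByteLookupEvent, usize>],
) {
    // Collect a reference to each chunk's map and merge all of their counts.
    global.add_byte_lookup_events_from_maps(chunks.iter().collect());
}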

impl From<Opcode> for ByteOpcode {
    fn from(value: Opcode) -> Self {
        // Only the bitwise and shift-left instructions map onto byte-table opcodes.
        match value {
            Opcode::AND => Self::AND,
            Opcode::OR => Self::OR,
            Opcode::XOR => Self::XOR,
            Opcode::SLL => Self::SLL,
            _ => panic!("Invalid opcode for ByteChip: {value:?}"),
        }
    }
}

impl ByteOpcode {
    /// Returns all the byte opcodes.
    #[must_use]
    pub fn all() -> Vec<Self> {
        let opcodes = vec![
            ByteOpcode::AND,
            ByteOpcode::OR,
            ByteOpcode::XOR,
            ByteOpcode::SLL,
            ByteOpcode::U8Range,
            ByteOpcode::ShrCarry,
            ByteOpcode::LTU,
            ByteOpcode::MSB,
            ByteOpcode::U16Range,
        ];
        debug_assert_eq!(opcodes.len(), NUM_BYTE_OPS);
        opcodes
    }

    /// Converts the opcode to a field element.
    #[must_use]
    pub fn as_field<F: Field>(self) -> F {
        F::from_canonical_u8(self as u8)
    }
}
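
#[cfg(test)]
mod tests {
    // Minimal sketches (added for illustration, not from the original file)
    // exercising the `ByteRecord` implementations and the opcode table above.
    use super::*;

    #[test]
    fn vec_record_keeps_duplicate_events() {
        let mut record: Vec<ByteLookupEvent> = Vec::new();
        // Three bytes are checked as the pairs (0x12, 0x34) and (0x56, 0).
        record.add_u8_range_checks(&[0x12, 0x34, 0x56]);
        assert_eq!(record.len(), 2);
    }

    #[test]
    fn map_record_counts_multiplicities() {
        let mut record: HashMap<ByteLookupEvent, usize> = HashMap::new();
        record.add_u16_range_check(7);
        record.add_u16_range_check(7);
        // The two identical events collapse into one entry with count 2.
        let event = ByteLookupEvent::new(ByteOpcode::U16Range, 7, 0, 0, 0);
        assert_eq!(record.get(&event), Some(&2));
    }

    #[test]
    fn opcode_table_is_consistent() {
        // `all()` must enumerate exactly `NUM_BYTE_OPS` opcodes, and the
        // `From<Opcode>` conversion maps bitwise instructions one-to-one.
        assert_eq!(ByteOpcode::all().len(), NUM_BYTE_OPS);
        assert_eq!(ByteOpcode::from(Opcode::AND), ByteOpcode::AND);
    }
}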