sp1_core_executor/events/byte.rs

use std::hash::Hash;

use hashbrown::HashMap;
use p3_field::{Field, PrimeField32};
use serde::{Deserialize, Serialize};

use crate::{ByteOpcode, Opcode};

/// The number of different byte operations.
pub const NUM_BYTE_OPS: usize = 9;

/// Byte Lookup Event.
///
/// This object encapsulates the information needed to prove a byte lookup operation: the opcode
/// of the operation and its operands.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub struct ByteLookupEvent {
    /// The opcode of the operation.
    pub opcode: ByteOpcode,
    /// The first operand.
    pub a1: u16,
    /// The second operand.
    pub a2: u8,
    /// The third operand.
    pub b: u8,
    /// The fourth operand.
    pub c: u8,
}

/// A record that can be populated with byte lookup events.
pub trait ByteRecord {
    /// Adds a new [`ByteLookupEvent`] to the record.
    fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent);

    /// Adds a batch of [`ByteLookupEvent`] multiplicity maps to the record.
    fn add_byte_lookup_events_from_maps(
        &mut self,
        new_blu_events_vec: Vec<&HashMap<ByteLookupEvent, usize>>,
    );

    /// Adds a list of [`ByteLookupEvent`]s to the record.
    #[inline]
    fn add_byte_lookup_events(&mut self, blu_events: Vec<ByteLookupEvent>) {
        for blu_event in blu_events {
            self.add_byte_lookup_event(blu_event);
        }
    }

    /// Adds a [`ByteLookupEvent`] to verify that `a` and `b` are both bytes.
    fn add_u8_range_check(&mut self, a: u8, b: u8) {
        self.add_byte_lookup_event(ByteLookupEvent {
            opcode: ByteOpcode::U8Range,
            a1: 0,
            a2: 0,
            b: a,
            c: b,
        });
    }

    /// Adds a [`ByteLookupEvent`] to verify that `a` is in the range of a `u16`.
    fn add_u16_range_check(&mut self, a: u16) {
        self.add_byte_lookup_event(ByteLookupEvent {
            opcode: ByteOpcode::U16Range,
            a1: a,
            a2: 0,
            b: 0,
            c: 0,
        });
    }

    /// Adds [`ByteLookupEvent`]s to verify that all values in the input slice are indeed bytes.
    /// Bytes are checked in pairs; a slice of odd length is padded with a zero byte.
    fn add_u8_range_checks(&mut self, bytes: &[u8]) {
        let mut index = 0;
        while index + 1 < bytes.len() {
            self.add_u8_range_check(bytes[index], bytes[index + 1]);
            index += 2;
        }
        if index < bytes.len() {
            self.add_u8_range_check(bytes[index], 0);
        }
    }

    /// Adds [`ByteLookupEvent`]s to verify that all field elements in the input slice represent
    /// bytes. Each element is reduced to its canonical `u32` form and truncated to a `u8`.
    fn add_u8_range_checks_field<F: PrimeField32>(&mut self, field_values: &[F]) {
        self.add_u8_range_checks(
            &field_values.iter().map(|x| x.as_canonical_u32() as u8).collect::<Vec<_>>(),
        );
    }

    /// Adds [`ByteLookupEvent`]s to verify that all limbs in the input slice are `u16`s.
    fn add_u16_range_checks(&mut self, ls: &[u16]) {
        ls.iter().for_each(|x| self.add_u16_range_check(*x));
    }

    /// Adds a [`ByteLookupEvent`] to compute the bitwise OR of `b` and `c`.
    fn lookup_or(&mut self, b: u8, c: u8) {
        self.add_byte_lookup_event(ByteLookupEvent {
            opcode: ByteOpcode::OR,
            a1: (b | c) as u16,
            a2: 0,
            b,
            c,
        });
    }
}
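
// Illustrative usage (a sketch, not part of the original file): every helper on
// `ByteRecord` bottoms out in `add_byte_lookup_event`, so any implementor gets
// the range-check and lookup helpers for free. For example, with the
// `Vec<ByteLookupEvent>` impl below:
//
//     let mut record: Vec<ByteLookupEvent> = Vec::new();
//     record.add_u8_range_check(0xAB, 0xCD); // one U8Range event covers both bytes
//     record.add_u16_range_check(0xBEEF);    // one U16Range event
//     record.lookup_or(0x0F, 0xF0);          // one OR event with a1 = 0xFF
//     assert_eq!(record.len(), 3);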

impl ByteLookupEvent {
    /// Creates a new `ByteLookupEvent`.
    #[must_use]
    pub fn new(opcode: ByteOpcode, a1: u16, a2: u8, b: u8, c: u8) -> Self {
        Self { opcode, a1, a2, b, c }
    }
}

impl ByteRecord for Vec<ByteLookupEvent> {
    fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent) {
        self.push(blu_event);
    }

    fn add_byte_lookup_events_from_maps(&mut self, _: Vec<&HashMap<ByteLookupEvent, usize>>) {
        // A flat event list has no multiplicity maps to merge into.
        unimplemented!()
    }
}

impl ByteRecord for HashMap<ByteLookupEvent, usize> {
    #[inline]
    fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent) {
        self.entry(blu_event).and_modify(|e| *e += 1).or_insert(1);
    }

    fn add_byte_lookup_events_from_maps(
        &mut self,
        new_events: Vec<&HashMap<ByteLookupEvent, usize>>,
    ) {
        for new_blu_map in new_events {
            for (blu_event, count) in new_blu_map.iter() {
                *self.entry(*blu_event).or_insert(0) += count;
            }
        }
    }
}
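
// Illustrative merge (a sketch, not part of the original file): the map impl
// deduplicates events into multiplicity counts, so several per-shard maps can
// be folded into one aggregate map:
//
//     let mut shard_a: HashMap<ByteLookupEvent, usize> = HashMap::new();
//     shard_a.lookup_or(1, 2);
//     let mut shard_b: HashMap<ByteLookupEvent, usize> = HashMap::new();
//     shard_b.lookup_or(1, 2);
//     let mut aggregate: HashMap<ByteLookupEvent, usize> = HashMap::new();
//     aggregate.add_byte_lookup_events_from_maps(vec![&shard_a, &shard_b]);
//     // Both shards recorded the same event, so it carries multiplicity 2.
//     assert_eq!(aggregate.values().sum::<usize>(), 2);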

impl From<Opcode> for ByteOpcode {
    /// Converts a CPU [`Opcode`] to its byte-table counterpart, panicking on opcodes that have
    /// no byte operation.
    fn from(value: Opcode) -> Self {
        match value {
            Opcode::AND => Self::AND,
            Opcode::OR => Self::OR,
            Opcode::XOR => Self::XOR,
            Opcode::SLL => Self::SLL,
            _ => panic!("Invalid opcode for ByteChip: {value:?}"),
        }
    }
}

impl ByteOpcode {
    /// Returns all the byte opcodes.
    #[must_use]
    pub fn all() -> Vec<Self> {
        let opcodes = vec![
            ByteOpcode::AND,
            ByteOpcode::OR,
            ByteOpcode::XOR,
            ByteOpcode::SLL,
            ByteOpcode::U8Range,
            ByteOpcode::ShrCarry,
            ByteOpcode::LTU,
            ByteOpcode::MSB,
            ByteOpcode::U16Range,
        ];
        debug_assert_eq!(opcodes.len(), NUM_BYTE_OPS);
        opcodes
    }

    /// Converts the opcode to a field element.
    #[must_use]
    pub fn as_field<F: Field>(self) -> F {
        F::from_canonical_u8(self as u8)
    }
}
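
// A minimal illustrative test module (a sketch, not in the original file). It
// assumes this file compiles inside the crate that defines `ByteOpcode` and
// `Opcode`, and exercises the two `ByteRecord` impls above.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn vec_record_pushes_one_event_per_lookup() {
        let mut record: Vec<ByteLookupEvent> = Vec::new();
        // Four bytes are range-checked in pairs, so two events are recorded.
        record.add_u8_range_checks(&0xDEAD_BEEFu32.to_le_bytes());
        assert_eq!(record.len(), 2);
    }

    #[test]
    fn map_record_deduplicates_with_counts() {
        let mut record: HashMap<ByteLookupEvent, usize> = HashMap::new();
        record.lookup_or(0x0F, 0xF0);
        record.lookup_or(0x0F, 0xF0);
        // The same event is stored once, with its multiplicity bumped to 2.
        assert_eq!(record.len(), 1);
        let event = ByteLookupEvent::new(ByteOpcode::OR, 0xFF, 0, 0x0F, 0xF0);
        assert_eq!(record.get(&event), Some(&2));
    }
}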