use std::hash::Hash;
use deepsize2::DeepSizeOf;
use hashbrown::HashMap;
use serde::{Deserialize, Serialize};
use slop_algebra::{Field, PrimeField32};
use crate::{ByteOpcode, Opcode};
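/// The number of distinct byte operations materialized in the byte table.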
pub const NUM_BYTE_OPS: usize = 6;
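/// A byte lookup event, representing a single lookup into the byte table.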
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash, DeepSizeOf)]
pub struct ByteLookupEvent {
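    /// The opcode of the operation.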
pub opcode: ByteOpcode,
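    /// The output operand.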
pub a: u16,
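    /// The first input operand.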
pub b: u8,
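    /// The second input operand.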
pub c: u8,
}
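/// A record type that can accumulate byte lookup events.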
pub trait ByteRecord {
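    /// Adds a single byte lookup event to the record.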
fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent);
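    /// Adds byte lookup events from a batch of event-multiplicity maps.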
fn add_byte_lookup_events_from_maps(
&mut self,
new_blu_events_vec: Vec<&HashMap<ByteLookupEvent, usize>>,
);
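    /// Adds a batch of byte lookup events, one at a time.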
#[inline]
fn add_byte_lookup_events(&mut self, blu_events: Vec<ByteLookupEvent>) {
for blu_event in blu_events {
self.add_byte_lookup_event(blu_event);
}
}
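    /// Adds a `U8Range` lookup event for the byte pair `(a, b)`.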
fn add_u8_range_check(&mut self, a: u8, b: u8) {
self.add_byte_lookup_event(ByteLookupEvent {
opcode: ByteOpcode::U8Range,
a: 0,
b: a,
c: b,
});
}
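    /// Adds a `Range` lookup event checking that `a` fits in 16 bits.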
fn add_u16_range_check(&mut self, a: u16) {
self.add_byte_lookup_event(ByteLookupEvent { opcode: ByteOpcode::Range, a, b: 16, c: 0 });
}
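    /// Adds a `Range` lookup event checking that `a` fits in `b` bits.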
fn add_bit_range_check(&mut self, a: u16, b: u8) {
self.add_byte_lookup_event(ByteLookupEvent { opcode: ByteOpcode::Range, a, b, c: 0 });
}
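    /// Adds `U8Range` lookup events for a slice of bytes, two at a time.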
    fn add_u8_range_checks(&mut self, bytes: &[u8]) {
        // Check bytes in pairs, padding with a zero byte if the length is odd.
        for pair in bytes.chunks(2) {
            self.add_u8_range_check(pair[0], pair.get(1).copied().unwrap_or(0));
        }
    }
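    /// Adds `U8Range` lookup events for field elements, interpreting each canonical value as a byte.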
fn add_u8_range_checks_field<F: PrimeField32>(&mut self, field_values: &[F]) {
self.add_u8_range_checks(
&field_values.iter().map(|x| x.as_canonical_u32() as u8).collect::<Vec<_>>(),
);
}
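    /// Adds a 16-bit `Range` lookup event for each value in the slice.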
fn add_u16_range_checks(&mut self, ls: &[u16]) {
for x in ls.iter() {
self.add_u16_range_check(*x);
}
}
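    /// Adds a 16-bit `Range` lookup event for the canonical value of each field element.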
fn add_u16_range_checks_field<F: PrimeField32>(&mut self, field_values: &[F]) {
for x in field_values.iter() {
self.add_u16_range_check(x.as_canonical_u32() as u16);
}
}
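    /// Adds an `OR` lookup event for `b | c`.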
fn lookup_or(&mut self, b: u8, c: u8) {
self.add_byte_lookup_event(ByteLookupEvent {
opcode: ByteOpcode::OR,
a: (b | c) as u16,
b,
c,
});
}
}
impl ByteLookupEvent {
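    /// Creates a new `ByteLookupEvent`.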
#[must_use]
pub fn new(opcode: ByteOpcode, a: u16, b: u8, c: u8) -> Self {
Self { opcode, a, b, c }
}
}
impl ByteRecord for Vec<ByteLookupEvent> {
fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent) {
self.push(blu_event);
}
fn add_byte_lookup_events_from_maps(&mut self, _: Vec<&HashMap<ByteLookupEvent, usize>>) {
unimplemented!()
}
}
impl ByteRecord for HashMap<ByteLookupEvent, usize> {
#[inline]
fn add_byte_lookup_event(&mut self, blu_event: ByteLookupEvent) {
self.entry(blu_event).and_modify(|e| *e += 1).or_insert(1);
}
fn add_byte_lookup_events_from_maps(
&mut self,
new_events: Vec<&HashMap<ByteLookupEvent, usize>>,
) {
for new_blu_map in new_events {
for (blu_event, count) in new_blu_map.iter() {
*self.entry(*blu_event).or_insert(0) += count;
}
}
}
}
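/// Converts an [`Opcode`] into the corresponding [`ByteOpcode`], panicking if the opcode is not
/// handled by the byte chip.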
impl From<Opcode> for ByteOpcode {
fn from(value: Opcode) -> Self {
match value {
Opcode::AND => Self::AND,
Opcode::OR => Self::OR,
Opcode::XOR => Self::XOR,
_ => panic!("Invalid opcode for ByteChip: {value:?}"),
}
}
}
impl ByteOpcode {
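    /// Returns the list of opcodes materialized in the byte table.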
#[must_use]
    pub fn byte_table() -> Vec<Self> {
        vec![
            ByteOpcode::AND,
            ByteOpcode::OR,
            ByteOpcode::XOR,
            ByteOpcode::U8Range,
            ByteOpcode::LTU,
            ByteOpcode::MSB,
        ]
    }
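    /// Converts the opcode into a field element.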
#[must_use]
pub fn as_field<F: Field>(self) -> F {
F::from_canonical_u8(self as u8)
}
}
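
// A minimal illustrative test of the two `ByteRecord` implementations above: the `Vec` record
// keeps duplicate events, while the `HashMap` record deduplicates them into multiplicities.
// The opcode and operand values below are arbitrary examples.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn vec_record_keeps_duplicates_and_map_record_counts_them() {
        let event = ByteLookupEvent::new(ByteOpcode::OR, 3, 1, 2);

        // The `Vec` implementation appends every event it receives.
        let mut vec_record: Vec<ByteLookupEvent> = Vec::new();
        vec_record.add_byte_lookup_event(event);
        vec_record.add_byte_lookup_event(event);
        assert_eq!(vec_record.len(), 2);

        // The `HashMap` implementation increments a multiplicity counter instead.
        let mut map_record: HashMap<ByteLookupEvent, usize> = HashMap::new();
        map_record.lookup_or(1, 2);
        map_record.lookup_or(1, 2);
        assert_eq!(map_record.get(&event), Some(&2));
    }
}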