// essential_types/predicate.rs

use crate::{serde::bytecode, ConstraintBytecode, StateReadBytecode};
use header::{check_predicate_bounds, encoded_size, EncodedSize, PredicateBounds, PredicateError};
use serde::{Deserialize, Serialize};
#[cfg(feature = "schema")]
use schemars::JsonSchema;
#[cfg(test)]
mod tests;
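
/// Encoding, decoding and validation of the [`Predicate`] header.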
pub mod header;
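
/// A predicate, comprised of its state read programs and its constraint programs.
///
/// Program bytecode is stored as raw bytes. See [`Predicate::check_predicate_bounds`]
/// for validation against the associated `MAX_*` limits.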
#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(JsonSchema))]
pub struct Predicate {
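    /// The programs that read the state required by this predicate.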
#[serde(
serialize_with = "bytecode::serialize_vec",
deserialize_with = "bytecode::deserialize_vec"
)]
pub state_read: Vec<StateReadBytecode>,
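    /// The programs that express this predicate's constraints.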
#[serde(
serialize_with = "bytecode::serialize_vec",
deserialize_with = "bytecode::deserialize_vec"
)]
pub constraints: Vec<ConstraintBytecode>,
}
impl Predicate {
    /// The maximum number of state read programs in a single predicate.
    pub const MAX_STATE_READS: usize = u8::MAX as usize;
    /// The maximum size of a single state read program in bytes.
    pub const MAX_STATE_READ_SIZE_BYTES: usize = 10_000;
    /// The maximum number of constraint programs in a single predicate.
    pub const MAX_CONSTRAINTS: usize = u8::MAX as usize;
    /// The maximum size of a single constraint program in bytes.
    pub const MAX_CONSTRAINT_SIZE_BYTES: usize = 10_000;
    /// The maximum total number of bytes allowed for a single predicate.
    pub const MAX_BYTES: usize = 1024 * 50;
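
    /// Returns an iterator over all programs as byte slices: state read programs
    /// first, followed by constraint programs.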
pub fn programs(&self) -> impl Iterator<Item = &[u8]> {
self.state_read
.iter()
.chain(self.constraints.iter())
.map(|x| x.as_slice())
}
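
    /// Consumes the predicate, returning an owning iterator over all programs:
    /// state read programs first, followed by constraint programs.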
pub fn into_programs(self) -> impl Iterator<Item = Vec<u8>> {
self.state_read.into_iter().chain(self.constraints)
}
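
    /// Builds the encoded header for this predicate.
    ///
    /// Returns an error if the predicate exceeds any of its bounds.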
pub fn encoded_header(&self) -> Result<header::EncodedHeader, PredicateError> {
let static_header = self.fixed_size_header()?.into();
let lens = header::encode_program_lengths(self);
Ok(header::EncodedHeader {
fixed_size_header: static_header,
lens,
})
}
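
    /// Encodes the predicate, yielding the bytes of the encoded header followed
    /// by the bytes of each program.
    ///
    /// Returns an error if the predicate exceeds any of its bounds.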
pub fn encode(&self) -> Result<impl Iterator<Item = u8> + '_, PredicateError> {
let header = self.encoded_header()?;
Ok(header
.into_iter()
.chain(self.programs().flat_map(|x| x.iter().copied())))
}
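
    /// The number of bytes in this predicate's encoding.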
pub fn encoded_size(&self) -> usize {
let sizes = EncodedSize {
num_state_reads: self.state_read.len(),
num_constraints: self.constraints.len(),
state_read_lens_sum: self
.state_read
.iter()
.fold(0, |i, p| i.saturating_add(p.len())),
constraint_lens_sum: self
.constraints
.iter()
.fold(0, |i, p| i.saturating_add(p.len())),
};
encoded_size(&sizes)
}
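
    /// Decodes a predicate from its byte encoding, as produced by [`Self::encode`].
    ///
    /// Returns an error if the header cannot be decoded or the buffer is shorter
    /// than the length implied by the header.
    ///
    /// # Example
    ///
    /// A minimal round-trip sketch (illustrative only: the program bytes are
    /// arbitrary placeholders rather than meaningful bytecode, and `Predicate`
    /// is assumed to be in scope):
    ///
    /// ```ignore
    /// let predicate = Predicate {
    ///     state_read: vec![vec![0u8; 4]],
    ///     constraints: vec![vec![1u8; 4]],
    /// };
    /// // Encode to a byte vector, then decode back into an equal `Predicate`.
    /// let bytes: Vec<u8> = predicate.encode().expect("within bounds").collect();
    /// let decoded = Predicate::decode(&bytes).expect("valid encoding");
    /// assert_eq!(predicate, decoded);
    /// ```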
pub fn decode(bytes: &[u8]) -> Result<Self, header::DecodeError> {
let header = header::DecodedHeader::decode(bytes)?;
if bytes.len() < header.bytes_len() {
return Err(header::DecodeError::BufferTooSmall);
}
let num_state_reads = header.num_state_reads();
let num_constraints = header.num_constraints();
let mut predicate = Self {
state_read: Vec::with_capacity(num_state_reads),
constraints: Vec::with_capacity(num_constraints),
};
let mut offset = header::state_len_buffer_offset(num_state_reads, num_constraints);
predicate
.state_read
.extend(
header
.state_reads
.chunks_exact(header::LEN_SIZE_BYTES)
.map(|chunk| {
let len = u16::from_be_bytes([chunk[0], chunk[1]]) as usize;
let start = offset;
offset += len;
bytes[start..offset].to_vec()
}),
);
predicate
.constraints
.extend(
header
.constraints
.chunks_exact(header::LEN_SIZE_BYTES)
.map(|chunk| {
let len = u16::from_be_bytes([chunk[0], chunk[1]]) as usize;
let start = offset;
offset += len;
bytes[start..offset].to_vec()
}),
);
Ok(predicate)
}
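
    /// Checks this predicate against the `MAX_*` bounds defined on [`Predicate`].
    ///
    /// Returns an error if any bound is exceeded.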
pub fn check_predicate_bounds(&self) -> Result<(), PredicateError> {
let bounds = PredicateBounds {
num_state_reads: self.state_read.len(),
num_constraints: self.constraints.len(),
state_read_lens: self.state_read.iter().map(|x| x.len()),
constraint_lens: self.constraints.iter().map(|x| x.len()),
};
check_predicate_bounds(bounds)
}
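
    /// Builds the fixed-size portion of the header, checking the predicate
    /// bounds in the process.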
fn fixed_size_header(&self) -> Result<header::FixedSizeHeader, PredicateError> {
self.check_predicate_bounds()?;
Ok(header::FixedSizeHeader {
num_state_reads: self.state_read.len() as u8,
num_constraints: self.constraints.len() as u8,
})
}
}