use std::cmp::Ordering;
use std::mem::MaybeUninit;
use std::rc::Rc;
use std::sync::Arc;
use crate::cell::cell_context::{CellContext, CellParts};
use crate::cell::{
Cell, CellDescriptor, CellImpl, CellInner, CellSlice, CellType, DynCell, HashBytes, LevelMask,
Size, MAX_BIT_LEN, MAX_REF_COUNT,
};
use crate::error::Error;
use crate::util::{ArrayVec, Bitstring};
use super::CellFamily;
#[cfg(feature = "stats")]
use super::CellTreeStats;
pub trait Store {
fn store_into(
&self,
builder: &mut CellBuilder,
context: &mut dyn CellContext,
) -> Result<(), Error>;
}
impl<T: Store + ?Sized> Store for &T {
#[inline]
fn store_into(
&self,
builder: &mut CellBuilder,
context: &mut dyn CellContext,
) -> Result<(), Error> {
<T as Store>::store_into(self, builder, context)
}
}
impl<T: Store + ?Sized> Store for &mut T {
#[inline]
fn store_into(
&self,
builder: &mut CellBuilder,
context: &mut dyn CellContext,
) -> Result<(), Error> {
<T as Store>::store_into(self, builder, context)
}
}
impl<T: Store + ?Sized> Store for Box<T> {
#[inline]
fn store_into(
&self,
builder: &mut CellBuilder,
context: &mut dyn CellContext,
) -> Result<(), Error> {
<T as Store>::store_into(self.as_ref(), builder, context)
}
}
impl<T: Store + ?Sized> Store for Arc<T> {
#[inline]
fn store_into(
&self,
builder: &mut CellBuilder,
context: &mut dyn CellContext,
) -> Result<(), Error> {
<T as Store>::store_into(self.as_ref(), builder, context)
}
}
impl<T: Store + ?Sized> Store for Rc<T> {
#[inline]
fn store_into(
&self,
builder: &mut CellBuilder,
context: &mut dyn CellContext,
) -> Result<(), Error> {
<T as Store>::store_into(self.as_ref(), builder, context)
}
}
impl Store for () {
#[inline]
fn store_into(&self, _: &mut CellBuilder, _: &mut dyn CellContext) -> Result<(), Error> {
Ok(())
}
}
macro_rules! impl_store_for_tuples {
($( ($($field:ident: $t:ident),+) ),*$(,)?) => {$(
impl<$($t: Store),+> Store for ($($t),*) {
fn store_into(
&self,
builder: &mut CellBuilder,
context: &mut dyn CellContext
) -> Result<(), Error> {
let ($($field),+) = self;
$(ok!($field.store_into(builder, context)));*;
Ok(())
}
}
)*};
}
impl_store_for_tuples! {
(t1: T1, t2: T2),
(t1: T1, t2: T2, t3: T3),
(t1: T1, t2: T2, t3: T3, t4: T4),
(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5),
(t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6),
}
impl<T: Store> Store for Option<T> {
#[inline]
fn store_into(
&self,
builder: &mut CellBuilder,
context: &mut dyn CellContext,
) -> Result<(), Error> {
match self {
Some(data) => {
ok!(builder.store_bit_one());
data.store_into(builder, context)
}
None => builder.store_bit_zero(),
}
}
}
impl<'a> Store for CellSlice<'a> {
#[inline]
fn store_into(&self, builder: &mut CellBuilder, _: &mut dyn CellContext) -> Result<(), Error> {
builder.store_slice(self)
}
}
impl Store for Cell {
#[inline]
fn store_into(&self, builder: &mut CellBuilder, _: &mut dyn CellContext) -> Result<(), Error> {
builder.store_reference(self.clone())
}
}
macro_rules! impl_primitive_store {
($($type:ty => |$b:ident, $v:ident| $expr:expr),*$(,)?) => {
$(impl Store for $type {
#[inline]
fn store_into(&self,
$b: &mut CellBuilder,
_: &mut dyn CellContext
) -> Result<(), Error> {
let $v = self;
$expr
}
})*
};
}
impl_primitive_store! {
bool => |b, v| b.store_bit(*v),
u8 => |b, v| b.store_u8(*v),
i8 => |b, v| b.store_u8(*v as u8),
u16 => |b, v| b.store_u16(*v),
i16 => |b, v| b.store_u16(*v as u16),
u32 => |b, v| b.store_u32(*v),
i32 => |b, v| b.store_u32(*v as u32),
u64 => |b, v| b.store_u64(*v),
i64 => |b, v| b.store_u64(*v as u64),
u128 => |b, v| b.store_u128(*v),
i128 => |b, v| b.store_u128(*v as u128),
HashBytes => |b, v| b.store_u256(v),
}
pub struct CellBuilder {
data: [u8; 128],
bit_len: u16,
is_exotic: bool,
references: ArrayVec<Cell, MAX_REF_COUNT>,
}
impl Default for CellBuilder {
#[inline]
fn default() -> Self {
Self::new()
}
}
impl Clone for CellBuilder {
fn clone(&self) -> Self {
Self {
data: self.data,
bit_len: self.bit_len,
is_exotic: self.is_exotic,
references: self.references.clone(),
}
}
}
impl Eq for CellBuilder {}
impl PartialEq for CellBuilder {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.bit_len == other.bit_len
&& self.data == other.data
&& self.references.as_ref() == other.references.as_ref()
}
}
impl Ord for CellBuilder {
fn cmp(&self, other: &Self) -> Ordering {
match (self.bit_len, self.references.len()).cmp(&(other.bit_len, other.references.len())) {
Ordering::Equal => {}
ord => return ord,
}
match self.data.cmp(&other.data) {
Ordering::Equal => {}
ord => return ord,
}
for (a, b) in self.references().iter().zip(other.references().iter()) {
match a.repr_hash().cmp(b.repr_hash()) {
Ordering::Equal => {}
ord => return ord,
}
}
Ordering::Equal
}
}
impl PartialOrd for CellBuilder {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl std::fmt::Debug for CellBuilder {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
#[repr(transparent)]
struct Data<'a, T>(&'a T);
impl<T: std::fmt::Display> std::fmt::Debug for Data<'_, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.0, f)
}
}
f.debug_struct("CellBuilder")
.field("data", &Data(&self.display_data()))
.field("bit_len", &self.bit_len)
.field("is_exotic", &self.is_exotic)
.field("references", &self.references.as_ref())
.finish()
}
}
macro_rules! impl_store_uint {
($self:ident, $value:ident, bytes: $bytes:literal, bits: $bits:literal) => {
if $self.bit_len + $bits <= MAX_BIT_LEN {
let q = ($self.bit_len / 8) as usize;
let r = $self.bit_len % 8;
unsafe {
let data_ptr = $self.data.as_mut_ptr().add(q);
debug_assert!(q + $bytes + usize::from(r > 0) <= 128);
if r == 0 {
let value = $value.to_be_bytes();
std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, $bytes);
} else {
*data_ptr |= ($value >> ($bits - 8 + r)) as u8;
let value: [u8; $bytes] = ($value << (8 - r)).to_be_bytes();
std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr.add(1), $bytes);
}
};
$self.bit_len += $bits;
Ok(())
} else {
Err(Error::CellOverflow)
}
};
}
impl CellBuilder {
#[inline]
pub fn build_from<T>(data: T) -> Result<Cell, Error>
where
T: Store,
{
Self::build_from_ext(data, &mut Cell::empty_context())
}
#[inline]
pub fn build_from_ext<T>(data: T, context: &mut dyn CellContext) -> Result<Cell, Error>
where
T: Store,
{
fn build_from_ext_impl(
data: &dyn Store,
context: &mut dyn CellContext,
) -> Result<Cell, Error> {
let mut builder = CellBuilder::new();
ok!(data.store_into(&mut builder, context));
builder.build_ext(context)
}
build_from_ext_impl(&data, context)
}
pub const fn new() -> Self {
Self {
data: [0; 128],
bit_len: 0,
is_exotic: false,
references: ArrayVec::new(),
}
}
pub fn from_raw_data(value: &[u8], bits: u16) -> Result<Self, Error> {
let mut res = Self::new();
ok!(res.store_raw(value, bits));
Ok(res)
}
pub fn as_data_slice(&self) -> CellSlice<'_> {
unsafe { CellSlice::new_unchecked(IntermediateDataCell::wrap(self)) }
}
pub fn as_full_slice(&self) -> CellSlice<'_> {
unsafe { CellSlice::new_unchecked(IntermediateFullCell::wrap(self)) }
}
#[inline]
pub const fn raw_data(&self) -> &[u8; 128] {
&self.data
}
pub const fn size(&self) -> Size {
Size {
bits: self.bit_len,
refs: self.references.len() as u8,
}
}
#[inline]
pub const fn size_bits(&self) -> u16 {
self.bit_len
}
#[inline(always)]
pub const fn size_refs(&self) -> u8 {
self.references.len() as u8
}
pub const fn spare_capacity(&self) -> Size {
Size {
bits: self.spare_capacity_bits(),
refs: self.spare_capacity_refs(),
}
}
#[inline]
pub const fn spare_capacity_bits(&self) -> u16 {
MAX_BIT_LEN - self.bit_len
}
#[inline]
pub const fn spare_capacity_refs(&self) -> u8 {
(MAX_REF_COUNT - self.references.len()) as u8
}
#[inline]
pub const fn has_capacity(&self, bits: u16, refs: u8) -> bool {
self.bit_len + bits <= MAX_BIT_LEN && self.references.len() + refs as usize <= MAX_REF_COUNT
}
#[inline]
pub const fn is_exotic(&self) -> bool {
self.is_exotic
}
#[inline]
pub fn set_exotic(&mut self, is_exotic: bool) {
self.is_exotic = is_exotic;
}
pub fn rewind(&mut self, mut bits: u16) -> Result<(), Error> {
if bits == 0 {
return Ok(());
}
let Some(new_bit_len) = self.bit_len.checked_sub(bits) else {
return Err(Error::CellUnderflow);
};
let q = (new_bit_len / 8) as usize;
let r = new_bit_len % 8;
unsafe {
let mut data_ptr = self.data.as_mut_ptr().add(q);
if r != 0 {
let shift = 8 - r;
*data_ptr &= 0xff << shift;
bits = bits.saturating_sub(shift);
data_ptr = data_ptr.add(1);
}
std::ptr::write_bytes(data_ptr, 0, ((bits + 7) / 8) as usize);
}
self.bit_len = new_bit_len;
Ok(())
}
pub fn store_zeros(&mut self, bits: u16) -> Result<(), Error> {
if self.bit_len + bits <= MAX_BIT_LEN {
self.bit_len += bits;
Ok(())
} else {
Err(Error::CellOverflow)
}
}
pub fn store_ones(&mut self, bits: u16) -> Result<(), Error> {
self.store_raw(crate::cell::cell_impl::ALL_ONES_CELL.data(), bits)
}
pub fn store_bit_zero(&mut self) -> Result<(), Error> {
let fits = self.bit_len < MAX_BIT_LEN;
self.bit_len += fits as u16;
if fits {
Ok(())
} else {
Err(Error::CellOverflow)
}
}
pub fn store_bit_one(&mut self) -> Result<(), Error> {
if self.bit_len < MAX_BIT_LEN {
let q = (self.bit_len / 8) as usize;
let r = self.bit_len % 8;
unsafe { *self.data.get_unchecked_mut(q) |= 1 << (7 - r) };
self.bit_len += 1;
Ok(())
} else {
Err(Error::CellOverflow)
}
}
pub fn store_bit(&mut self, value: bool) -> Result<(), Error> {
if value {
self.store_bit_one()
} else {
self.store_bit_zero()
}
}
pub fn store_u8(&mut self, value: u8) -> Result<(), Error> {
if self.bit_len + 8 <= MAX_BIT_LEN {
let q = (self.bit_len / 8) as usize;
let r = self.bit_len % 8;
unsafe {
if r == 0 {
debug_assert!(q < 128);
*self.data.get_unchecked_mut(q) = value;
} else {
debug_assert!(q + 1 < 128);
*self.data.get_unchecked_mut(q) |= value >> r;
*self.data.get_unchecked_mut(q + 1) = value << (8 - r);
}
};
self.bit_len += 8;
Ok(())
} else {
Err(Error::CellOverflow)
}
}
pub fn store_u16(&mut self, value: u16) -> Result<(), Error> {
impl_store_uint!(self, value, bytes: 2, bits: 16)
}
pub fn store_u32(&mut self, value: u32) -> Result<(), Error> {
impl_store_uint!(self, value, bytes: 4, bits: 32)
}
pub fn store_u64(&mut self, value: u64) -> Result<(), Error> {
impl_store_uint!(self, value, bytes: 8, bits: 64)
}
pub fn store_u128(&mut self, value: u128) -> Result<(), Error> {
impl_store_uint!(self, value, bytes: 16, bits: 128)
}
#[inline]
pub fn store_u256<T>(&mut self, value: &T) -> Result<(), Error>
where
T: AsRef<[u8; 32]>,
{
fn store_u256_impl(builder: &mut CellBuilder, value: &[u8; 32]) -> Result<(), Error> {
if builder.bit_len + 256 <= MAX_BIT_LEN {
let q = (builder.bit_len / 8) as usize;
let r = builder.bit_len % 8;
unsafe {
let data_ptr = builder.data.as_mut_ptr().add(q);
debug_assert!(q + 32 + usize::from(r > 0) <= 128);
if r == 0 {
std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, 32);
} else {
let [mut hi, mut lo]: [u128; 2] = std::mem::transmute_copy(value);
#[cfg(target_endian = "little")]
{
hi = hi.swap_bytes();
lo = lo.swap_bytes();
}
let shift = 8 - r;
*data_ptr |= (hi >> (128 - shift)) as u8;
let hi: [u8; 16] = ((hi << shift) | (lo >> (128 - shift))).to_be_bytes();
let lo: [u8; 16] = (lo << shift).to_be_bytes();
std::ptr::copy_nonoverlapping(hi.as_ptr(), data_ptr.add(1), 16);
std::ptr::copy_nonoverlapping(lo.as_ptr(), data_ptr.add(17), 16);
}
};
builder.bit_len += 256;
Ok(())
} else {
Err(Error::CellOverflow)
}
}
store_u256_impl(self, value.as_ref())
}
pub fn store_small_uint(&mut self, mut value: u8, mut bits: u16) -> Result<(), Error> {
if bits == 0 {
return Ok(());
}
if self.bit_len + bits <= MAX_BIT_LEN {
bits = if let Some(offset) = bits.checked_sub(8) {
self.bit_len += offset;
8
} else {
bits
};
value <<= 8 - bits;
let q = (self.bit_len / 8) as usize;
let r = self.bit_len % 8;
unsafe {
debug_assert!(q < 128);
if r == 0 {
*self.data.get_unchecked_mut(q) = value;
} else {
*self.data.get_unchecked_mut(q) |= value >> r;
if bits + r > 8 {
debug_assert!(q + 1 < 128);
*self.data.get_unchecked_mut(q + 1) = value << (8 - r);
}
}
};
self.bit_len += bits;
Ok(())
} else {
Err(Error::CellOverflow)
}
}
pub fn store_uint(&mut self, mut value: u64, mut bits: u16) -> Result<(), Error> {
if bits == 0 {
return Ok(());
}
if self.bit_len + bits <= MAX_BIT_LEN {
bits = if let Some(offset) = bits.checked_sub(64) {
self.bit_len += offset;
64
} else {
bits
};
value <<= 64 - bits;
let q = (self.bit_len / 8) as usize;
let r = self.bit_len % 8;
unsafe {
let data_ptr = self.data.as_mut_ptr().add(q);
if r == 0 {
let byte_len = ((bits + 7) / 8) as usize;
debug_assert!(q + byte_len <= 128);
let value = value.to_be_bytes();
std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, byte_len);
} else {
debug_assert!(q < 128);
let shift = 8 - r;
*data_ptr |= (value >> (64 - shift)) as u8;
if let Some(bits) = bits.checked_sub(shift) {
if bits > 0 {
let byte_len = ((bits + 7) / 8) as usize;
debug_assert!(q + 1 + byte_len <= 128);
let value: [u8; 8] = (value << shift).to_be_bytes();
std::ptr::copy_nonoverlapping(
value.as_ptr(),
data_ptr.add(1),
byte_len,
);
}
}
}
}
self.bit_len += bits;
Ok(())
} else {
Err(Error::CellOverflow)
}
}
pub fn store_raw(&mut self, value: &[u8], bits: u16) -> Result<(), Error> {
store_raw(&mut self.data, &mut self.bit_len, value, bits)
}
#[inline]
pub fn store_cell_data<T>(&mut self, value: T) -> Result<(), Error>
where
T: AsRef<DynCell>,
{
fn store_cell_data_impl(builder: &mut CellBuilder, value: &DynCell) -> Result<(), Error> {
store_raw(
&mut builder.data,
&mut builder.bit_len,
value.data(),
value.bit_len(),
)
}
store_cell_data_impl(self, value.as_ref())
}
#[inline]
pub fn store_slice_data<'a, T>(&mut self, value: T) -> Result<(), Error>
where
T: AsRef<CellSlice<'a>>,
{
fn store_slice_data_impl(
builder: &mut CellBuilder,
value: &CellSlice<'_>,
) -> Result<(), Error> {
let bits = value.size_bits();
if builder.bit_len + bits <= MAX_BIT_LEN {
let mut slice_data =
unsafe { MaybeUninit::<[MaybeUninit<u8>; 128]>::uninit().assume_init() };
let slice_data = unsafe {
&mut *(&mut slice_data as *mut [MaybeUninit<u8>; 128] as *mut [u8; 128])
};
let slice_data = ok!(value.get_raw(0, slice_data, bits));
builder.store_raw(slice_data, bits)
} else {
Err(Error::CellOverflow)
}
}
store_slice_data_impl(self, value.as_ref())
}
pub fn prepend_raw(&mut self, value: &[u8], bits: u16) -> Result<(), Error> {
if bits == 0 {
return Ok(());
}
fn store_raw_impl(
data: &mut [u8; 128],
bit_len: &mut u16,
value: &[u8],
bits: u16,
) -> Result<(), Error> {
store_raw(data, bit_len, value, bits)
}
if self.bit_len + bits <= MAX_BIT_LEN {
let mut data = [0; 128];
let mut bit_len = 0;
ok!(store_raw_impl(&mut data, &mut bit_len, value, bits));
ok!(store_raw_impl(
&mut data,
&mut bit_len,
&self.data,
self.bit_len
));
self.data = data;
self.bit_len = bit_len;
Ok(())
} else {
Err(Error::CellOverflow)
}
}
}
#[inline]
fn store_raw(
data: &mut [u8; 128],
bit_len: &mut u16,
value: &[u8],
mut bits: u16,
) -> Result<(), Error> {
if *bit_len + bits <= MAX_BIT_LEN {
let max_bit_len = value.len().saturating_mul(8) as u16;
bits = if let Some(offset) = bits.checked_sub(max_bit_len) {
*bit_len += offset;
max_bit_len
} else {
bits
};
if bits == 0 {
return Ok(());
}
let q = (*bit_len / 8) as usize;
let r = *bit_len % 8;
unsafe {
let mut data_ptr = data.as_mut_ptr().add(q);
let mut value_ptr = value.as_ptr();
if r == 0 {
let byte_len = ((bits + 7) / 8) as usize;
debug_assert!(q + byte_len <= 128);
debug_assert!(byte_len <= value.len());
std::ptr::copy_nonoverlapping(value_ptr, data_ptr, byte_len);
let bits_r = bits % 8;
if bits_r != 0 {
*data_ptr.add(byte_len - 1) &= 0xff << (8 - bits_r);
}
} else {
let byte_len = ((bits + r + 7) / 8) as usize - 1;
let value_len = ((bits + 7) / 8) as usize;
debug_assert!(q + byte_len <= 128);
debug_assert!(byte_len <= value_len && value_len <= value.len());
let shift = 8 - r;
for _ in 0..byte_len {
*data_ptr |= *value_ptr >> r;
data_ptr = data_ptr.add(1);
*data_ptr = *value_ptr << shift;
value_ptr = value_ptr.add(1);
}
if byte_len < value_len {
*data_ptr |= *value_ptr >> r;
}
let bits_r = (r + bits) % 8;
if bits_r != 0 {
*data_ptr &= 0xff << (8 - bits_r);
}
}
}
*bit_len += bits;
Ok(())
} else {
Err(Error::CellOverflow)
}
}
impl CellBuilder {
#[inline]
pub fn references(&self) -> &[Cell] {
self.references.as_ref()
}
pub fn store_reference(&mut self, cell: Cell) -> Result<(), Error> {
if self.references.len() < MAX_REF_COUNT {
unsafe { self.references.push(cell) }
Ok(())
} else {
Err(Error::CellOverflow)
}
}
pub fn set_references(&mut self, refs: CellRefsBuilder) {
self.references = refs.0;
}
pub fn store_builder(&mut self, builder: &Self) -> Result<(), Error> {
if self.bit_len + builder.bit_len <= MAX_BIT_LEN
&& self.references.len() + builder.references.len() <= MAX_REF_COUNT
{
ok!(self.store_raw(&builder.data, builder.bit_len));
for cell in builder.references.as_ref() {
ok!(self.store_reference(cell.clone()));
}
Ok(())
} else {
Err(Error::CellOverflow)
}
}
#[inline]
pub fn store_slice<'a, T>(&mut self, value: T) -> Result<(), Error>
where
T: AsRef<CellSlice<'a>>,
{
fn store_slice_impl(builder: &mut CellBuilder, value: &CellSlice<'_>) -> Result<(), Error> {
if builder.bit_len + value.size_bits() <= MAX_BIT_LEN
&& builder.references.len() + value.size_refs() as usize <= MAX_REF_COUNT
{
ok!(builder.store_slice_data(value));
for cell in value.references().cloned() {
ok!(builder.store_reference(cell));
}
Ok(())
} else {
Err(Error::CellOverflow)
}
}
store_slice_impl(self, value.as_ref())
}
pub fn build_ext(mut self, context: &mut dyn CellContext) -> Result<Cell, Error> {
debug_assert!(self.bit_len <= MAX_BIT_LEN);
debug_assert!(self.references.len() <= MAX_REF_COUNT);
#[cfg(feature = "stats")]
let mut stats = CellTreeStats {
bit_count: self.bit_len as u64,
cell_count: 1,
};
let mut children_mask = LevelMask::EMPTY;
for child in self.references.as_ref() {
let child = child.as_ref();
children_mask |= child.descriptor().level_mask();
#[cfg(feature = "stats")]
{
stats += child.stats();
}
}
let is_exotic = self.is_exotic;
let level_mask = 'mask: {
if is_exotic && self.bit_len >= 8 {
if let Some(ty) = CellType::from_byte_exotic(self.data[0]) {
match ty {
CellType::PrunedBranch => break 'mask LevelMask::new(self.data[1]),
CellType::MerkleProof | CellType::MerkleUpdate => {
break 'mask children_mask.virtualize(1)
}
CellType::LibraryReference => break 'mask LevelMask::EMPTY,
_ => {}
};
}
}
children_mask
};
let d1 = CellDescriptor::compute_d1(level_mask, is_exotic, self.references.len() as u8);
let d2 = CellDescriptor::compute_d2(self.bit_len);
let rem = self.bit_len % 8;
let last_byte = (self.bit_len / 8) as usize;
if rem > 0 {
let last_byte = unsafe { self.data.get_unchecked_mut(last_byte) };
let tag_mask: u8 = 1 << (7 - rem);
let data_mask = !(tag_mask - 1);
*last_byte = (*last_byte & data_mask) | tag_mask;
}
let byte_len = (self.bit_len + 7) / 8;
let data = &self.data[..std::cmp::min(byte_len as usize, 128)];
let cell_parts = CellParts {
#[cfg(feature = "stats")]
stats,
bit_len: self.bit_len,
descriptor: CellDescriptor { d1, d2 },
children_mask,
references: self.references,
data,
};
context.finalize_cell(cell_parts)
}
pub fn build(self) -> Result<Cell, Error> {
self.build_ext(&mut Cell::empty_context())
}
pub fn display_data(&self) -> impl std::fmt::Display + std::fmt::Binary + '_ {
struct DisplayData<'a>(&'a CellBuilder);
impl std::fmt::Display for DisplayData<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(
&Bitstring {
bytes: &self.0.data,
bit_len: self.0.bit_len,
},
f,
)
}
}
impl std::fmt::Binary for DisplayData<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Binary::fmt(
&Bitstring {
bytes: &self.0.data,
bit_len: self.0.bit_len,
},
f,
)
}
}
DisplayData(self)
}
}
#[derive(Default)]
#[repr(transparent)]
pub struct CellRefsBuilder(ArrayVec<Cell, MAX_REF_COUNT>);
impl CellRefsBuilder {
pub fn store_reference(&mut self, cell: Cell) -> Result<(), Error> {
if self.0.len() < MAX_REF_COUNT {
unsafe { self.0.push(cell) }
Ok(())
} else {
Err(Error::CellOverflow)
}
}
pub fn compute_level_mask(&self) -> LevelMask {
let mut result = LevelMask::EMPTY;
for child in self.0.as_ref() {
result |= child.as_ref().level_mask();
}
result
}
}
#[repr(transparent)]
struct IntermediateDataCell(CellBuilder);
impl IntermediateDataCell {
#[inline(always)]
const fn wrap(value: &CellBuilder) -> &Self {
unsafe { &*(value as *const CellBuilder as *const Self) }
}
}
impl CellImpl for IntermediateDataCell {
fn untrack(self: CellInner<Self>) -> Cell {
Cell(self)
}
fn descriptor(&self) -> CellDescriptor {
CellDescriptor {
d1: 0,
d2: CellDescriptor::compute_d2(self.0.bit_len),
}
}
fn data(&self) -> &[u8] {
self.0.raw_data()
}
fn bit_len(&self) -> u16 {
self.0.bit_len
}
fn reference(&self, _: u8) -> Option<&DynCell> {
None
}
fn reference_cloned(&self, _: u8) -> Option<Cell> {
None
}
fn virtualize(&self) -> &DynCell {
self
}
fn hash(&self, _: u8) -> &HashBytes {
panic!("Hash for an intermediate data cell is not defined");
}
fn depth(&self, _: u8) -> u16 {
0
}
fn take_first_child(&mut self) -> Option<Cell> {
None
}
fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell> {
Err(parent)
}
fn take_next_child(&mut self) -> Option<Cell> {
None
}
#[cfg(feature = "stats")]
fn stats(&self) -> CellTreeStats {
CellTreeStats {
bit_count: self.0.bit_len as u64,
cell_count: 1,
}
}
}
#[repr(transparent)]
struct IntermediateFullCell(CellBuilder);
impl IntermediateFullCell {
#[inline(always)]
const fn wrap(value: &CellBuilder) -> &Self {
unsafe { &*(value as *const CellBuilder as *const Self) }
}
}
impl CellImpl for IntermediateFullCell {
fn untrack(self: CellInner<Self>) -> Cell {
Cell(self)
}
fn descriptor(&self) -> CellDescriptor {
CellDescriptor {
d1: CellDescriptor::compute_d1(LevelMask::EMPTY, false, self.0.references.len() as u8),
d2: CellDescriptor::compute_d2(self.0.bit_len),
}
}
fn data(&self) -> &[u8] {
self.0.raw_data()
}
fn bit_len(&self) -> u16 {
self.0.bit_len
}
fn reference(&self, index: u8) -> Option<&DynCell> {
match self.0.references.get(index) {
Some(cell) => Some(cell.as_ref()),
None => None,
}
}
fn reference_cloned(&self, index: u8) -> Option<Cell> {
self.0.references.get(index).cloned()
}
fn virtualize(&self) -> &DynCell {
self
}
fn hash(&self, _: u8) -> &HashBytes {
panic!("Hash for an intermediate data cell is not defined");
}
fn depth(&self, _: u8) -> u16 {
0
}
fn take_first_child(&mut self) -> Option<Cell> {
None
}
fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell> {
Err(parent)
}
fn take_next_child(&mut self) -> Option<Cell> {
None
}
#[cfg(feature = "stats")]
fn stats(&self) -> CellTreeStats {
CellTreeStats {
bit_count: self.0.bit_len as u64,
cell_count: 1 + self.0.references.len() as u64,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn clone_builder() {
let mut builder = CellBuilder::new();
builder.store_u32(0xdeafbeaf).unwrap();
let cell1 = builder.clone().build().unwrap();
let cell2 = builder.clone().build().unwrap();
assert_eq!(cell1.as_ref(), cell2.as_ref());
builder.store_u32(0xb00b5).unwrap();
let cell3 = builder.build().unwrap();
assert_ne!(cell1.as_ref(), cell3.as_ref());
}
#[test]
fn compare_builders() {
let mut a = CellBuilder::new();
a.store_u32(0xdeafbeaf).unwrap();
let mut b = CellBuilder::new();
b.store_u32(0xdeafbeaf).unwrap();
assert_eq!(a, b);
b.store_u8(1).unwrap();
assert!(a < b);
a.store_u8(2).unwrap();
assert!(a > b);
a.rewind(8).unwrap();
a.store_u8(1).unwrap();
assert_eq!(a, b);
let child_a = a.clone().build().unwrap();
a.store_reference(child_a.clone()).unwrap();
assert!(a > b);
let child_b = b.clone().build().unwrap();
b.store_reference(child_b).unwrap();
assert_eq!(a, b);
let child_b2 = b.clone().build().unwrap();
a.store_reference(child_a).unwrap();
b.store_reference(child_b2).unwrap();
assert_ne!(a, b);
}
#[test]
fn rewind_builder() {
let mut builder = CellBuilder::new();
builder.store_u32(0xdeafbeaf).unwrap();
assert_eq!(builder.size_bits(), 32);
assert_eq!(builder.data[..4], 0xdeafbeaf_u32.to_be_bytes());
builder.rewind(5).unwrap();
assert_eq!(builder.size_bits(), 27);
assert_eq!(builder.data[..4], 0xdeafbea0_u32.to_be_bytes());
builder.store_u32(0xdeafbeaf).unwrap();
assert_eq!(builder.size_bits(), 32 + 27);
assert_eq!(
builder.data[..8],
[0xde, 0xaf, 0xbe, 0xbb, 0xd5, 0xf7, 0xd5, 0xe0]
);
builder.rewind(32).unwrap();
assert_eq!(
builder.data[..8],
[0xde, 0xaf, 0xbe, 0xa0, 0x00, 0x00, 0x00, 0x00]
);
assert_eq!(builder.rewind(32), Err(Error::CellUnderflow));
builder.rewind(27).unwrap();
assert_eq!(builder.size_bits(), 0);
assert_eq!(builder.data, [0u8; 128]);
builder.store_raw(&[0xff; 128], MAX_BIT_LEN).unwrap();
assert_eq!(builder.size_bits(), MAX_BIT_LEN);
let mut target = [0xff; 128];
target[127] = 0xfe;
assert_eq!(builder.data, target);
builder.rewind(3).unwrap();
assert_eq!(builder.size_bits(), MAX_BIT_LEN - 3);
target[127] = 0xf0;
assert_eq!(builder.data, target);
builder.rewind(8).unwrap();
assert_eq!(builder.size_bits(), MAX_BIT_LEN - 3 - 8);
target[126] = 0xf0;
target[127] = 0x00;
assert_eq!(builder.data, target);
}
#[test]
#[cfg_attr(miri, ignore)] fn store_raw() {
const ONES: &[u8] = &[0xff; 128];
for offset in 0..8 {
for bits in 0..=1016 {
let mut builder = CellBuilder::new();
builder.store_zeros(offset).unwrap();
builder.store_raw(ONES, bits).unwrap();
builder.build().unwrap();
}
}
}
#[test]
fn prepend_raw() {
let mut builder = CellBuilder::new();
builder.store_raw(&[0xde, 0xaf, 0xbe, 0xaf], 20).unwrap();
builder.prepend_raw(&[0xaa, 0x55], 5).unwrap();
let cell = builder.build().unwrap();
println!("{}", cell.display_tree());
}
#[test]
fn store_slice() -> anyhow::Result<()> {
const SOME_HASH: &HashBytes = HashBytes::wrap(&[
0xdf, 0x86, 0xce, 0xbc, 0xe8, 0xd5, 0xab, 0x0c, 0x69, 0xb4, 0xce, 0x33, 0xfe, 0x9b,
0x0e, 0x2c, 0xdf, 0x69, 0xa3, 0xe1, 0x13, 0x7e, 0x64, 0x85, 0x6b, 0xbc, 0xfd, 0x39,
0xe7, 0x9b, 0xc1, 0x6f,
]);
let mut builder = CellBuilder::new();
builder.store_zeros(3)?;
builder.store_u256(SOME_HASH)?;
let cell = builder.build()?;
println!("{}", cell.display_tree());
let mut builder = CellBuilder::new();
let mut slice = cell.as_slice()?;
slice.skip_first(3, 0)?;
builder.store_slice(slice)?;
let cell = builder.build()?;
println!("{}", cell.display_tree());
assert_eq!(cell.data(), SOME_HASH);
Ok(())
}
}