use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use syn::{
parse_quote, spanned::Spanned, Data, DataEnum, DataStruct, DeriveInput, Field, Fields,
Generics, Ident, Type, WherePredicate,
};
use crate::attrs::{
parse_enum_attrs, parse_field_attrs, parse_struct_attrs, parse_variant_attrs, FieldAttrs,
FieldDefault, Tag,
};
use crate::crate_path::resolve_tonlib_core_path;
use crate::generics::{add_where_predicates, strip_type_defaults};
pub(crate) fn derive_tlb(input: DeriveInput) -> syn::Result<TokenStream> {
let crate_path = resolve_tonlib_core_path()?;
match &input.data {
Data::Struct(data) => derive_struct(&input, data, &crate_path),
Data::Enum(data) => derive_enum(&input, data, &crate_path),
Data::Union(data) => Err(syn::Error::new_spanned(
data.union_token,
"TLB derive does not support unions",
)),
}
}
/// Generates the `TLB` impl for a struct (named, tuple, or unit).
///
/// The struct-level `#[tlb(...)]` attributes supply an optional tag, which
/// becomes the impl's `PREFIX` constant; an absent tag yields the empty
/// (zero-bit) prefix. Field read/write statements come from the per-shape
/// helpers below.
fn derive_struct(
    input: &DeriveInput,
    data: &DataStruct,
    crate_path: &TokenStream,
) -> syn::Result<TokenStream> {
    let attrs = parse_struct_attrs(&input.attrs)?;
    let name = &input.ident;
    let prefix = prefix_tokens(crate_path, attrs.tag.unwrap_or(Tag::EMPTY));
    let (_, original_ty_generics, _) = input.generics.split_for_impl();
    // Destructure the body up front so the field bounds can be extended in
    // place instead of cloning the whole Vec<WherePredicate>.
    let StructBody {
        read_statements,
        init,
        write_statements,
        mut required_bounds,
    } = match &data.fields {
        Fields::Named(fields) => derive_named_struct(fields.named.iter(), crate_path)?,
        Fields::Unnamed(fields) => derive_tuple_struct(fields.unnamed.iter(), crate_path)?,
        Fields::Unit => StructBody {
            read_statements: Vec::new(),
            init: quote!(Self),
            write_statements: Vec::new(),
            required_bounds: Vec::new(),
        },
    };
    // The generated impl also needs the implementing type itself to be
    // Clone + Debug, matching the enum derive.
    required_bounds.push(parse_quote!(
        #name #original_ty_generics: ::core::clone::Clone + ::core::fmt::Debug
    ));
    let (impl_generics, ty_generics, where_clause) =
        impl_generics(&input.generics, crate_path, &required_bounds);
    let tlb_trait = quote!(#crate_path::tlb_types::tlb::TLB);
    let error_path = quote!(#crate_path::cell::TonCellError);
    let cell_builder_path = quote!(#crate_path::cell::CellBuilder);
    let cell_parser_path = quote!(#crate_path::cell::CellParser);
    Ok(quote! {
        impl #impl_generics #tlb_trait for #name #ty_generics #where_clause {
            const PREFIX: #crate_path::tlb_types::tlb::TLBPrefix = #prefix;
            fn read_definition(parser: &mut #cell_parser_path) -> Result<Self, #error_path> {
                #(#read_statements)*
                Ok(#init)
            }
            fn write_definition(&self, builder: &mut #cell_builder_path) -> Result<(), #error_path> {
                #(#write_statements)*
                Ok(())
            }
        }
    })
}
/// Generates the `TLB` impl for an enum.
///
/// Each variant may carry a `#[tlb(tag = ...)]`; tagged variants are matched
/// by probing the parser for their tag (rewinding on a miss), while at most
/// one untagged variant serves as the fallback parse. No `PREFIX` constant is
/// emitted here, so the trait's default applies. The variant set is validated
/// (single fallback, no overlapping tags) before any code is assembled.
fn derive_enum(
    input: &DeriveInput,
    data: &DataEnum,
    crate_path: &TokenStream,
) -> syn::Result<TokenStream> {
    // Enum-level attributes are validated even though none drive codegen here.
    parse_enum_attrs(&input.attrs)?;
    let name = &input.ident;
    let (_, original_ty_generics, _) = input.generics.split_for_impl();
    let tlb_trait = quote!(#crate_path::tlb_types::tlb::TLB);
    let error_path = quote!(#crate_path::cell::TonCellError);
    let cell_builder_path = quote!(#crate_path::cell::CellBuilder);
    let cell_parser_path = quote!(#crate_path::cell::CellParser);
    let mut tagged_variants = Vec::new();
    let mut fallback_variants = Vec::new();
    let mut write_arms = Vec::new();
    let mut required_bounds = Vec::new();
    for variant in &data.variants {
        let attrs = parse_variant_attrs(&variant.attrs)?;
        let tag = attrs.tag.unwrap_or(Tag::EMPTY);
        let spec = EnumVariant::new(variant, tag, crate_path)?;
        // Partial moves: read_branch and write_arm are taken from separate
        // fields, so no clone is needed.
        if spec.tag.is_empty() {
            fallback_variants.push(spec.read_branch);
        } else {
            tagged_variants.push((spec.tag, spec.read_branch));
        }
        write_arms.push(spec.write_arm);
        required_bounds.extend(spec.required_bounds);
    }
    // Validate the variant set before assembling any generated code.
    if fallback_variants.len() > 1 {
        return Err(syn::Error::new_spanned(
            input,
            "enums may contain at most one untagged fallback variant",
        ));
    }
    for (index, (left, _)) in tagged_variants.iter().enumerate() {
        for (right, _) in tagged_variants.iter().skip(index + 1) {
            if left.overlaps(*right) {
                return Err(syn::Error::new_spanned(
                    input,
                    format!(
                        "enum tags are ambiguous: {}-bit tag value {} overlaps with {}-bit tag value {}",
                        left.bits, left.value, right.bits, right.value
                    ),
                ));
            }
        }
    }
    required_bounds.push(parse_quote!(
        #name #original_ty_generics: ::core::clone::Clone + ::core::fmt::Debug
    ));
    let (impl_generics, ty_generics, where_clause) =
        impl_generics(&input.generics, crate_path, &required_bounds);
    // Each tagged probe loads the tag, commits to the variant on a match
    // (the branch ends with `return`), and otherwise seeks back so the next
    // probe starts from the same position.
    let tagged_reads = tagged_variants.into_iter().map(|(tag, branch)| {
        let bits = tag.bits;
        let value = tag.value;
        quote! {
            if parser.remaining_bits() >= #bits {
                let actual = parser.load_u64(#bits)?;
                if actual == #value {
                    #branch
                }
                parser.seek(-(#bits as i64))?;
            }
        }
    });
    // Without an untagged variant, an unmatched tag is a hard error.
    let fallback_read = fallback_variants.into_iter().next().unwrap_or_else(|| {
        quote! {
            return Err(#error_path::InvalidCellData(format!(
                "no matching tag found for enum {}",
                stringify!(#name)
            )));
        }
    });
    Ok(quote! {
        impl #impl_generics #tlb_trait for #name #ty_generics #where_clause {
            fn read_definition(parser: &mut #cell_parser_path) -> Result<Self, #error_path> {
                #(#tagged_reads)*
                #fallback_read
            }
            fn write_definition(&self, builder: &mut #cell_builder_path) -> Result<(), #error_path> {
                match self {
                    #(#write_arms)*
                }
                Ok(())
            }
        }
    })
}
fn derive_named_struct<'a, I>(fields: I, crate_path: &TokenStream) -> syn::Result<StructBody>
where
I: Iterator<Item = &'a Field>,
{
let mut read_statements = Vec::new();
let mut write_statements = Vec::new();
let mut field_names = Vec::new();
let mut required_bounds = Vec::new();
for field in fields {
let attrs = parse_field_attrs(&field.attrs)?;
let field_name = field.ident.clone().ok_or_else(|| {
syn::Error::new(field.span(), "named struct field is missing an identifier")
})?;
let field_type = &field.ty;
read_statements.push(generate_field_read_statement(
&field_name,
field_type,
&attrs,
crate_path,
));
write_statements.push(generate_self_field_write_statement(
quote!(self.#field_name),
field_type,
&attrs,
crate_path,
));
required_bounds.extend(generate_field_bound_predicates(
field_type, &attrs, crate_path,
));
field_names.push(field_name);
}
Ok(StructBody {
read_statements,
init: quote!(Self { #(#field_names),* }),
write_statements,
required_bounds,
})
}
/// Builds the read/write statement lists for a tuple struct.
///
/// Reads bind each payload as `field_<index>`; writes access `self.<index>`
/// via `syn::Index` so the literal is emitted without a type suffix. The
/// `init` expression reassembles `Self(field_0, field_1, ...)`.
fn derive_tuple_struct<'a, I>(fields: I, crate_path: &TokenStream) -> syn::Result<StructBody>
where
    I: Iterator<Item = &'a Field>,
{
    let mut reads = Vec::new();
    let mut writes = Vec::new();
    let mut bindings = Vec::new();
    let mut bounds = Vec::new();
    for (position, field) in fields.enumerate() {
        let attrs = parse_field_attrs(&field.attrs)?;
        let binding = format_ident!("field_{position}");
        let ty = &field.ty;
        let accessor = syn::Index::from(position);
        reads.push(generate_field_read_statement(&binding, ty, &attrs, crate_path));
        writes.push(generate_self_field_write_statement(
            quote!(self.#accessor),
            ty,
            &attrs,
            crate_path,
        ));
        bounds.extend(generate_field_bound_predicates(ty, &attrs, crate_path));
        bindings.push(binding);
    }
    Ok(StructBody {
        read_statements: reads,
        init: quote!(Self(#(#bindings),*)),
        write_statements: writes,
        required_bounds: bounds,
    })
}
/// Produces the three generics fragments for the generated impl.
///
/// The impl generics and where-clause come from a copy of the input generics
/// with type-parameter defaults stripped and the collected predicates added;
/// the type generics are taken from the original so the impl targets the
/// type exactly as declared.
fn impl_generics(
    generics: &Generics,
    crate_path: &TokenStream,
    predicates: &[WherePredicate],
) -> (TokenStream, TokenStream, TokenStream) {
    let stripped = strip_type_defaults(generics.clone());
    let bounded = add_where_predicates(stripped, predicates.iter().cloned(), crate_path);
    let (impl_generics, _, where_clause) = bounded.split_for_impl();
    let (_, ty_generics, _) = generics.split_for_impl();
    let impl_tokens = quote!(#impl_generics);
    let ty_tokens = quote!(#ty_generics);
    let where_tokens = quote!(#where_clause);
    (impl_tokens, ty_tokens, where_tokens)
}
/// Emits a `TLBPrefix::new(bits, value)` constructor expression for `tag`.
fn prefix_tokens(crate_path: &TokenStream, tag: Tag) -> TokenStream {
    let (bits, value) = (tag.bits, tag.value);
    quote!(#crate_path::tlb_types::tlb::TLBPrefix::new(#bits, #value))
}
/// Emits a `let #binding = ...;` statement that reads one field.
///
/// Precedence: `skip_read` (use the default expression) beats a custom
/// `read_with` function, which beats the type-driven dispatch. For integer
/// types, an explicit `bit_len` attribute overrides the native width
/// (64 for `BigUint`, 256 for the 256-bit types).
fn generate_field_read_statement(
    binding: &Ident,
    field_type: &Type,
    attrs: &FieldAttrs,
    crate_path: &TokenStream,
) -> TokenStream {
    // Skipped fields are populated from their default, not the parser.
    if attrs.skip_read {
        let default_expr = generate_default_expr(attrs);
        return quote! {
            let #binding = #default_expr;
        };
    }
    // A custom reader short-circuits the type-based dispatch entirely.
    if let Some(read_with) = &attrs.read_with {
        return quote! {
            let #binding = #read_with(parser)?;
        };
    }
    match classify_type(field_type) {
        TypeInfo::Bool => quote! {
            let #binding = parser.load_bit()?;
        },
        TypeInfo::Unsigned { native_bits } => generate_unsigned_read(
            binding,
            field_type,
            native_bits,
            attrs.bit_len.unwrap_or(native_bits),
            crate_path,
        ),
        TypeInfo::Signed { native_bits } => generate_signed_read(
            binding,
            field_type,
            native_bits,
            attrs.bit_len.unwrap_or(native_bits),
            crate_path,
        ),
        TypeInfo::BigUint => {
            let bits = attrs.bit_len.unwrap_or(64);
            quote! {
                let #binding = parser.load_uint(#bits)?;
            }
        }
        TypeInfo::U256 => {
            generate_u256_read(binding, field_type, attrs.bit_len.unwrap_or(256), crate_path)
        }
        TypeInfo::I256 => {
            generate_i256_read(binding, field_type, attrs.bit_len.unwrap_or(256), crate_path)
        }
        TypeInfo::Custom => {
            // Anything unrecognized is deserialized via its own TLB impl.
            let tlb_trait = quote!(#crate_path::tlb_types::tlb::TLB);
            quote! {
                let #binding = <#field_type as #tlb_trait>::read(parser)?;
            }
        }
    }
}
/// Emits the statement that writes one field accessed through `self`
/// (i.e. `field_expr` is an owned place like `self.foo` or `self.0`).
///
/// Precedence mirrors the read path: `skip_write` beats a custom
/// `write_with`, which beats the type-driven dispatch; `bit_len` overrides
/// the native width for integer types.
fn generate_self_field_write_statement(
    field_expr: TokenStream,
    field_type: &Type,
    attrs: &FieldAttrs,
    crate_path: &TokenStream,
) -> TokenStream {
    // Write-skipped fields contribute nothing to the serialized stream.
    if attrs.skip_write {
        return quote! {};
    }
    // A custom writer short-circuits the type-based dispatch entirely.
    if let Some(write_with) = &attrs.write_with {
        return quote! {
            #write_with(&#field_expr, builder)?;
        };
    }
    match classify_type(field_type) {
        TypeInfo::Bool => quote! {
            builder.store_bit(#field_expr)?;
        },
        TypeInfo::Unsigned { native_bits } => generate_unsigned_self_write(
            field_expr,
            native_bits,
            attrs.bit_len.unwrap_or(native_bits),
            crate_path,
        ),
        TypeInfo::Signed { native_bits } => generate_signed_self_write(
            field_expr,
            native_bits,
            attrs.bit_len.unwrap_or(native_bits),
            crate_path,
        ),
        TypeInfo::BigUint => {
            let bits = attrs.bit_len.unwrap_or(64);
            quote! {
                builder.store_uint(#bits, &#field_expr)?;
            }
        }
        TypeInfo::U256 => {
            generate_u256_write(field_expr, attrs.bit_len.unwrap_or(256), crate_path)
        }
        TypeInfo::I256 => {
            generate_i256_write(field_expr, attrs.bit_len.unwrap_or(256), crate_path)
        }
        TypeInfo::Custom => {
            // Anything unrecognized is serialized via its own TLB impl.
            let tlb_trait = quote!(#crate_path::tlb_types::tlb::TLB);
            quote! {
                <#field_type as #tlb_trait>::write(&#field_expr, builder)?;
            }
        }
    }
}
/// Emits the statement that writes one field already bound by reference —
/// used by enum match arms, where the pattern binding yields `&T`.
///
/// Mirrors `generate_self_field_write_statement` but adjusts for the borrow:
/// custom writers and TLB writes receive the reference directly, while the
/// scalar paths dereference (`*expr`) to obtain the value.
fn generate_borrowed_field_write_statement(
    field_expr: TokenStream,
    field_type: &Type,
    attrs: &FieldAttrs,
    crate_path: &TokenStream,
) -> TokenStream {
    // Write-skipped fields contribute nothing to the serialized stream.
    if attrs.skip_write {
        return quote! {};
    }
    // A custom writer short-circuits the type-based dispatch; the binding is
    // already a reference, so no `&` is added here (unlike the self variant).
    if let Some(write_with) = &attrs.write_with {
        return quote! {
            #write_with(#field_expr, builder)?;
        };
    }
    match classify_type(field_type) {
        TypeInfo::Bool => quote! {
            builder.store_bit(*#field_expr)?;
        },
        TypeInfo::Unsigned { native_bits } => {
            let bits = attrs.bit_len.unwrap_or(native_bits);
            generate_unsigned_borrowed_write(field_expr, native_bits, bits, crate_path)
        }
        TypeInfo::Signed { native_bits } => {
            let bits = attrs.bit_len.unwrap_or(native_bits);
            generate_signed_borrowed_write(field_expr, native_bits, bits, crate_path)
        }
        TypeInfo::BigUint => {
            let bits = attrs.bit_len.unwrap_or(64);
            quote! {
                builder.store_uint(#bits, #field_expr)?;
            }
        }
        TypeInfo::U256 => {
            let bits = attrs.bit_len.unwrap_or(256);
            // Dereference so the write generator operates on the value.
            generate_u256_write(quote!((*#field_expr)), bits, crate_path)
        }
        TypeInfo::I256 => {
            let bits = attrs.bit_len.unwrap_or(256);
            generate_i256_write(quote!((*#field_expr)), bits, crate_path)
        }
        TypeInfo::Custom => {
            let tlb_trait = quote!(#crate_path::tlb_types::tlb::TLB);
            quote! {
                <#field_type as #tlb_trait>::write(#field_expr, builder)?;
            }
        }
    }
}
/// Expression used to initialize a field whose read is skipped: either the
/// user-supplied `default = "path"` function, or `Default::default()`.
fn generate_default_expr(attrs: &FieldAttrs) -> TokenStream {
    if let Some(FieldDefault::With(path)) = &attrs.default {
        quote!(#path())
    } else {
        // Covers both an explicit `default` (trait form) and no attribute.
        quote!(::core::default::Default::default())
    }
}
/// Collects the where-clause predicates a single field's type requires.
///
/// A `Default` bound is added when the field's attributes need the trait
/// (skipped read without a custom default function); a `TLB` bound is added
/// for custom-typed fields that are read or written through the trait rather
/// than via a user-supplied function.
fn generate_field_bound_predicates(
    field_type: &Type,
    attrs: &FieldAttrs,
    crate_path: &TokenStream,
) -> Vec<WherePredicate> {
    let mut predicates = Vec::new();
    if attrs.needs_default_trait() {
        predicates.push(parse_quote!(#field_type: ::core::default::Default));
    }
    if let TypeInfo::Custom = classify_type(field_type) {
        let reads_via_tlb = !attrs.skip_read && attrs.read_with.is_none();
        let writes_via_tlb = !attrs.skip_write && attrs.write_with.is_none();
        if reads_via_tlb || writes_via_tlb {
            predicates.push(parse_quote!(#field_type: #crate_path::tlb_types::tlb::TLB));
        }
    }
    predicates
}
/// Emits a `let #binding = ...;` statement reading an unsigned primitive
/// (`u8`..`u64`) from `bits` bits of the stream.
///
/// Generated cases:
/// * `bits == 0` — nothing is read; value is 0.
/// * `bits > native width` — the excess leading bits are loaded first and
///   must all be zero, otherwise the value cannot fit the field type.
/// * `bits <= native width` — the loaded bytes are right-aligned into a
///   fixed buffer and decoded big-endian, then shifted down to drop the
///   sub-byte padding (assumes `load_bits` left-aligns a partial final
///   byte — consistent with the shift correction below; confirm against
///   the parser implementation).
fn generate_unsigned_read(
    binding: &Ident,
    field_type: &Type,
    native_bits: usize,
    bits: usize,
    crate_path: &TokenStream,
) -> TokenStream {
    // Byte width of the native integer; sizes the from_be_bytes buffer.
    let bytes_len = native_bits / 8;
    let error_path = quote!(#crate_path::cell::TonCellError);
    let field_name = binding.to_string();
    quote! {
        let #binding = {
            const NATIVE_BITS: usize = #native_bits;
            if #bits == 0 {
                0
            } else if #bits > NATIVE_BITS {
                // Wider-than-native encoding: the leading overflow bits
                // must be zero for the value to be representable.
                let prefix_bits = #bits - NATIVE_BITS;
                let prefix = parser.load_bits(prefix_bits)?;
                if prefix.iter().any(|byte| *byte != 0) {
                    return Err(#error_path::InvalidCellData(format!(
                        "field `{}` does not fit into {}",
                        #field_name,
                        stringify!(#field_type)
                    )));
                }
                let tail = parser.load_bits(NATIVE_BITS)?;
                let tail: [u8; #bytes_len] = tail.try_into().map_err(|_| {
                    #error_path::InternalError(format!(
                        "failed to decode {} from {} bits",
                        stringify!(#field_type),
                        NATIVE_BITS
                    ))
                })?;
                <#field_type>::from_be_bytes(tail)
            } else {
                // Right-align the loaded bytes, decode big-endian, then
                // shift off the low zero padding of a partial final byte.
                let bytes = parser.load_bits(#bits)?;
                let mut full = [0u8; #bytes_len];
                let start = full.len() - bytes.len();
                full[start..].copy_from_slice(&bytes);
                let mut value = <#field_type>::from_be_bytes(full);
                let extra_bits = #bits % 8;
                if extra_bits != 0 {
                    value >>= (8 - extra_bits) as u32;
                }
                value
            }
        };
    }
}
/// Emits a `let #binding = ...;` statement reading a signed primitive
/// (`i8`..`i64`) from `bits` bits, two's-complement encoded.
///
/// Generated cases:
/// * `bits == 0` — nothing is read; value is 0.
/// * `bits > native width` — the excess leading bits must all equal the
///   sign bit of the native-width tail (0x00 bytes for non-negative,
///   0xFF-with-high-bit-mask for negative), i.e. be pure sign extension.
/// * `bits <= native width` — the buffer is pre-filled with the sign byte
///   (taken from the high bit of the first loaded byte, which holds the
///   value's MSB) and the arithmetic right shift both drops the sub-byte
///   padding and completes the sign extension.
fn generate_signed_read(
    binding: &Ident,
    field_type: &Type,
    native_bits: usize,
    bits: usize,
    crate_path: &TokenStream,
) -> TokenStream {
    // Byte width of the native integer; sizes the from_be_bytes buffer.
    let bytes_len = native_bits / 8;
    let error_path = quote!(#crate_path::cell::TonCellError);
    let field_name = binding.to_string();
    quote! {
        let #binding = {
            const NATIVE_BITS: usize = #native_bits;
            if #bits == 0 {
                0
            } else if #bits > NATIVE_BITS {
                let prefix_bits = #bits - NATIVE_BITS;
                let prefix = parser.load_bits(prefix_bits)?;
                let tail = parser.load_bits(NATIVE_BITS)?;
                let tail: [u8; #bytes_len] = tail.try_into().map_err(|_| {
                    #error_path::InternalError(format!(
                        "failed to decode {} from {} bits",
                        stringify!(#field_type),
                        NATIVE_BITS
                    ))
                })?;
                // The prefix must be pure sign extension of the tail:
                // all-ones for negative values (including the partial
                // final byte's used high bits), all-zero otherwise.
                let is_negative = tail[0] & 0x80 != 0;
                let prefix_ok = if is_negative {
                    let full_bytes = prefix_bits / 8;
                    let remainder = prefix_bits % 8;
                    prefix[..full_bytes].iter().all(|byte| *byte == 0xFF)
                        && (remainder == 0
                            || prefix[full_bytes] == (0xFFu8 << (8 - remainder)))
                } else {
                    prefix.iter().all(|byte| *byte == 0)
                };
                if !prefix_ok {
                    return Err(#error_path::InvalidCellData(format!(
                        "field `{}` does not fit into {}",
                        #field_name,
                        stringify!(#field_type)
                    )));
                }
                <#field_type>::from_be_bytes(tail)
            } else {
                let bytes = parser.load_bits(#bits)?;
                // First loaded bit is the value's sign bit (bits are packed
                // MSB-first into the returned bytes).
                let negative = bytes.first().is_some_and(|byte| byte & 0x80 != 0);
                let pad = if negative { 0xFF } else { 0x00 };
                let mut full = [pad; #bytes_len];
                let start = full.len() - bytes.len();
                full[start..].copy_from_slice(&bytes);
                let mut value = <#field_type>::from_be_bytes(full);
                let extra_bits = #bits % 8;
                if extra_bits != 0 {
                    // Arithmetic shift: drops low padding, extends the sign.
                    value >>= (8 - extra_bits) as u32;
                }
                value
            }
        };
    }
}
/// Emits a `let #binding = ...;` statement reading a 256-bit unsigned value
/// from `bits` bits of the stream.
///
/// Structurally identical to `generate_unsigned_read` with a fixed 256-bit
/// width. Assumes the field type exposes `ZERO`, `from_be_bytes([u8; 32])`,
/// and a logical `>>=` — matches the U256 type the crate uses; confirm
/// against its API if that type changes.
fn generate_u256_read(
    binding: &Ident,
    field_type: &Type,
    bits: usize,
    crate_path: &TokenStream,
) -> TokenStream {
    let error_path = quote!(#crate_path::cell::TonCellError);
    let field_name = binding.to_string();
    quote! {
        let #binding = {
            if #bits == 0 {
                <#field_type>::ZERO
            } else if #bits > 256 {
                // Excess leading bits must be zero for the value to fit.
                let prefix_bits = #bits - 256;
                let prefix = parser.load_bits(prefix_bits)?;
                if prefix.iter().any(|byte| *byte != 0) {
                    return Err(#error_path::InvalidCellData(format!(
                        "field `{}` does not fit into {}",
                        #field_name,
                        stringify!(#field_type)
                    )));
                }
                let tail = parser.load_bits(256)?;
                let tail: [u8; 32] = tail.try_into().map_err(|_| {
                    #error_path::InternalError(format!(
                        "failed to decode {} from 256 bits",
                        stringify!(#field_type)
                    ))
                })?;
                <#field_type>::from_be_bytes(tail)
            } else {
                // Right-align, decode big-endian, shift off sub-byte padding.
                let bytes = parser.load_bits(#bits)?;
                let mut full = [0u8; 32];
                let start = full.len() - bytes.len();
                full[start..].copy_from_slice(&bytes);
                let mut value = <#field_type>::from_be_bytes(full);
                let extra_bits = #bits % 8;
                if extra_bits != 0 {
                    value >>= (8 - extra_bits) as u32;
                }
                value
            }
        };
    }
}
/// Emits a `let #binding = ...;` statement reading a 256-bit signed value
/// from `bits` bits, two's-complement encoded.
///
/// Structurally identical to `generate_signed_read` with a fixed 256-bit
/// width. Assumes the field type exposes `ZERO`, `from_be_bytes([u8; 32])`,
/// and an arithmetic `>>=` — matches the I256 type the crate uses; confirm
/// against its API if that type changes.
fn generate_i256_read(
    binding: &Ident,
    field_type: &Type,
    bits: usize,
    crate_path: &TokenStream,
) -> TokenStream {
    let error_path = quote!(#crate_path::cell::TonCellError);
    let field_name = binding.to_string();
    quote! {
        let #binding = {
            if #bits == 0 {
                <#field_type>::ZERO
            } else if #bits > 256 {
                let prefix_bits = #bits - 256;
                let prefix = parser.load_bits(prefix_bits)?;
                let tail = parser.load_bits(256)?;
                let tail: [u8; 32] = tail.try_into().map_err(|_| {
                    #error_path::InternalError(format!(
                        "failed to decode {} from 256 bits",
                        stringify!(#field_type)
                    ))
                })?;
                // The prefix must be pure sign extension of the 256-bit tail.
                let is_negative = tail[0] & 0x80 != 0;
                let prefix_ok = if is_negative {
                    let full_bytes = prefix_bits / 8;
                    let remainder = prefix_bits % 8;
                    prefix[..full_bytes].iter().all(|byte| *byte == 0xFF)
                        && (remainder == 0
                            || prefix[full_bytes] == (0xFFu8 << (8 - remainder)))
                } else {
                    prefix.iter().all(|byte| *byte == 0)
                };
                if !prefix_ok {
                    return Err(#error_path::InvalidCellData(format!(
                        "field `{}` does not fit into {}",
                        #field_name,
                        stringify!(#field_type)
                    )));
                }
                <#field_type>::from_be_bytes(tail)
            } else {
                let bytes = parser.load_bits(#bits)?;
                // First loaded bit is the value's sign bit.
                let negative = bytes.first().is_some_and(|byte| byte & 0x80 != 0);
                let pad = if negative { 0xFF } else { 0x00 };
                let mut full = [pad; 32];
                let start = full.len() - bytes.len();
                full[start..].copy_from_slice(&bytes);
                let mut value = <#field_type>::from_be_bytes(full);
                let extra_bits = #bits % 8;
                if extra_bits != 0 {
                    // Arithmetic shift: drops low padding, extends the sign.
                    value >>= (8 - extra_bits) as u32;
                }
                value
            }
        };
    }
}
/// Emits the write statement for an owned unsigned primitive (`self.field`).
///
/// Generated cases:
/// * `bits == 0` — only a zero value is encodable; non-zero is an error.
/// * `bits >= native width` — zero-pad up front, then write all native bits.
/// * `bits < native width` — verify the value's minimal bit-length fits,
///   zero-pad the difference, then write just the significant low bits,
///   addressed by their bit offset inside the big-endian byte buffer.
fn generate_unsigned_self_write(
    field_expr: TokenStream,
    native_bits: usize,
    bits: usize,
    crate_path: &TokenStream,
) -> TokenStream {
    let error_path = quote!(#crate_path::cell::TonCellError);
    quote! {
        {
            const NATIVE_BITS: usize = #native_bits;
            let value = #field_expr;
            if #bits == 0 {
                if value != 0 {
                    return Err(#error_path::CellBuilderError(format!(
                        "field `{}` cannot be encoded in 0 bits",
                        stringify!(#field_expr)
                    )));
                }
            } else if #bits >= NATIVE_BITS {
                // Any width at or above native: zero padding then full value.
                let padding_bits = #bits - NATIVE_BITS;
                if padding_bits > 0 {
                    builder.write_bits(vec![0u8; padding_bits.div_ceil(8)], padding_bits)?;
                }
                builder.write_bits(value.to_be_bytes(), NATIVE_BITS)?;
            } else {
                // Narrower than native: the value must fit in #bits.
                let min_bits = if value == 0 {
                    0usize
                } else {
                    NATIVE_BITS - value.leading_zeros() as usize
                };
                if min_bits > #bits {
                    return Err(#error_path::CellBuilderError(format!(
                        "field `{}` does not fit into {} bits",
                        stringify!(#field_expr),
                        #bits
                    )));
                }
                let padding_bits = #bits - min_bits;
                if padding_bits > 0 {
                    builder.write_bits(vec![0u8; padding_bits.div_ceil(8)], padding_bits)?;
                }
                // The significant bits sit at offset NATIVE_BITS - min_bits
                // within the big-endian byte representation.
                builder.write_bits_with_offset(value.to_be_bytes(), #bits - padding_bits, NATIVE_BITS - min_bits)?;
            }
        }
    }
}
/// Emits the write statement for a borrowed unsigned primitive (enum match
/// binding). Identical to `generate_unsigned_self_write` except the binding
/// is dereferenced (`*expr`) to obtain the value.
fn generate_unsigned_borrowed_write(
    field_expr: TokenStream,
    native_bits: usize,
    bits: usize,
    crate_path: &TokenStream,
) -> TokenStream {
    let error_path = quote!(#crate_path::cell::TonCellError);
    quote! {
        {
            const NATIVE_BITS: usize = #native_bits;
            // Dereference the pattern binding to copy out the value.
            let value = *#field_expr;
            if #bits == 0 {
                // Zero-width fields can only encode the value 0.
                if value != 0 {
                    return Err(#error_path::CellBuilderError(format!(
                        "field `{}` cannot be encoded in 0 bits",
                        stringify!(#field_expr)
                    )));
                }
            } else if #bits >= NATIVE_BITS {
                // Any width at or above native: zero padding then full value.
                let padding_bits = #bits - NATIVE_BITS;
                if padding_bits > 0 {
                    builder.write_bits(vec![0u8; padding_bits.div_ceil(8)], padding_bits)?;
                }
                builder.write_bits(value.to_be_bytes(), NATIVE_BITS)?;
            } else {
                // Narrower than native: the value must fit in #bits.
                let min_bits = if value == 0 {
                    0usize
                } else {
                    NATIVE_BITS - value.leading_zeros() as usize
                };
                if min_bits > #bits {
                    return Err(#error_path::CellBuilderError(format!(
                        "field `{}` does not fit into {} bits",
                        stringify!(#field_expr),
                        #bits
                    )));
                }
                let padding_bits = #bits - min_bits;
                if padding_bits > 0 {
                    builder.write_bits(vec![0u8; padding_bits.div_ceil(8)], padding_bits)?;
                }
                // Significant bits live at offset NATIVE_BITS - min_bits in
                // the big-endian byte representation.
                builder.write_bits_with_offset(value.to_be_bytes(), #bits - padding_bits, NATIVE_BITS - min_bits)?;
            }
        }
    }
}
/// Emits the write statement for a 256-bit unsigned value.
///
/// Computes the value's minimal bit-length by scanning the big-endian bytes
/// for the first non-zero byte, errors if that exceeds `bits`, then writes
/// zero padding followed by only the significant bits. Assumes the type
/// exposes `to_be_bytes::<32>()` — matches the U256 type the crate uses.
fn generate_u256_write(
    field_expr: TokenStream,
    bits: usize,
    crate_path: &TokenStream,
) -> TokenStream {
    let error_path = quote!(#crate_path::cell::TonCellError);
    quote! {
        {
            let bytes = #field_expr.to_be_bytes::<32>();
            // Minimal bit-length: position of the highest set bit.
            let min_bits = {
                let mut first_non_zero = 0usize;
                while first_non_zero < bytes.len() && bytes[first_non_zero] == 0 {
                    first_non_zero += 1;
                }
                if first_non_zero == bytes.len() {
                    0usize
                } else {
                    let leading = bytes[first_non_zero].leading_zeros() as usize;
                    (bytes.len() - first_non_zero) * 8 - leading
                }
            };
            if min_bits > #bits {
                return Err(#error_path::CellBuilderError(format!(
                    "field `{}` does not fit into {} bits",
                    stringify!(#field_expr),
                    #bits
                )));
            }
            let padding_bits = #bits - min_bits;
            if padding_bits > 0 {
                builder.write_bits(vec![0u8; padding_bits.div_ceil(8)], padding_bits)?;
            }
            // Write only the significant low bits, offset into the 256-bit
            // buffer; saturating_sub guards the min_bits == 0 case.
            builder.write_bits_with_offset(bytes, #bits - padding_bits, 256usize.saturating_sub(min_bits))?;
        }
    }
}
/// Emits the write statement for a 256-bit signed value (two's complement).
///
/// The minimal width includes a sign bit: for non-negative values it is the
/// magnitude's bit-length plus one; for negative values it is the bit-length
/// of the bitwise complement (|x| - 1) plus one, with -1 (all-ones) taking a
/// single bit. Padding uses the sign byte so the emitted prefix is a valid
/// sign extension. Assumes the type exposes `to_be_bytes::<32>()`.
fn generate_i256_write(
    field_expr: TokenStream,
    bits: usize,
    crate_path: &TokenStream,
) -> TokenStream {
    let error_path = quote!(#crate_path::cell::TonCellError);
    quote! {
        {
            let bytes = #field_expr.to_be_bytes::<32>();
            let is_negative = bytes[0] & 0x80 != 0;
            let min_bits = if bytes.iter().all(|byte| *byte == 0) {
                0usize
            } else if !is_negative {
                // Positive: magnitude bit-length plus one sign bit.
                let mut first_non_zero = 0usize;
                while first_non_zero < bytes.len() && bytes[first_non_zero] == 0 {
                    first_non_zero += 1;
                }
                let leading = bytes[first_non_zero].leading_zeros() as usize;
                (bytes.len() - first_non_zero) * 8 - leading + 1
            } else {
                // Negative: bit-length of !value (= |value| - 1) plus the
                // sign bit; all-ones complement (value == -1) needs 1 bit.
                let inverted = bytes.map(|byte| !byte);
                let mut first_non_zero = 0usize;
                while first_non_zero < inverted.len() && inverted[first_non_zero] == 0 {
                    first_non_zero += 1;
                }
                if first_non_zero == inverted.len() {
                    1usize
                } else {
                    let leading = inverted[first_non_zero].leading_zeros() as usize;
                    (inverted.len() - first_non_zero) * 8 - leading + 1
                }
            };
            if min_bits > #bits {
                return Err(#error_path::CellBuilderError(format!(
                    "field `{}` does not fit into {} bits",
                    stringify!(#field_expr),
                    #bits
                )));
            }
            let padding_bits = #bits - min_bits;
            // Sign-extend the padding: 0xFF bits for negatives, 0x00 otherwise.
            let pad_byte = if is_negative { 0xFF } else { 0x00 };
            if padding_bits > 0 {
                builder.write_bits(vec![pad_byte; padding_bits.div_ceil(8)], padding_bits)?;
            }
            builder.write_bits_with_offset(bytes, #bits - padding_bits, 256usize.saturating_sub(min_bits))?;
        }
    }
}
/// Emits the write statement for an owned signed primitive (`self.field`),
/// two's-complement encoded.
///
/// Generated cases mirror the unsigned self-write, with sign handling:
/// padding uses 0xFF bits for negative values (sign extension), and the
/// minimal width reserves one sign bit on top of the magnitude's length
/// (computed from `!value` for negatives, i.e. |value| - 1).
fn generate_signed_self_write(
    field_expr: TokenStream,
    native_bits: usize,
    bits: usize,
    crate_path: &TokenStream,
) -> TokenStream {
    let error_path = quote!(#crate_path::cell::TonCellError);
    quote! {
        {
            const NATIVE_BITS: usize = #native_bits;
            let value = #field_expr;
            if #bits == 0 {
                // Zero-width fields can only encode the value 0.
                if value != 0 {
                    return Err(#error_path::CellBuilderError(format!(
                        "field `{}` cannot be encoded in 0 bits",
                        stringify!(#field_expr)
                    )));
                }
            } else if #bits >= NATIVE_BITS {
                // Pad with the sign byte so the prefix is sign extension.
                let padding_bits = #bits - NATIVE_BITS;
                let pad_byte = if value < 0 { 0xFF } else { 0x00 };
                if padding_bits > 0 {
                    builder.write_bits(vec![pad_byte; padding_bits.div_ceil(8)], padding_bits)?;
                }
                builder.write_bits(value.to_be_bytes(), NATIVE_BITS)?;
            } else {
                // Minimal two's-complement width: magnitude bits + sign bit.
                let min_bits = if value == 0 {
                    0usize
                } else if value > 0 {
                    NATIVE_BITS + 1 - value.leading_zeros() as usize
                } else {
                    NATIVE_BITS + 1 - (!value).leading_zeros() as usize
                };
                if min_bits > #bits {
                    return Err(#error_path::CellBuilderError(format!(
                        "field `{}` does not fit into {} bits",
                        stringify!(#field_expr),
                        #bits
                    )));
                }
                let padding_bits = #bits - min_bits;
                let pad_byte = if value < 0 { 0xFF } else { 0x00 };
                if padding_bits > 0 {
                    builder.write_bits(vec![pad_byte; padding_bits.div_ceil(8)], padding_bits)?;
                }
                // Significant bits (sign bit included) sit at offset
                // NATIVE_BITS - min_bits in the big-endian representation.
                builder.write_bits_with_offset(value.to_be_bytes(), #bits - padding_bits, NATIVE_BITS - min_bits)?;
            }
        }
    }
}
/// Emits the write statement for a borrowed signed primitive (enum match
/// binding). Identical to `generate_signed_self_write` except the binding
/// is dereferenced (`*expr`) to obtain the value.
fn generate_signed_borrowed_write(
    field_expr: TokenStream,
    native_bits: usize,
    bits: usize,
    crate_path: &TokenStream,
) -> TokenStream {
    let error_path = quote!(#crate_path::cell::TonCellError);
    quote! {
        {
            const NATIVE_BITS: usize = #native_bits;
            // Dereference the pattern binding to copy out the value.
            let value = *#field_expr;
            if #bits == 0 {
                // Zero-width fields can only encode the value 0.
                if value != 0 {
                    return Err(#error_path::CellBuilderError(format!(
                        "field `{}` cannot be encoded in 0 bits",
                        stringify!(#field_expr)
                    )));
                }
            } else if #bits >= NATIVE_BITS {
                // Pad with the sign byte so the prefix is sign extension.
                let padding_bits = #bits - NATIVE_BITS;
                let pad_byte = if value < 0 { 0xFF } else { 0x00 };
                if padding_bits > 0 {
                    builder.write_bits(vec![pad_byte; padding_bits.div_ceil(8)], padding_bits)?;
                }
                builder.write_bits(value.to_be_bytes(), NATIVE_BITS)?;
            } else {
                // Minimal two's-complement width: magnitude bits + sign bit
                // (negatives measured via !value = |value| - 1).
                let min_bits = if value == 0 {
                    0usize
                } else if value > 0 {
                    NATIVE_BITS + 1 - value.leading_zeros() as usize
                } else {
                    NATIVE_BITS + 1 - (!value).leading_zeros() as usize
                };
                if min_bits > #bits {
                    return Err(#error_path::CellBuilderError(format!(
                        "field `{}` does not fit into {} bits",
                        stringify!(#field_expr),
                        #bits
                    )));
                }
                let padding_bits = #bits - min_bits;
                let pad_byte = if value < 0 { 0xFF } else { 0x00 };
                if padding_bits > 0 {
                    builder.write_bits(vec![pad_byte; padding_bits.div_ceil(8)], padding_bits)?;
                }
                builder.write_bits_with_offset(value.to_be_bytes(), #bits - padding_bits, NATIVE_BITS - min_bits)?;
            }
        }
    }
}
/// Maps a field's Rust type to the codegen strategy used for it.
///
/// Only the last path segment's identifier is inspected, so a qualified
/// path such as `primitive_types::U256` still matches, but so would any
/// unrelated type that happens to share the name. Everything unrecognized
/// (including non-path types) is `Custom` and goes through its `TLB` impl.
fn classify_type(ty: &Type) -> TypeInfo {
    let Type::Path(type_path) = ty else {
        return TypeInfo::Custom;
    };
    let Some(segment) = type_path.path.segments.last() else {
        return TypeInfo::Custom;
    };
    match segment.ident.to_string().as_str() {
        "bool" => TypeInfo::Bool,
        "u8" => TypeInfo::Unsigned { native_bits: 8 },
        "u16" => TypeInfo::Unsigned { native_bits: 16 },
        "u32" => TypeInfo::Unsigned { native_bits: 32 },
        "u64" => TypeInfo::Unsigned { native_bits: 64 },
        "i8" => TypeInfo::Signed { native_bits: 8 },
        "i16" => TypeInfo::Signed { native_bits: 16 },
        "i32" => TypeInfo::Signed { native_bits: 32 },
        "i64" => TypeInfo::Signed { native_bits: 64 },
        "BigUint" => TypeInfo::BigUint,
        "U256" => TypeInfo::U256,
        "I256" => TypeInfo::I256,
        _ => TypeInfo::Custom,
    }
}
/// Per-variant codegen pieces produced by `EnumVariant::new`.
#[derive(Clone)]
struct EnumVariant {
    /// The variant's tag; `Tag::EMPTY` marks the untagged fallback variant.
    tag: Tag,
    /// Statements for `read_definition`; always ends with a `return Ok(...)`.
    read_branch: TokenStream,
    /// One `match` arm for `write_definition`.
    write_arm: TokenStream,
    /// Where-clause bounds required by the variant's field types.
    required_bounds: Vec<WherePredicate>,
}
impl EnumVariant {
    /// Builds the codegen pieces for one enum variant: the read branch tried
    /// by `read_definition`, the `match` arm used by `write_definition`, and
    /// any where-clause bounds the variant's field types require.
    ///
    /// Supported shapes: unit variants, struct (named-field) variants, and
    /// tuple variants with exactly one field; multi-field tuple variants are
    /// rejected with a spanned error. The tag itself is written by the arm
    /// (via `store_u64`) but read by the caller's tag-probing loop, so read
    /// branches never consume the tag.
    fn new(variant: &syn::Variant, tag: Tag, crate_path: &TokenStream) -> syn::Result<Self> {
        let variant_name = &variant.ident;
        match &variant.fields {
            Fields::Unit => {
                // Nothing to read beyond the tag handled by the caller.
                let read_branch = quote! {
                    return Ok(Self::#variant_name);
                };
                let write_arm = if tag.is_empty() {
                    // Untagged unit variant serializes to nothing at all.
                    quote! {
                        Self::#variant_name => {}
                    }
                } else {
                    let bits = tag.bits;
                    let value = tag.value;
                    quote! {
                        Self::#variant_name => {
                            builder.store_u64(#bits, #value)?;
                        }
                    }
                };
                Ok(Self {
                    tag,
                    read_branch,
                    write_arm,
                    required_bounds: Vec::new(),
                })
            }
            Fields::Named(fields) => {
                let mut read_statements = Vec::new();
                let mut write_statements = Vec::new();
                let mut names = Vec::new();
                let mut required_bounds = Vec::new();
                for field in &fields.named {
                    let attrs = parse_field_attrs(&field.attrs)?;
                    let name = field.ident.clone().ok_or_else(|| {
                        syn::Error::new(field.span(), "named enum field is missing an identifier")
                    })?;
                    let ty = &field.ty;
                    read_statements
                        .push(generate_field_read_statement(&name, ty, &attrs, crate_path));
                    // Match-arm bindings are references, so the borrowed
                    // writer variant is used here.
                    write_statements.push(generate_borrowed_field_write_statement(
                        quote!(#name),
                        ty,
                        &attrs,
                        crate_path,
                    ));
                    required_bounds.extend(generate_field_bound_predicates(ty, &attrs, crate_path));
                    names.push(name);
                }
                let read_branch = quote! {
                    #(#read_statements)*
                    return Ok(Self::#variant_name { #(#names),* });
                };
                let write_arm = if tag.is_empty() {
                    quote! {
                        Self::#variant_name { #(#names),* } => {
                            #(#write_statements)*
                        }
                    }
                } else {
                    let bits = tag.bits;
                    let value = tag.value;
                    quote! {
                        Self::#variant_name { #(#names),* } => {
                            builder.store_u64(#bits, #value)?;
                            #(#write_statements)*
                        }
                    }
                };
                Ok(Self {
                    tag,
                    read_branch,
                    write_arm,
                    required_bounds,
                })
            }
            Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
                // Newtype-style variant: the single payload is bound as
                // `value` in both the read branch and the match arm.
                let field = fields.unnamed.first().expect("length checked");
                let attrs = parse_field_attrs(&field.attrs)?;
                let ty = &field.ty;
                let binding = format_ident!("value");
                let read_statement =
                    generate_field_read_statement(&binding, ty, &attrs, crate_path);
                let write_statement = generate_borrowed_field_write_statement(
                    quote!(#binding),
                    ty,
                    &attrs,
                    crate_path,
                );
                let required_bounds = generate_field_bound_predicates(ty, &attrs, crate_path);
                let read_branch = quote! {
                    #read_statement
                    return Ok(Self::#variant_name(#binding));
                };
                let write_arm = if tag.is_empty() {
                    quote! {
                        Self::#variant_name(#binding) => {
                            #write_statement
                        }
                    }
                } else {
                    let bits = tag.bits;
                    let value = tag.value;
                    quote! {
                        Self::#variant_name(#binding) => {
                            builder.store_u64(#bits, #value)?;
                            #write_statement
                        }
                    }
                };
                Ok(Self {
                    tag,
                    read_branch,
                    write_arm,
                    required_bounds,
                })
            }
            Fields::Unnamed(fields) => Err(syn::Error::new_spanned(
                fields,
                "enum tuple variants must contain exactly one field",
            )),
        }
    }
}
/// Intermediate codegen pieces for one struct body shape.
struct StructBody {
    /// One `let <binding> = ...;` statement per field, in declaration order.
    read_statements: Vec<TokenStream>,
    /// Expression rebuilding `Self` from the read bindings.
    init: TokenStream,
    /// One write statement per field, in declaration order.
    write_statements: Vec<TokenStream>,
    /// Where-clause bounds required by the field types.
    required_bounds: Vec<WherePredicate>,
}
/// Codegen strategy for a field type, as detected by `classify_type`.
enum TypeInfo {
    /// `bool` — single-bit load/store.
    Bool,
    /// `u8`..`u64` — fixed-width unsigned primitive.
    Unsigned { native_bits: usize },
    /// `i8`..`i64` — fixed-width signed primitive (two's complement).
    Signed { native_bits: usize },
    /// `BigUint` — arbitrary precision, defaults to 64 bits on the wire.
    BigUint,
    /// A 256-bit unsigned integer type named `U256`.
    U256,
    /// A 256-bit signed integer type named `I256`.
    I256,
    /// Anything else — handled through its own `TLB` implementation.
    Custom,
}