use std::collections::HashSet;
use crate::ast::{BinaryOp, ComputedExpr, UnaryOp};
use proc_macro2::TokenTree;
/// Entry point: parses a token stream into a `ComputedExpr` tree, then
/// rewrites references to `let`/closure bindings into `Var` nodes.
pub fn parse_computed_expression(tokens: &proc_macro2::TokenStream) -> ComputedExpr {
    let token_list: Vec<proc_macro2::TokenTree> = tokens.clone().into_iter().collect();
    let (parsed, _consumed) = parse_expr(&token_list, 0);
    // Start with no bindings in scope; `let` and closures introduce them
    // during the walk.
    resolve_bindings_in_expr(parsed, &HashSet::new())
}
/// Maps a method name to the resolver responsible for computing it, or
/// `None` when the method is an ordinary (non-resolver) call.
fn resolver_for_method(method: &str) -> Option<&'static str> {
    if matches!(method, "ui_amount" | "raw_amount") {
        return Some("TokenMetadata");
    }
    if matches!(method, "slot_hash" | "keccak_rng") {
        return Some("SlotHash");
    }
    None
}
/// Returns the output type name produced by a resolver method, or `None`
/// for methods that are not resolver-computed.
pub fn resolver_output_type(method: &str) -> Option<&'static str> {
    let ty = match method {
        "slot_hash" => "SlotHashBytes",
        "keccak_rng" => "KeccakRngValue",
        "ui_amount" => "TokenUiAmount",
        "raw_amount" => "TokenRawAmount",
        _ => return None,
    };
    Some(ty)
}
/// Returns `true` if any sub-expression of `expr` is a `u64::from_le_bytes`
/// or `u64::from_be_bytes` conversion.
pub fn expr_contains_u64_from_bytes(expr: &crate::ast::ComputedExpr) -> bool {
    use crate::ast::ComputedExpr;
    match expr {
        // The conversions themselves.
        ComputedExpr::U64FromLeBytes { .. } | ComputedExpr::U64FromBeBytes { .. } => true,
        // Single-child wrappers; all bind their child as `expr`.
        ComputedExpr::Paren { expr }
        | ComputedExpr::Unary { expr, .. }
        | ComputedExpr::Cast { expr, .. }
        | ComputedExpr::UnwrapOr { expr, .. }
        | ComputedExpr::Slice { expr, .. }
        | ComputedExpr::Index { expr, .. }
        | ComputedExpr::Keccak256 { expr }
        | ComputedExpr::JsonToBytes { expr } => expr_contains_u64_from_bytes(expr),
        ComputedExpr::Some { value } => expr_contains_u64_from_bytes(value),
        ComputedExpr::Closure { body, .. } => expr_contains_u64_from_bytes(body),
        ComputedExpr::Binary { left, right, .. } => {
            expr_contains_u64_from_bytes(left) || expr_contains_u64_from_bytes(right)
        }
        ComputedExpr::MethodCall { expr, args, .. } => {
            expr_contains_u64_from_bytes(expr) || args.iter().any(expr_contains_u64_from_bytes)
        }
        ComputedExpr::Let { value, body, .. } => {
            expr_contains_u64_from_bytes(value) || expr_contains_u64_from_bytes(body)
        }
        // Bug fix: the condition was previously skipped (`condition: _`), so
        // a conversion appearing only inside the if-condition was missed.
        ComputedExpr::If {
            condition,
            then_branch,
            else_branch,
        } => {
            expr_contains_u64_from_bytes(condition)
                || expr_contains_u64_from_bytes(then_branch)
                || expr_contains_u64_from_bytes(else_branch)
        }
        // NOTE(review): `ResolverComputed` args are expressions too but were
        // treated as leaves by the original; kept as-is — confirm whether
        // resolver arguments can legitimately contain these conversions.
        ComputedExpr::ResolverComputed { .. }
        | ComputedExpr::FieldRef { .. }
        | ComputedExpr::Var { .. }
        | ComputedExpr::Literal { .. }
        | ComputedExpr::ByteArray { .. }
        | ComputedExpr::None
        | ComputedExpr::ContextSlot
        | ComputedExpr::ContextTimestamp => false,
    }
}
/// Looks through `Some(...)` and parenthesis wrappers for a resolver call
/// and returns its declared output type, if any.
pub fn extract_resolver_type_from_computed_expr(
    expr: &crate::ast::ComputedExpr,
) -> Option<&'static str> {
    use crate::ast::ComputedExpr;
    match expr {
        ComputedExpr::Some { value } => extract_resolver_type_from_computed_expr(value),
        ComputedExpr::Paren { expr } => extract_resolver_type_from_computed_expr(expr),
        ComputedExpr::ResolverComputed { method, .. } => resolver_output_type(method),
        _ => None,
    }
}
/// Prefixes every bare (un-dotted) field reference in `expr` with
/// `section`, recursing through all sub-expressions. Paths that already
/// contain a `.` are left untouched.
pub fn qualify_field_refs(expr: ComputedExpr, section: &str) -> ComputedExpr {
    // Shorthand for the common "recurse into one boxed child" case.
    let q = |child: Box<ComputedExpr>| Box::new(qualify_field_refs(*child, section));
    match expr {
        ComputedExpr::FieldRef { path } => {
            let path = if path.contains('.') {
                path
            } else {
                format!("{}.{}", section, path)
            };
            ComputedExpr::FieldRef { path }
        }
        ComputedExpr::UnwrapOr { expr, default } => ComputedExpr::UnwrapOr {
            expr: q(expr),
            default,
        },
        ComputedExpr::Binary { op, left, right } => ComputedExpr::Binary {
            op,
            left: q(left),
            right: q(right),
        },
        ComputedExpr::Cast { expr, to_type } => ComputedExpr::Cast {
            expr: q(expr),
            to_type,
        },
        ComputedExpr::MethodCall { expr, method, args } => ComputedExpr::MethodCall {
            expr: q(expr),
            method,
            args: args
                .into_iter()
                .map(|a| qualify_field_refs(a, section))
                .collect(),
        },
        ComputedExpr::ResolverComputed {
            resolver,
            method,
            args,
        } => ComputedExpr::ResolverComputed {
            resolver,
            method,
            args: args
                .into_iter()
                .map(|a| qualify_field_refs(a, section))
                .collect(),
        },
        ComputedExpr::Paren { expr } => ComputedExpr::Paren { expr: q(expr) },
        ComputedExpr::Let { name, value, body } => ComputedExpr::Let {
            name,
            value: q(value),
            body: q(body),
        },
        ComputedExpr::If {
            condition,
            then_branch,
            else_branch,
        } => ComputedExpr::If {
            condition: q(condition),
            then_branch: q(then_branch),
            else_branch: q(else_branch),
        },
        ComputedExpr::Some { value } => ComputedExpr::Some { value: q(value) },
        ComputedExpr::Slice { expr, start, end } => ComputedExpr::Slice {
            expr: q(expr),
            start,
            end,
        },
        ComputedExpr::Index { expr, index } => ComputedExpr::Index {
            expr: q(expr),
            index,
        },
        ComputedExpr::U64FromLeBytes { bytes } => ComputedExpr::U64FromLeBytes { bytes: q(bytes) },
        ComputedExpr::U64FromBeBytes { bytes } => ComputedExpr::U64FromBeBytes { bytes: q(bytes) },
        ComputedExpr::Closure { param, body } => ComputedExpr::Closure {
            param,
            body: q(body),
        },
        ComputedExpr::Unary { op, expr } => ComputedExpr::Unary {
            op,
            expr: q(expr),
        },
        ComputedExpr::JsonToBytes { expr } => ComputedExpr::JsonToBytes { expr: q(expr) },
        ComputedExpr::Keccak256 { expr } => ComputedExpr::Keccak256 { expr: q(expr) },
        // Leaves carry no nested expressions: return them unchanged.
        leaf @ (ComputedExpr::Literal { .. }
        | ComputedExpr::Var { .. }
        | ComputedExpr::None
        | ComputedExpr::ByteArray { .. }
        | ComputedExpr::ContextSlot
        | ComputedExpr::ContextTimestamp) => leaf,
    }
}
/// Collects every section name referenced as `section.field` anywhere in the
/// token stream, including inside nested groups.
pub fn extract_section_references(expression: &proc_macro2::TokenStream) -> HashSet<String> {
    let mut found = HashSet::new();
    extract_section_references_recursive(expression, &mut found);
    found
}
/// Walks the token stream looking for the pattern `ident . ident` and
/// records the left-hand identifier as a section name; descends into every
/// delimited group.
fn extract_section_references_recursive(
    expression: &proc_macro2::TokenStream,
    sections: &mut HashSet<String>,
) {
    let tokens: Vec<TokenTree> = expression.clone().into_iter().collect();
    for (i, token) in tokens.iter().enumerate() {
        match token {
            TokenTree::Ident(ident) => {
                let dot_follows = matches!(
                    tokens.get(i + 1),
                    Some(TokenTree::Punct(p)) if p.as_char() == '.'
                );
                let field_follows = matches!(tokens.get(i + 2), Some(TokenTree::Ident(_)));
                if dot_follows && field_follows {
                    sections.insert(ident.to_string());
                }
            }
            TokenTree::Group(group) => {
                extract_section_references_recursive(&group.stream(), sections);
            }
            _ => {}
        }
    }
}
/// Collects the field names referenced as `section_name.field` anywhere in
/// the token stream, including inside nested groups.
pub fn extract_field_references_from_section(
    expression: &proc_macro2::TokenStream,
    section_name: &str,
) -> HashSet<String> {
    let mut found = HashSet::new();
    extract_field_references_recursive(expression, section_name, &mut found);
    found
}
/// Records `field` for every `section_name . field` token triple; descends
/// into every delimited group.
fn extract_field_references_recursive(
    expression: &proc_macro2::TokenStream,
    section_name: &str,
    fields: &mut HashSet<String>,
) {
    let tokens: Vec<TokenTree> = expression.clone().into_iter().collect();
    for (i, token) in tokens.iter().enumerate() {
        match token {
            TokenTree::Ident(ident) if *ident == section_name => {
                if let Some(TokenTree::Punct(p)) = tokens.get(i + 1) {
                    if p.as_char() == '.' {
                        if let Some(TokenTree::Ident(field)) = tokens.get(i + 2) {
                            fields.insert(field.to_string());
                        }
                    }
                }
            }
            TokenTree::Group(group) => {
                extract_field_references_recursive(&group.stream(), section_name, fields);
            }
            _ => {}
        }
    }
}
/// Top-level expression dispatch: `let` and `if` are statement-like forms
/// with dedicated parsers; everything else is a binary expression.
fn parse_expr(tokens: &[proc_macro2::TokenTree], start: usize) -> (ComputedExpr, usize) {
    if let Some(proc_macro2::TokenTree::Ident(ident)) = tokens.get(start) {
        if *ident == "let" {
            return parse_let_expr(tokens, start);
        }
        if *ident == "if" {
            return parse_if_expr(tokens, start);
        }
    }
    parse_binary_expr(tokens, start, 0)
}
/// Rewrites bare `FieldRef`s whose path matches a name in `bindings` into
/// `Var` nodes. `Let` and `Closure` extend the binding set for their bodies;
/// everything else just recurses structurally.
fn resolve_bindings_in_expr(expr: ComputedExpr, bindings: &HashSet<String>) -> ComputedExpr {
    // Shorthand for the common "recurse into one boxed child" case.
    let r = |child: Box<ComputedExpr>| Box::new(resolve_bindings_in_expr(*child, bindings));
    match expr {
        // Only un-dotted paths can name a local binding.
        ComputedExpr::FieldRef { ref path } if !path.contains('.') && bindings.contains(path) => {
            ComputedExpr::Var { name: path.clone() }
        }
        // The bound name is visible in the body, not in the value.
        ComputedExpr::Let { name, value, body } => {
            let value = resolve_bindings_in_expr(*value, bindings);
            let mut extended = bindings.clone();
            extended.insert(name.clone());
            let body = resolve_bindings_in_expr(*body, &extended);
            ComputedExpr::Let {
                name,
                value: Box::new(value),
                body: Box::new(body),
            }
        }
        // The parameter shadows/extends the bindings inside the body.
        ComputedExpr::Closure { param, body } => {
            let mut extended = bindings.clone();
            extended.insert(param.clone());
            let body = resolve_bindings_in_expr(*body, &extended);
            ComputedExpr::Closure {
                param,
                body: Box::new(body),
            }
        }
        ComputedExpr::If {
            condition,
            then_branch,
            else_branch,
        } => ComputedExpr::If {
            condition: r(condition),
            then_branch: r(then_branch),
            else_branch: r(else_branch),
        },
        ComputedExpr::Binary { op, left, right } => ComputedExpr::Binary {
            op,
            left: r(left),
            right: r(right),
        },
        ComputedExpr::Unary { op, expr } => ComputedExpr::Unary {
            op,
            expr: r(expr),
        },
        ComputedExpr::MethodCall { expr, method, args } => ComputedExpr::MethodCall {
            expr: r(expr),
            method,
            args: args
                .into_iter()
                .map(|a| resolve_bindings_in_expr(a, bindings))
                .collect(),
        },
        ComputedExpr::ResolverComputed {
            resolver,
            method,
            args,
        } => ComputedExpr::ResolverComputed {
            resolver,
            method,
            args: args
                .into_iter()
                .map(|a| resolve_bindings_in_expr(a, bindings))
                .collect(),
        },
        ComputedExpr::UnwrapOr { expr, default } => ComputedExpr::UnwrapOr {
            expr: r(expr),
            default,
        },
        ComputedExpr::Cast { expr, to_type } => ComputedExpr::Cast {
            expr: r(expr),
            to_type,
        },
        ComputedExpr::Paren { expr } => ComputedExpr::Paren { expr: r(expr) },
        ComputedExpr::Some { value } => ComputedExpr::Some { value: r(value) },
        ComputedExpr::Slice { expr, start, end } => ComputedExpr::Slice {
            expr: r(expr),
            start,
            end,
        },
        ComputedExpr::Index { expr, index } => ComputedExpr::Index {
            expr: r(expr),
            index,
        },
        ComputedExpr::U64FromLeBytes { bytes } => ComputedExpr::U64FromLeBytes { bytes: r(bytes) },
        ComputedExpr::U64FromBeBytes { bytes } => ComputedExpr::U64FromBeBytes { bytes: r(bytes) },
        ComputedExpr::JsonToBytes { expr } => ComputedExpr::JsonToBytes { expr: r(expr) },
        ComputedExpr::Keccak256 { expr } => ComputedExpr::Keccak256 { expr: r(expr) },
        // Leaves (and dotted/unbound FieldRefs that fell through the guard
        // above) pass through unchanged.
        other @ (ComputedExpr::FieldRef { .. }
        | ComputedExpr::Var { .. }
        | ComputedExpr::None
        | ComputedExpr::Literal { .. }
        | ComputedExpr::ByteArray { .. }
        | ComputedExpr::ContextSlot
        | ComputedExpr::ContextTimestamp) => other,
    }
}
/// Parses `let <name> = <value> ; <body>`.
///
/// A missing identifier yields a JSON null literal (the parser's general
/// best-effort fallback); a missing `=` or `;` is tolerated and skipped.
fn parse_let_expr(tokens: &[proc_macro2::TokenTree], start: usize) -> (ComputedExpr, usize) {
    let mut pos = start + 1;
    let name = match tokens.get(pos) {
        Some(proc_macro2::TokenTree::Ident(ident)) => {
            pos += 1;
            ident.to_string()
        }
        _ => {
            return (
                ComputedExpr::Literal {
                    value: serde_json::Value::Null,
                },
                pos,
            )
        }
    };
    // Skip the `=` if present.
    if let Some(proc_macro2::TokenTree::Punct(p)) = tokens.get(pos) {
        if p.as_char() == '=' {
            pos += 1;
        }
    }
    let (value, after_value) = parse_expr_until_semicolon(tokens, pos);
    pos = after_value;
    // Skip the terminating `;` if present.
    if let Some(proc_macro2::TokenTree::Punct(p)) = tokens.get(pos) {
        if p.as_char() == ';' {
            pos += 1;
        }
    }
    let (body, end) = parse_expr(tokens, pos);
    (
        ComputedExpr::Let {
            name,
            value: Box::new(value),
            body: Box::new(body),
        },
        end,
    )
}
fn parse_expr_until_semicolon(
tokens: &[proc_macro2::TokenTree],
start: usize,
) -> (ComputedExpr, usize) {
let mut depth: i32 = 0;
let mut end = start;
while end < tokens.len() {
match &tokens[end] {
proc_macro2::TokenTree::Punct(p) if p.as_char() == ';' && depth == 0 => break,
proc_macro2::TokenTree::Group(g) => {
if g.delimiter() == proc_macro2::Delimiter::Brace {
depth += 1;
}
end += 1;
}
proc_macro2::TokenTree::Punct(p) if p.as_char() == '{' => {
depth += 1;
end += 1;
}
proc_macro2::TokenTree::Punct(p) if p.as_char() == '}' => {
depth = depth.saturating_sub(1);
end += 1;
}
_ => end += 1,
}
}
let expr_tokens: Vec<_> = tokens[start..end].to_vec();
let (expr, _) = parse_binary_expr(&expr_tokens, 0, 0);
(expr, end)
}
/// Parses `if <cond> { <then> } [else { <else> } | else if ...]`.
///
/// The condition is every token before the first brace-delimited group and
/// is parsed as a plain binary expression. A missing branch degrades to a
/// JSON null literal rather than an error, matching the parser's general
/// best-effort strategy. `else if` chains are handled by recursing, so the
/// chained conditional becomes the else-branch (previously the tail `if`
/// was silently dropped and the else-branch became null).
fn parse_if_expr(tokens: &[proc_macro2::TokenTree], start: usize) -> (ComputedExpr, usize) {
    let null_literal = || ComputedExpr::Literal {
        value: serde_json::Value::Null,
    };
    // Parse a `{ ... }` branch body at `*pos`, advancing past it; anything
    // else (or end of input) yields null without consuming a token.
    let parse_branch = |pos: &mut usize| -> ComputedExpr {
        if let Some(proc_macro2::TokenTree::Group(g)) = tokens.get(*pos) {
            if g.delimiter() == proc_macro2::Delimiter::Brace {
                *pos += 1;
                let inner_tokens: Vec<_> = g.stream().into_iter().collect();
                let (expr, _) = parse_expr(&inner_tokens, 0);
                return expr;
            }
        }
        null_literal()
    };
    let mut pos = start + 1;
    // Condition: everything before the first braced group.
    let mut cond_tokens = Vec::new();
    while pos < tokens.len() {
        if let proc_macro2::TokenTree::Group(g) = &tokens[pos] {
            if g.delimiter() == proc_macro2::Delimiter::Brace {
                break;
            }
        }
        cond_tokens.push(tokens[pos].clone());
        pos += 1;
    }
    let (condition, _) = parse_binary_expr(&cond_tokens, 0, 0);
    let then_branch = parse_branch(&mut pos);
    let mut else_branch = null_literal();
    if let Some(proc_macro2::TokenTree::Ident(ident)) = tokens.get(pos) {
        if *ident == "else" {
            pos += 1;
            // `else if ...`: recurse and nest the chained conditional.
            if let Some(proc_macro2::TokenTree::Ident(next)) = tokens.get(pos) {
                if *next == "if" {
                    let (chained, after) = parse_if_expr(tokens, pos);
                    return (
                        ComputedExpr::If {
                            condition: Box::new(condition),
                            then_branch: Box::new(then_branch),
                            else_branch: Box::new(chained),
                        },
                        after,
                    );
                }
            }
            else_branch = parse_branch(&mut pos);
        }
    }
    (
        ComputedExpr::If {
            condition: Box::new(condition),
            then_branch: Box::new(then_branch),
            else_branch: Box::new(else_branch),
        },
        pos,
    )
}
/// Precedence-climbing binary expression parser: folds in operators whose
/// precedence is at least `min_prec`, recursing for the right operand with
/// `prec + 1` so every operator is left-associative.
fn parse_binary_expr(
    tokens: &[proc_macro2::TokenTree],
    start: usize,
    min_prec: u8,
) -> (ComputedExpr, usize) {
    let (mut lhs, mut pos) = parse_unary_expr(tokens, start);
    while let Some((op, prec, after_op)) = try_parse_binary_op(tokens, pos) {
        if prec < min_prec {
            break;
        }
        let (rhs, after_rhs) = parse_binary_expr(tokens, after_op, prec + 1);
        pos = after_rhs;
        lhs = ComputedExpr::Binary {
            op,
            left: Box::new(lhs),
            right: Box::new(rhs),
        };
    }
    (lhs, pos)
}
/// Attempts to read a binary operator at `pos`; returns the operator, its
/// precedence, and the position just past it. Two-character operators
/// (`==`, `!=`, `>=`, `<=`, `&&`, `||`, `<<`, `>>`) are matched before
/// their one-character prefixes.
fn try_parse_binary_op(
    tokens: &[proc_macro2::TokenTree],
    pos: usize,
) -> Option<(BinaryOp, u8, usize)> {
    let first = match tokens.get(pos) {
        Some(proc_macro2::TokenTree::Punct(p)) => p.as_char(),
        _ => return None,
    };
    let second = match tokens.get(pos + 1) {
        Some(proc_macro2::TokenTree::Punct(p)) => Some(p.as_char()),
        _ => None,
    };
    if let Some(second) = second {
        let two_char = match (first, second) {
            ('=', '=') => Some((BinaryOp::Eq, 6)),
            ('!', '=') => Some((BinaryOp::Ne, 6)),
            ('>', '=') => Some((BinaryOp::Gte, 7)),
            ('<', '=') => Some((BinaryOp::Lte, 7)),
            ('&', '&') => Some((BinaryOp::And, 2)),
            ('|', '|') => Some((BinaryOp::Or, 1)),
            ('<', '<') => Some((BinaryOp::Shl, 8)),
            ('>', '>') => Some((BinaryOp::Shr, 8)),
            _ => None,
        };
        if let Some((op, prec)) = two_char {
            return Some((op, prec, pos + 2));
        }
        // Defensive: a `&`/`|` immediately followed by its double is the
        // logical operator, never the bitwise one.
        if (first == '&' && second == '&') || (first == '|' && second == '|') {
            return None;
        }
    }
    let (op, prec) = match first {
        '+' => (BinaryOp::Add, 9),
        '-' => (BinaryOp::Sub, 9),
        '*' => (BinaryOp::Mul, 10),
        '/' => (BinaryOp::Div, 10),
        '%' => (BinaryOp::Mod, 10),
        '>' => (BinaryOp::Gt, 7),
        '<' => (BinaryOp::Lt, 7),
        '^' => (BinaryOp::Xor, 4),
        '&' => (BinaryOp::BitAnd, 5),
        '|' => (BinaryOp::BitOr, 3),
        _ => return None,
    };
    Some((op, prec, pos + 1))
}
/// Parses an optional chain of prefix `!` operators ahead of a postfix
/// expression. End of input yields a JSON null literal.
fn parse_unary_expr(tokens: &[proc_macro2::TokenTree], start: usize) -> (ComputedExpr, usize) {
    match tokens.get(start) {
        None => (
            ComputedExpr::Literal {
                value: serde_json::Value::Null,
            },
            start,
        ),
        Some(proc_macro2::TokenTree::Punct(p)) if p.as_char() == '!' => {
            let (operand, next) = parse_unary_expr(tokens, start + 1);
            (
                ComputedExpr::Unary {
                    op: UnaryOp::Not,
                    expr: Box::new(operand),
                },
                next,
            )
        }
        Some(_) => parse_postfix_expr(tokens, start),
    }
}
/// Parses a primary expression followed by any chain of postfix forms:
/// `[index]` / `[start..end]` slicing, `.method(args)` calls, `.field`
/// accesses, and `as Type` casts. Loops until no postfix token follows.
fn parse_postfix_expr(tokens: &[proc_macro2::TokenTree], start: usize) -> (ComputedExpr, usize) {
    let (mut expr, mut pos) = parse_primary_expr(tokens, start);
    loop {
        if pos >= tokens.len() {
            break;
        }
        // `[...]` — a bracket group is either a slice (contains `..`) or a
        // plain index.
        if let proc_macro2::TokenTree::Group(group) = &tokens[pos] {
            if group.delimiter() == proc_macro2::Delimiter::Bracket {
                pos += 1;
                let inner_tokens: Vec<_> = group.stream().into_iter().collect();
                // Scan for two adjacent '.' puncts to detect a range.
                let mut is_range = false;
                let mut dot_dot_pos = None;
                for (i, token) in inner_tokens.iter().enumerate() {
                    if let proc_macro2::TokenTree::Punct(p) = token {
                        if p.as_char() == '.' && i + 1 < inner_tokens.len() {
                            if let proc_macro2::TokenTree::Punct(p2) = &inner_tokens[i + 1] {
                                if p2.as_char() == '.' {
                                    is_range = true;
                                    dot_dot_pos = Some(i);
                                    break;
                                }
                            }
                        }
                    }
                }
                if is_range {
                    let dot_pos = dot_dot_pos.unwrap();
                    let start_tokens: Vec<_> = inner_tokens[..dot_pos].to_vec();
                    let end_tokens: Vec<_> = inner_tokens[dot_pos + 2..].to_vec();
                    // Open-ended ranges: `[..n]` starts at 0, `[n..]` runs to
                    // usize::MAX (interpreted downstream as "to the end").
                    let start_val = if start_tokens.is_empty() {
                        0
                    } else {
                        parse_usize_literal(&start_tokens)
                    };
                    let end_val = if end_tokens.is_empty() {
                        usize::MAX
                    } else {
                        parse_usize_literal(&end_tokens)
                    };
                    expr = ComputedExpr::Slice {
                        expr: Box::new(expr),
                        start: start_val,
                        end: end_val,
                    };
                    continue;
                } else {
                    // Plain `[i]` index; only literal indices are supported.
                    let index = parse_usize_literal(&inner_tokens);
                    expr = ComputedExpr::Index {
                        expr: Box::new(expr),
                        index,
                    };
                    continue;
                }
            }
        }
        // `.name` — method call or field access.
        if let proc_macro2::TokenTree::Punct(p) = &tokens[pos] {
            if p.as_char() == '.' {
                pos += 1;
                if pos >= tokens.len() {
                    break;
                }
                if let proc_macro2::TokenTree::Ident(ident) = &tokens[pos] {
                    let name = ident.to_string();
                    pos += 1;
                    // `.name(...)` — a call. Special-cased names are checked
                    // before falling back to a generic MethodCall.
                    if pos < tokens.len() {
                        if let proc_macro2::TokenTree::Group(group) = &tokens[pos] {
                            if group.delimiter() == proc_macro2::Delimiter::Parenthesis {
                                let args = parse_method_args(&group.stream());
                                pos += 1;
                                // `.unwrap_or(<literal>)` becomes a dedicated
                                // node carrying the literal default. Non-literal
                                // defaults fall through to a plain MethodCall.
                                if name == "unwrap_or" && args.len() == 1 {
                                    if let ComputedExpr::Literal { value } = &args[0] {
                                        expr = ComputedExpr::UnwrapOr {
                                            expr: Box::new(expr),
                                            default: value.clone(),
                                        };
                                        continue;
                                    }
                                }
                                if name == "reverse_bits" && args.is_empty() {
                                    expr = ComputedExpr::Unary {
                                        op: UnaryOp::ReverseBits,
                                        expr: Box::new(expr),
                                    };
                                    continue;
                                }
                                if name == "to_bytes" && args.is_empty() {
                                    expr = ComputedExpr::JsonToBytes {
                                        expr: Box::new(expr),
                                    };
                                    continue;
                                }
                                // Resolver methods: the receiver becomes the
                                // first resolver argument.
                                if let Some(resolver) = resolver_for_method(&name) {
                                    let base_expr = expr;
                                    let mut resolver_args = Vec::with_capacity(args.len() + 1);
                                    resolver_args.push(base_expr);
                                    resolver_args.extend(args);
                                    expr = ComputedExpr::ResolverComputed {
                                        resolver: resolver.to_string(),
                                        method: name,
                                        args: resolver_args,
                                    };
                                    continue;
                                }
                                expr = ComputedExpr::MethodCall {
                                    expr: Box::new(expr),
                                    method: name,
                                    args,
                                };
                                continue;
                            }
                        }
                    }
                    // `.name` without parentheses: extend a field-reference
                    // path, or treat as a zero-argument method call on any
                    // other receiver.
                    if let ComputedExpr::FieldRef { path } = &expr {
                        expr = ComputedExpr::FieldRef {
                            path: format!("{}.{}", path, name),
                        };
                    } else {
                        expr = ComputedExpr::MethodCall {
                            expr: Box::new(expr),
                            method: name,
                            args: vec![],
                        };
                    }
                    continue;
                }
            }
        }
        // `as Type` cast; only single-identifier types are recognized.
        if let proc_macro2::TokenTree::Ident(ident) = &tokens[pos] {
            if *ident == "as" {
                pos += 1;
                if pos < tokens.len() {
                    if let proc_macro2::TokenTree::Ident(type_ident) = &tokens[pos] {
                        let to_type = type_ident.to_string();
                        pos += 1;
                        expr = ComputedExpr::Cast {
                            expr: Box::new(expr),
                            to_type,
                        };
                        continue;
                    }
                }
            }
        }
        break;
    }
    (expr, pos)
}
/// Parses a `usize` from the first token, which must be a numeric literal.
/// Underscore separators and alphabetic type suffixes (`8usize`, `4u32`)
/// are stripped; anything unparsable yields 0.
fn parse_usize_literal(tokens: &[proc_macro2::TokenTree]) -> usize {
    if let Some(proc_macro2::TokenTree::Literal(lit)) = tokens.first() {
        let cleaned = lit.to_string().replace('_', "");
        // Strip the type suffix the same way `parse_usize_from_tokens`
        // does; previously a suffixed literal like `8usize` failed to parse
        // and silently became 0.
        let digits = cleaned.trim_end_matches(|c: char| c.is_alphabetic());
        digits.parse::<usize>().unwrap_or(0)
    } else {
        0
    }
}
/// Parses a primary (atomic) expression: a parenthesized group, a byte-array
/// literal, a brace group (transparent), a closure, an identifier (with
/// special handling for `None`, `Some(..)`, context variables, and
/// `path::method(...)` forms), or a numeric literal. Unrecognized tokens
/// yield a JSON null literal.
fn parse_primary_expr(tokens: &[proc_macro2::TokenTree], start: usize) -> (ComputedExpr, usize) {
    if start >= tokens.len() {
        return (
            ComputedExpr::Literal {
                value: serde_json::Value::Null,
            },
            start,
        );
    }
    match &tokens[start] {
        // `( ... )` — parenthesized sub-expression, preserved as Paren.
        proc_macro2::TokenTree::Group(group)
            if group.delimiter() == proc_macro2::Delimiter::Parenthesis =>
        {
            let inner_tokens: Vec<_> = group.stream().into_iter().collect();
            let (inner_expr, _) = parse_expr(&inner_tokens, 0);
            (
                ComputedExpr::Paren {
                    expr: Box::new(inner_expr),
                },
                start + 1,
            )
        }
        // `[ ... ]` — byte-array literal (`[a, b, c]` or `[v; n]`).
        proc_macro2::TokenTree::Group(group)
            if group.delimiter() == proc_macro2::Delimiter::Bracket =>
        {
            let inner_tokens: Vec<_> = group.stream().into_iter().collect();
            let bytes = parse_byte_array_literal(&inner_tokens);
            (ComputedExpr::ByteArray { bytes }, start + 1)
        }
        // `{ ... }` — transparent block; the inner expression is returned
        // directly without a wrapper node.
        proc_macro2::TokenTree::Group(group)
            if group.delimiter() == proc_macro2::Delimiter::Brace =>
        {
            let inner_tokens: Vec<_> = group.stream().into_iter().collect();
            let (inner_expr, _) = parse_expr(&inner_tokens, 0);
            (inner_expr, start + 1)
        }
        // `| param | body` — closure.
        proc_macro2::TokenTree::Punct(p) if p.as_char() == '|' => parse_closure(tokens, start),
        proc_macro2::TokenTree::Ident(ident) => {
            let name = ident.to_string();
            if name == "None" {
                return (ComputedExpr::None, start + 1);
            }
            // Reserved context variables injected by the macro.
            if name == "__slot" {
                return (ComputedExpr::ContextSlot, start + 1);
            }
            if name == "__timestamp" {
                return (ComputedExpr::ContextTimestamp, start + 1);
            }
            if name == "Some" && start + 1 < tokens.len() {
                if let proc_macro2::TokenTree::Group(group) = &tokens[start + 1] {
                    if group.delimiter() == proc_macro2::Delimiter::Parenthesis {
                        let inner_tokens: Vec<_> = group.stream().into_iter().collect();
                        let (inner_expr, _) = parse_expr(&inner_tokens, 0);
                        return (
                            ComputedExpr::Some {
                                value: Box::new(inner_expr),
                            },
                            start + 2,
                        );
                    }
                }
            }
            // `name::method(args)` — path-call form, recognized as two ':'
            // puncts, an identifier, then a parenthesized group.
            // NOTE(review): the ':' puncts are not checked for Joint spacing,
            // so `name : : method(...)` would also match — confirm intended.
            if start + 1 < tokens.len() {
                if let proc_macro2::TokenTree::Punct(p1) = &tokens[start + 1] {
                    if p1.as_char() == ':' && start + 2 < tokens.len() {
                        if let proc_macro2::TokenTree::Punct(p2) = &tokens[start + 2] {
                            if p2.as_char() == ':' && start + 3 < tokens.len() {
                                if let proc_macro2::TokenTree::Ident(method_ident) =
                                    &tokens[start + 3]
                                {
                                    let method_name = method_ident.to_string();
                                    if start + 4 < tokens.len() {
                                        if let proc_macro2::TokenTree::Group(group) =
                                            &tokens[start + 4]
                                        {
                                            if group.delimiter()
                                                == proc_macro2::Delimiter::Parenthesis
                                            {
                                                let inner_tokens: Vec<_> =
                                                    group.stream().into_iter().collect();
                                                let (arg_expr, _) = parse_expr(&inner_tokens, 0);
                                                // Dedicated nodes for the two
                                                // recognized u64 conversions.
                                                if name == "u64" && method_name == "from_le_bytes" {
                                                    return (
                                                        ComputedExpr::U64FromLeBytes {
                                                            bytes: Box::new(arg_expr),
                                                        },
                                                        start + 5,
                                                    );
                                                }
                                                if name == "u64" && method_name == "from_be_bytes" {
                                                    return (
                                                        ComputedExpr::U64FromBeBytes {
                                                            bytes: Box::new(arg_expr),
                                                        },
                                                        start + 5,
                                                    );
                                                }
                                                // Other paths: generic call with
                                                // the argument as receiver and the
                                                // full `path::method` as name.
                                                return (
                                                    ComputedExpr::MethodCall {
                                                        expr: Box::new(arg_expr),
                                                        method: format!(
                                                            "{}::{}",
                                                            name, method_name
                                                        ),
                                                        args: vec![],
                                                    },
                                                    start + 5,
                                                );
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // Bare identifier: a field reference (may be qualified later).
            (ComputedExpr::FieldRef { path: name }, start + 1)
        }
        proc_macro2::TokenTree::Literal(lit) => {
            let lit_str = lit.to_string();
            // A '.' marks a float; otherwise parse as integer after removing
            // underscores and any alphabetic type suffix. Unparsable text is
            // preserved as a JSON string.
            // NOTE(review): integers go through i64, so u64 literals above
            // i64::MAX fall back to strings — confirm that's acceptable.
            let value = if lit_str.contains('.') {
                lit_str
                    .parse::<f64>()
                    .map(serde_json::Value::from)
                    .unwrap_or(serde_json::Value::String(lit_str))
            } else {
                let clean = lit_str
                    .replace('_', "")
                    .trim_end_matches(|c: char| c.is_alphabetic())
                    .to_string();
                clean
                    .parse::<i64>()
                    .map(serde_json::Value::from)
                    .unwrap_or(serde_json::Value::String(lit_str))
            };
            (ComputedExpr::Literal { value }, start + 1)
        }
        // Anything else (stray punctuation, etc.) becomes a null literal.
        _ => (
            ComputedExpr::Literal {
                value: serde_json::Value::Null,
            },
            start + 1,
        ),
    }
}
/// Parses a closure of the shape `| param | body`. A missing parameter name
/// falls back to `x`; a missing closing `|` is tolerated.
fn parse_closure(tokens: &[proc_macro2::TokenTree], start: usize) -> (ComputedExpr, usize) {
    let mut pos = start + 1;
    let param = match tokens.get(pos) {
        Some(proc_macro2::TokenTree::Ident(ident)) => {
            pos += 1;
            ident.to_string()
        }
        _ => "x".to_string(),
    };
    // Skip the closing `|` if present.
    if let Some(proc_macro2::TokenTree::Punct(p)) = tokens.get(pos) {
        if p.as_char() == '|' {
            pos += 1;
        }
    }
    // The body runs to the end of the slice; the parse offset comes back
    // relative to `remaining`, so translate it into `tokens` coordinates.
    let remaining: Vec<_> = tokens[pos..].to_vec();
    let (body, consumed) = parse_expr(&remaining, 0);
    (
        ComputedExpr::Closure {
            param,
            body: Box::new(body),
        },
        pos + consumed,
    )
}
/// Parses the interior of a byte-array literal: either `[value; count]`
/// repetition syntax or a comma-separated list `[a, b, c]`.
fn parse_byte_array_literal(tokens: &[proc_macro2::TokenTree]) -> Vec<u8> {
    // `[value; count]` — the first top-level `;` splits value from count.
    let semicolon = tokens
        .iter()
        .position(|t| matches!(t, proc_macro2::TokenTree::Punct(p) if p.as_char() == ';'));
    if let Some(semi) = semicolon {
        let value = parse_byte_value(&tokens[..semi]);
        let count = parse_usize_from_tokens(&tokens[semi + 1..]);
        return vec![value; count];
    }
    // Comma-separated list: parse each non-empty chunk as one byte.
    let mut bytes = Vec::new();
    let mut pending: Vec<proc_macro2::TokenTree> = Vec::new();
    for token in tokens {
        let is_comma = matches!(token, proc_macro2::TokenTree::Punct(p) if p.as_char() == ',');
        if is_comma {
            if !pending.is_empty() {
                bytes.push(parse_byte_value(&pending));
                pending.clear();
            }
        } else {
            pending.push(token.clone());
        }
    }
    if !pending.is_empty() {
        bytes.push(parse_byte_value(&pending));
    }
    bytes
}
/// Parses a single byte value from the first token, which must be a numeric
/// literal: decimal (`255`, `255u8`), hex (`0xFF`), binary (`0b1010`) or
/// octal (`0o17`), with underscore separators and alphabetic type suffixes
/// tolerated. Anything unparsable yields 0, matching the parser's general
/// best-effort strategy. (Binary/octal prefixes and underscore separators
/// are a backward-compatible generalization; they previously parsed as 0.)
fn parse_byte_value(tokens: &[proc_macro2::TokenTree]) -> u8 {
    if let Some(proc_macro2::TokenTree::Literal(lit)) = tokens.first() {
        let lower = lit.to_string().to_lowercase().replace('_', "");
        // Radix-prefixed literals: strip the prefix, then cut off any type
        // suffix (which always starts with 'u' or 'i') before converting.
        for (prefix, radix) in [("0x", 16u32), ("0b", 2), ("0o", 8)] {
            if let Some(rest) = lower.strip_prefix(prefix) {
                let digits = match rest.find(['u', 'i']) {
                    Some(pos) => &rest[..pos],
                    None => rest,
                };
                return u8::from_str_radix(digits, radix).unwrap_or(0);
            }
        }
        // Decimal literal, possibly with a type suffix.
        let clean = lower.trim_end_matches(|c: char| c.is_alphabetic());
        return clean.parse::<u8>().unwrap_or(0);
    }
    0
}
/// Parses a `usize` from the first token, which must be a numeric literal;
/// an alphabetic type suffix (`8usize`, `4u32`) is stripped first.
/// Defaults to 0 for anything unparsable.
fn parse_usize_from_tokens(tokens: &[proc_macro2::TokenTree]) -> usize {
    match tokens.first() {
        Some(proc_macro2::TokenTree::Literal(lit)) => {
            let text = lit.to_string();
            let digits = text.trim_end_matches(|c: char| c.is_alphabetic());
            digits.parse::<usize>().unwrap_or(0)
        }
        _ => 0,
    }
}
/// Splits a parenthesized argument token stream on top-level commas and
/// parses each non-empty chunk as one expression.
///
/// Nested `(...)`, `[...]` and `{...}` regions arrive as single
/// `TokenTree::Group` tokens, so every `,` visible at this level is a real
/// argument separator. (The previous implementation tracked a "depth" that
/// was incremented for every `Group` and only decremented on a `)` punct —
/// which never occurs in a proc_macro2 stream — so after the first grouped
/// argument the depth stayed positive forever and later commas were
/// ignored, silently dropping trailing arguments: `f(g(1), 2)` lost the
/// `2`.)
pub fn parse_method_args(tokens: &proc_macro2::TokenStream) -> Vec<ComputedExpr> {
    let mut args = Vec::new();
    let mut current: Vec<proc_macro2::TokenTree> = Vec::new();
    for token in tokens.clone() {
        let is_separator =
            matches!(&token, proc_macro2::TokenTree::Punct(p) if p.as_char() == ',');
        if is_separator {
            if !current.is_empty() {
                let (expr, _) = parse_expr(&current, 0);
                args.push(expr);
                current.clear();
            }
        } else {
            current.push(token);
        }
    }
    if !current.is_empty() {
        let (expr, _) = parse_expr(&current, 0);
        args.push(expr);
    }
    args
}