use crate::ast::*;
use crate::errors::Diagnostic;
use crate::lexer::{Token, TokenKind};
/// Parse a lexed token stream into a `Program`.
///
/// On success returns the AST; on failure returns every diagnostic the
/// parser collected (it recovers after each error and keeps going, so
/// one bad statement does not hide later problems).
pub fn parse(tokens: Vec<Token>, source: &str, filename: &str) -> Result<Program, Vec<Diagnostic>> {
    Parser::new(tokens, source, filename).parse_program()
}
/// Recursive-descent / Pratt parser over a pre-lexed token stream.
struct Parser<'a> {
/// Token stream from the lexer; expected to terminate with an `Eof` token.
tokens: Vec<Token>,
/// Index of the next token to consume.
current: usize,
/// Original source text; used only to slice raw `hcl { ... }` content.
_source: &'a str,
/// File name attached to every emitted diagnostic.
filename: &'a str,
/// Diagnostics accumulated across error recoveries.
errors: Vec<Diagnostic>,
}
impl<'a> Parser<'a> {
fn new(tokens: Vec<Token>, source: &'a str, filename: &'a str) -> Self {
Self {
tokens,
current: 0,
_source: source,
filename,
errors: Vec::new(),
}
}
/// Parse statements until EOF, recovering after each failed statement.
/// Succeeds only if no diagnostics were recorded along the way.
fn parse_program(&mut self) -> Result<Program, Vec<Diagnostic>> {
    let mut statements = Vec::new();
    while !self.is_at_end() {
        if let Ok(stmt) = self.parse_statement() {
            statements.push(stmt);
        } else {
            // Skip ahead to a likely statement boundary and keep parsing so
            // a single mistake does not mask the rest of the file's errors.
            self.synchronize();
        }
    }
    if !self.errors.is_empty() {
        return Err(std::mem::take(&mut self.errors));
    }
    Ok(Program { statements })
}
/// Dispatch on the leading keyword to the matching statement parser.
fn parse_statement(&mut self) -> Result<Spanned<Statement>, ()> {
    match self.peek().kind {
        TokenKind::Import => self.parse_import(),
        TokenKind::Val => self.parse_val_decl(),
        TokenKind::Module => self.parse_module(),
        TokenKind::Assert => self.parse_assert(),
        TokenKind::Hcl => self.parse_hcl_block(),
        TokenKind::Unsafe => self.parse_unsafe_stmt(),
        _ => {
            let found = self.peek().kind.clone();
            self.error(&format!("expected statement, found {}", found));
            Err(())
        }
    }
}
/// Parse `import "path"` with an optional `as alias` suffix.
fn parse_import(&mut self) -> Result<Spanned<Statement>, ()> {
    let start = self.expect(TokenKind::Import)?;
    let path = self.expect_string("import path")?;
    let mut alias = None;
    if self.check(&TokenKind::As) {
        self.advance();
        alias = Some(self.expect_ident("import alias")?);
    }
    // The statement ends at the alias when present, otherwise at the path.
    let end_span = match &alias {
        Some(a) => a.span,
        None => path.span,
    };
    let span = start.span.merge(end_span);
    Ok(Spanned::new(Statement::Import(ImportStmt { path, alias }), span))
}
/// Parse `val name [: Type] = expr`.
fn parse_val_decl(&mut self) -> Result<Spanned<Statement>, ()> {
    let start = self.expect(TokenKind::Val)?;
    let name = self.expect_ident("variable name")?;
    // Optional `: Type` annotation.
    let mut type_ann = None;
    if self.check(&TokenKind::Colon) {
        self.advance();
        type_ann = Some(self.parse_type()?);
    }
    self.expect(TokenKind::Eq)?;
    let value = self.parse_expr()?;
    let span = start.span.merge(value.span);
    let decl = ValDecl { name, type_ann, value };
    Ok(Spanned::new(Statement::ValDecl(decl), span))
}
/// Parse `module Name { statements... }`.
fn parse_module(&mut self) -> Result<Spanned<Statement>, ()> {
    let start = self.expect(TokenKind::Module)?;
    let name = self.expect_ident("module name")?;
    self.expect(TokenKind::LBrace)?;
    let mut body = Vec::new();
    loop {
        // Stop at `}` or EOF; EOF will be reported by the expect below.
        if self.check(&TokenKind::RBrace) || self.is_at_end() {
            break;
        }
        body.push(self.parse_statement()?);
    }
    let end = self.expect(TokenKind::RBrace)?;
    Ok(Spanned::new(
        Statement::Module(ModuleDecl { name, body }),
        start.span.merge(end.span),
    ))
}
/// Parse `assert(condition, "message")`.
fn parse_assert(&mut self) -> Result<Spanned<Statement>, ()> {
    let start = self.expect(TokenKind::Assert)?;
    self.expect(TokenKind::LParen)?;
    let condition = self.parse_expr()?;
    self.expect(TokenKind::Comma)?;
    let message = self.expect_string("assertion message")?;
    let end = self.expect(TokenKind::RParen)?;
    let stmt = Statement::Assert(AssertStmt { condition, message });
    Ok(Spanned::new(stmt, start.span.merge(end.span)))
}
/// Parse `hcl("reason") { ...raw HCL... }`.
///
/// The braces' interior is not parsed as this language; instead the raw
/// source text between the outer braces is sliced out verbatim (nested
/// braces are balanced by token counting) and stored as a string.
/// A blank justification is rejected with a dedicated diagnostic.
fn parse_hcl_block(&mut self) -> Result<Spanned<Statement>, ()> {
let start = self.expect(TokenKind::Hcl)?;
self.expect(TokenKind::LParen)?;
let reason = self.expect_string("HCL block reason")?;
// Escape hatches require a human-readable justification.
if reason.node.trim().is_empty() {
self.errors.push(
Diagnostic::error_at(
"HCL blocks require a non-empty justification",
reason.span,
self.filename,
)
.with_code(crate::errors::ErrorCode::UnexpectedToken)
.with_primary_label("justification cannot be empty")
.with_help("provide a reason: hcl(\"DynamoDB not yet supported\") { ... }"),
);
return Err(());
}
self.expect(TokenKind::RParen)?;
self.expect(TokenKind::LBrace)?;
// Raw content starts at the first token after `{`. NOTE(review): assumes
// `Span.start` is a byte offset into `_source` — confirm with the lexer,
// otherwise the slice below could panic on a char boundary.
let content_start = self.peek().span.start as usize;
// Depth 1 accounts for the `{` just consumed; scan to its matching `}`.
let mut depth = 1;
while depth > 0 && !self.is_at_end() {
if self.check(&TokenKind::LBrace) {
depth += 1;
} else if self.check(&TokenKind::RBrace) {
depth -= 1;
if depth == 0 {
// Leave the closing `}` unconsumed so `expect` below records its span.
break;
}
}
self.advance();
}
// End of content is the start of the matching `}` (or EOF on error).
let content_end = self.peek().span.start as usize;
let raw_content = self._source[content_start..content_end].to_string();
let end = self.expect(TokenKind::RBrace)?;
let span = start.span.merge(end.span);
// The content span reuses the `hcl` keyword's line/column; only the
// byte offsets are precise for the sliced text.
let content_span = Span::new(
content_start as u32,
content_end as u32,
start.span.start_line, start.span.start_col, );
Ok(Spanned::new(
Statement::HclBlock(HclBlock {
reason,
content: Spanned::new(raw_content, content_span),
}),
span,
))
}
fn parse_unsafe_stmt(&mut self) -> Result<Spanned<Statement>, ()> {
let start = self.expect(TokenKind::Unsafe)?;
self.expect(TokenKind::LParen)?;
let reason = self.expect_string("unsafe reason")?;
self.expect(TokenKind::RParen)?;
self.expect(TokenKind::LBrace)?;
let mut body = Vec::new();
while !self.check(&TokenKind::RBrace) && !self.is_at_end() {
body.push(self.parse_statement()?);
}
let end = self.expect(TokenKind::RBrace)?;
let span = start.span.merge(end.span);
Ok(Spanned::new(
Statement::Unsafe(UnsafeStmt { reason, body }),
span,
))
}
/// Parse a type expression: `Name` or `Name<Arg, ...>`.
///
/// Consistency fix: a trailing comma in the generic argument list
/// (`List<Int,>`) is now accepted, matching the list, record, and call
/// argument grammars elsewhere in this parser. This is backward
/// compatible — strictly more inputs are accepted.
fn parse_type(&mut self) -> Result<Spanned<TypeExpr>, ()> {
    let name_token = self.expect_ident("type name")?;
    let name = name_token.node.clone();
    let mut span = name_token.span;
    if self.check(&TokenKind::Lt) {
        self.advance();
        let mut args = Vec::new();
        loop {
            args.push(self.parse_type()?);
            if self.check(&TokenKind::Comma) {
                self.advance();
                // Allow `List<Int,>` just like `[a, b,]` and `f(x,)`.
                if self.check(&TokenKind::Gt) {
                    break;
                }
            } else {
                break;
            }
        }
        let end = self.expect(TokenKind::Gt)?;
        span = span.merge(end.span);
        Ok(Spanned::new(TypeExpr::Generic { name, args }, span))
    } else {
        Ok(Spanned::new(TypeExpr::Named(name), span))
    }
}
/// Parse a complete expression: Pratt-parse at minimum binding power 0,
/// so every operator is accepted at the top level.
fn parse_expr(&mut self) -> Result<Spanned<Expr>, ()> {
self.parse_expr_bp(0)
}
/// Pratt expression parser: parse a prefix/primary expression, then fold
/// in binary operators whose left binding power is at least `min_bp`.
fn parse_expr_bp(&mut self, min_bp: u8) -> Result<Spanned<Expr>, ()> {
    // Lambdas go first: `x => ...` and `(a, b) => ...` would otherwise
    // parse as an identifier or a parenthesized expression.
    if let Some(lambda) = self.try_parse_lambda()? {
        return Ok(lambda);
    }
    let mut lhs = self.parse_unary_or_primary()?;
    while let Some(op) = self.peek_binary_op() {
        let (l_bp, r_bp) = Self::infix_binding_power(op);
        if l_bp < min_bp {
            // Binds looser than our caller allows; let the caller take it.
            break;
        }
        self.advance();
        // The right-hand side parses at r_bp, which (being l_bp + 1)
        // makes every operator left-associative.
        let rhs = self.parse_expr_bp(r_bp)?;
        let span = lhs.span.merge(rhs.span);
        let node = Expr::Binary {
            left: Box::new(lhs),
            op,
            right: Box::new(rhs),
        };
        lhs = Spanned::new(node, span);
    }
    Ok(lhs)
}
/// Try to parse a lambda; returns `Ok(None)` (consuming nothing) when the
/// upcoming tokens are not a lambda.
///
/// Two shapes are recognized:
/// - `ident => expr` — detected by one token of lookahead;
/// - `(a, b, ...) => expr` — detected by `is_lambda_params`, which scans
///   ahead without consuming, so a plain parenthesized expression still
///   falls through to `parse_atom`.
fn try_parse_lambda(&mut self) -> Result<Option<Spanned<Expr>>, ()> {
// Single-parameter form: identifier immediately followed by `=>`.
if self.peek().is_ident() {
if let Some(next) = self.peek_next() {
if next.kind == TokenKind::Arrow {
let param = self.expect_ident("lambda parameter")?;
let start_span = param.span;
self.expect(TokenKind::Arrow)?;
let body = self.parse_expr()?;
let span = start_span.merge(body.span);
return Ok(Some(Spanned::new(
Expr::Lambda {
params: vec![param],
body: Box::new(body),
},
span,
)));
}
}
}
// Parenthesized form: only commit if lookahead confirms `(...) =>`.
if self.check(&TokenKind::LParen) {
if self.is_lambda_params() {
let start = self.advance(); let mut params = Vec::new();
if !self.check(&TokenKind::RParen) {
loop {
params.push(self.expect_ident("lambda parameter")?);
if self.check(&TokenKind::Comma) {
self.advance();
} else {
break;
}
}
}
self.expect(TokenKind::RParen)?;
self.expect(TokenKind::Arrow)?;
let body = self.parse_expr()?;
let span = start.span.merge(body.span);
return Ok(Some(Spanned::new(
Expr::Lambda {
params,
body: Box::new(body),
},
span,
)));
}
}
Ok(None)
}
/// Lookahead (no tokens consumed): does the parenthesized group starting
/// at `current` look like a lambda parameter list — `(ident, ...)`
/// followed immediately by `=>`?
fn is_lambda_params(&self) -> bool {
    let mut depth: i32 = 0;
    for (i, token) in self.tokens.iter().enumerate().skip(self.current) {
        match &token.kind {
            TokenKind::LParen => depth += 1,
            TokenKind::RParen => {
                depth -= 1;
                if depth == 0 {
                    // Group closed: it is a lambda only if `=>` follows.
                    return self
                        .tokens
                        .get(i + 1)
                        .is_some_and(|t| t.kind == TokenKind::Arrow);
                }
            }
            // Identifiers and commas are the only tokens permitted inside
            // a parameter list; anything else means "not a lambda".
            TokenKind::Ident(_) | TokenKind::Comma => {}
            _ => return false,
        }
    }
    false
}
/// Parse a prefix unary operator (`!`, `-`) or fall through to a postfix
/// expression. The two operator cases share one construction path.
fn parse_unary_or_primary(&mut self) -> Result<Spanned<Expr>, ()> {
    let op = match self.peek().kind {
        TokenKind::Bang => Some(UnaryOp::Not),
        TokenKind::Minus => Some(UnaryOp::Neg),
        _ => None,
    };
    let Some(op) = op else {
        return self.parse_postfix_expr();
    };
    let op_token = self.advance();
    // Unary operators nest (`!!x`, `--x`), so recurse here rather than
    // dropping straight to postfix parsing.
    let operand = self.parse_unary_or_primary()?;
    let span = op_token.span.merge(operand.span);
    Ok(Spanned::new(
        Expr::Unary {
            op,
            operand: Box::new(operand),
        },
        span,
    ))
}
/// Parse an atom, then any chain of postfix `.field` accesses and
/// `(args)` calls, left to right.
fn parse_postfix_expr(&mut self) -> Result<Spanned<Expr>, ()> {
    let mut expr = self.parse_atom()?;
    loop {
        if self.check(&TokenKind::LParen) {
            expr = self.parse_call(expr)?;
            continue;
        }
        if !self.check(&TokenKind::Dot) {
            break;
        }
        self.advance();
        let field = self.expect_ident("field name")?;
        let span = expr.span.merge(field.span);
        expr = Spanned::new(
            Expr::MemberAccess {
                object: Box::new(expr),
                field,
            },
            span,
        );
    }
    Ok(expr)
}
/// If the current token is a binary operator, return the corresponding
/// AST operator without consuming anything; otherwise `None`.
fn peek_binary_op(&self) -> Option<BinaryOp> {
    let op = match &self.peek().kind {
        TokenKind::Plus => BinaryOp::Add,
        TokenKind::Minus => BinaryOp::Sub,
        TokenKind::Star => BinaryOp::Mul,
        TokenKind::Slash => BinaryOp::Div,
        TokenKind::EqEq => BinaryOp::Eq,
        TokenKind::BangEq => BinaryOp::NotEq,
        TokenKind::Lt => BinaryOp::Lt,
        TokenKind::LtEq => BinaryOp::LtEq,
        TokenKind::Gt => BinaryOp::Gt,
        TokenKind::GtEq => BinaryOp::GtEq,
        TokenKind::And => BinaryOp::And,
        TokenKind::Or => BinaryOp::Or,
        _ => return None,
    };
    Some(op)
}
/// Left/right binding powers for infix operators (Pratt precedence).
///
/// Higher numbers bind tighter. The right power is always one more than
/// the left, which makes every operator left-associative.
///
/// Precedence, loosest to tightest: `||` < `&&` < `==`/`!=` <
/// `<`/`<=`/`>`/`>=` < `+`/`-` < `*`/`/`.
fn infix_binding_power(op: BinaryOp) -> (u8, u8) {
match op {
BinaryOp::Or => (1, 2),
BinaryOp::And => (3, 4),
BinaryOp::Eq | BinaryOp::NotEq => (5, 6),
BinaryOp::Lt | BinaryOp::LtEq | BinaryOp::Gt | BinaryOp::GtEq => (7, 8),
BinaryOp::Add | BinaryOp::Sub => (9, 10),
BinaryOp::Mul | BinaryOp::Div => (11, 12),
}
}
/// Parse a primary expression: unsafe-expression, if/else, literal,
/// identifier, list, record, or parenthesized expression.
///
/// The unsafe arm enforces two invariants with dedicated diagnostics:
/// the justification string must be non-empty, and the braces must wrap
/// exactly one expression (no `val` declarations, no trailing content).
fn parse_atom(&mut self) -> Result<Spanned<Expr>, ()> {
// Cloned so the error arm can print the token after mutable calls.
let token = self.peek().clone();
match &token.kind {
// `unsafe("reason") { expr }` — the expression form of unsafe.
TokenKind::Unsafe => {
let start = self.advance();
self.expect(TokenKind::LParen)?;
let reason = self.expect_string("unsafe reason")?;
// Escape hatches require a human-readable justification.
if reason.node.trim().is_empty() {
self.errors.push(
Diagnostic::error_at(
"unsafe blocks require a non-empty justification",
reason.span,
self.filename,
)
.with_code(crate::errors::ErrorCode::UnexpectedToken)
.with_primary_label("justification cannot be empty")
.with_help("provide a reason that explains why this security exception is needed:\n\n  unsafe(\"TICKET-123: Public website approved by security team\") { ... }"),
);
return Err(());
}
self.expect(TokenKind::RParen)?;
self.expect(TokenKind::LBrace)?;
// Reject statements inside the expression form early, with a
// targeted message rather than a generic parse error.
if self.check(&TokenKind::Val) {
let val_token = self.peek().clone();
self.errors.push(
Diagnostic::error_at(
"unsafe blocks must contain a single expression, not statements",
val_token.span,
self.filename,
)
.with_code(crate::errors::ErrorCode::UnexpectedToken)
.with_primary_label("`val` declarations are not allowed inside unsafe")
.with_help("move the val declaration outside and wrap only the risky expression:\n\n val x = unsafe(\"reason\") { riskyExpression }"),
);
return Err(());
}
let body = self.parse_expr()?;
// Exactly one expression: anything before `}` is an error.
if !self.check(&TokenKind::RBrace) {
let next = self.peek().clone();
self.errors.push(
Diagnostic::error_at(
"unsafe blocks can only contain one expression",
next.span,
self.filename,
)
.with_code(crate::errors::ErrorCode::UnexpectedToken)
.with_primary_label("unexpected content after expression")
.with_help("each security decision needs its own unsafe block:\n\n val a = unsafe(\"TICKET-1\") { expr1 }\n val b = unsafe(\"TICKET-2\") { expr2 }"),
);
return Err(());
}
let end = self.expect(TokenKind::RBrace)?;
let span = start.span.merge(end.span);
Ok(Spanned::new(
Expr::Unsafe {
reason,
body: Box::new(body),
},
span,
))
}
// `if cond then a else b` or `if cond { a } else { b }` (the two
// branch styles may be mixed).
TokenKind::If => {
let start = self.advance(); let condition = self.parse_expr()?;
let then_branch = if self.check(&TokenKind::Then) {
self.advance(); self.parse_expr()?
} else if self.check(&TokenKind::LBrace) {
self.advance(); let body = self.parse_expr()?;
self.expect(TokenKind::RBrace)?;
body
} else {
self.error("expected 'then' or '{' after if condition");
return Err(());
};
// `else` is mandatory: every `if` is an expression with a value.
self.expect(TokenKind::Else)?;
let else_branch = if self.check(&TokenKind::LBrace) {
self.advance(); let body = self.parse_expr()?;
self.expect(TokenKind::RBrace)?;
body
} else {
self.parse_expr()?
};
let span = start.span.merge(else_branch.span);
Ok(Spanned::new(
Expr::If {
condition: Box::new(condition),
then_branch: Box::new(then_branch),
else_branch: Box::new(else_branch),
},
span,
))
}
// Literals: string, number, booleans.
TokenKind::String(_) | TokenKind::Number(_) | TokenKind::True | TokenKind::False => {
let token = self.advance();
match &token.kind {
TokenKind::String(s) => Ok(Spanned::new(
Expr::Literal(Literal::String(s.clone())),
token.span,
)),
TokenKind::Number(n) => {
Ok(Spanned::new(Expr::Literal(Literal::Number(*n)), token.span))
}
TokenKind::True => {
Ok(Spanned::new(Expr::Literal(Literal::Bool(true)), token.span))
}
TokenKind::False => Ok(Spanned::new(
Expr::Literal(Literal::Bool(false)),
token.span,
)),
// Guarded by the outer pattern; no other kinds reach here.
_ => unreachable!(),
}
}
TokenKind::Ident(_) => {
let token = self.advance();
if let TokenKind::Ident(name) = &token.kind {
Ok(Spanned::new(Expr::Identifier(name.clone()), token.span))
} else {
unreachable!()
}
}
// List literal `[a, b, ...]`; a trailing comma is allowed.
TokenKind::LBracket => {
let token = self.advance();
let start_span = token.span;
let mut elements = Vec::new();
if !self.check(&TokenKind::RBracket) {
loop {
elements.push(self.parse_expr()?);
if self.check(&TokenKind::Comma) {
self.advance();
if self.check(&TokenKind::RBracket) {
break;
}
} else {
break;
}
}
}
let end = self.expect(TokenKind::RBracket)?;
Ok(Spanned::new(
Expr::List(elements),
start_span.merge(end.span),
))
}
// Record literal `{ name: expr, ... }`; a trailing comma is allowed.
TokenKind::LBrace => {
let token = self.advance();
let start_span = token.span;
let mut fields = Vec::new();
if !self.check(&TokenKind::RBrace) {
loop {
let name = self.expect_ident("field name")?;
self.expect(TokenKind::Colon)?;
let value = self.parse_expr()?;
fields.push(RecordField { name, value });
if self.check(&TokenKind::Comma) {
self.advance();
if self.check(&TokenKind::RBrace) {
break;
}
} else {
break;
}
}
}
let end = self.expect(TokenKind::RBrace)?;
Ok(Spanned::new(
Expr::Record(fields),
start_span.merge(end.span),
))
}
// Parenthesized grouping: the inner expression's span is kept as-is.
TokenKind::LParen => {
self.advance();
let expr = self.parse_expr()?;
self.expect(TokenKind::RParen)?;
Ok(expr)
}
_ => {
self.error(&format!("expected expression, found {}", token.kind));
Err(())
}
}
}
/// Parse the argument list `(...)` of a call to `callee`, supporting
/// positional arguments, named `name: value` arguments, and an optional
/// trailing comma.
fn parse_call(&mut self, callee: Spanned<Expr>) -> Result<Spanned<Expr>, ()> {
    self.expect(TokenKind::LParen)?;
    let mut args = Vec::new();
    while !self.check(&TokenKind::RParen) {
        // `ident :` introduces a named argument; anything else is positional.
        let named = self.peek().is_ident()
            && self.peek_next().is_some_and(|t| t.kind == TokenKind::Colon);
        let name = if named {
            let n = self.expect_ident("argument name")?;
            self.expect(TokenKind::Colon)?;
            Some(n)
        } else {
            None
        };
        let value = self.parse_expr()?;
        args.push(Arg { name, value });
        if !self.check(&TokenKind::Comma) {
            break;
        }
        // Consume the comma; a trailing one before `)` simply ends the loop.
        self.advance();
    }
    let end = self.expect(TokenKind::RParen)?;
    let span = callee.span.merge(end.span);
    Ok(Spanned::new(
        Expr::FuncCall {
            callee: Box::new(callee),
            args,
        },
        span,
    ))
}
/// Current token without consuming it.
///
/// Indexing is unchecked: assumes the lexer always terminates the stream
/// with an `Eof` token and that `advance` never moves past it — TODO
/// confirm the lexer invariant.
fn peek(&self) -> &Token {
&self.tokens[self.current]
}
/// One-token lookahead: the token after the current one, if any.
/// Never consumes.
fn peek_next(&self) -> Option<&Token> {
self.tokens.get(self.current + 1)
}
/// True when the current token is the end-of-file marker.
fn is_at_end(&self) -> bool {
self.peek().kind == TokenKind::Eof
}
/// Consume and return the current token.
///
/// At end of input the cursor stays put and the token *before* `Eof` is
/// returned again, so callers are expected to gate on `is_at_end`/`check`
/// first. NOTE(review): `current - 1` would underflow if this were ever
/// called before any token has been consumed on an `Eof`-only stream;
/// the `is_at_end` guards in the statement loops appear to prevent that,
/// but it is worth confirming.
fn advance(&mut self) -> Token {
if !self.is_at_end() {
self.current += 1;
}
self.tokens[self.current - 1].clone()
}
/// Does the current token have the same *variant* as `kind`?
///
/// Compares `mem::discriminant`s, so payloads are ignored — e.g. a
/// `TokenKind::Ident(String::new())` argument matches any identifier.
/// Always false at end of input, so `Eof` itself can never be matched.
fn check(&self, kind: &TokenKind) -> bool {
if self.is_at_end() {
false
} else {
std::mem::discriminant(&self.peek().kind) == std::mem::discriminant(kind)
}
}
/// Consume a token of the given kind, or record a diagnostic anchored at
/// the current (unconsumed) token and return `Err`.
fn expect(&mut self, kind: TokenKind) -> Result<Token, ()> {
    if !self.check(&kind) {
        let message = format!("expected {}, found {}", kind.name(), self.peek().kind);
        self.error(&message);
        return Err(());
    }
    Ok(self.advance())
}
/// Consume an identifier and return its name and span.
///
/// Bug fix: the previous version advanced *before* validating, which
/// (a) made `error()` anchor the diagnostic at the token *after* the
/// offending one, and (b) at end of input reported the previously
/// consumed token instead of `Eof`. Peeking first fixes both; the bad
/// token is left in place for `synchronize` to skip.
fn expect_ident(&mut self, context: &str) -> Result<Spanned<String>, ()> {
    if matches!(self.peek().kind, TokenKind::Ident(_)) {
        let token = self.advance();
        match token.kind {
            TokenKind::Ident(name) => Ok(Spanned::new(name, token.span)),
            // Guarded by the matches! above.
            _ => unreachable!(),
        }
    } else {
        self.error(&format!(
            "expected {} (identifier), found {}",
            context,
            self.peek().kind
        ));
        Err(())
    }
}
/// Consume a string literal and return its contents and span.
///
/// Bug fix (same as `expect_ident`): validate by peeking before
/// consuming, so the diagnostic is anchored at the offending token
/// rather than the one after it, and `Eof` is reported correctly.
fn expect_string(&mut self, context: &str) -> Result<Spanned<String>, ()> {
    if matches!(self.peek().kind, TokenKind::String(_)) {
        let token = self.advance();
        match token.kind {
            TokenKind::String(s) => Ok(Spanned::new(s, token.span)),
            // Guarded by the matches! above.
            _ => unreachable!(),
        }
    } else {
        self.error(&format!(
            "expected {} (string), found {}",
            context,
            self.peek().kind
        ));
        Err(())
    }
}
/// Record a parse diagnostic anchored at the current (unconsumed) token.
fn error(&mut self, message: &str) {
    let span = self.peek().span;
    let diagnostic = Diagnostic::error_at(message.to_string(), span, self.filename.to_string());
    self.errors.push(diagnostic);
}
/// Error recovery: skip tokens until the start of the next statement.
///
/// The current (offending) token is always skipped first so recovery
/// makes progress; we then stop at any keyword that can begin a
/// statement. Bug fix: `hcl` was missing from this set even though
/// `parse_statement` accepts it, so a parse error before an
/// `hcl(...) { ... }` block would skip the whole block instead of
/// resuming there.
fn synchronize(&mut self) {
    self.advance();
    while !self.is_at_end() {
        match self.peek().kind {
            TokenKind::Val
            | TokenKind::Import
            | TokenKind::Unsafe
            | TokenKind::Module
            | TokenKind::Hcl
            | TokenKind::Assert => return,
            _ => {
                self.advance();
            }
        }
    }
}
}
// Parser unit tests: each one lexes a small source string, parses it, and
// asserts on the resulting AST shape.
#[cfg(test)]
mod tests {
use super::*;
use crate::lexer::tokenize;
// Helper: lex + parse a snippet under a fixed test filename.
fn parse_str(source: &str) -> Result<Program, Vec<Diagnostic>> {
let tokens = tokenize(source, "test.hk").unwrap();
parse(tokens, source, "test.hk")
}
#[test]
fn test_parse_val_decl() {
let program = parse_str("val x = 42").unwrap();
assert_eq!(program.statements.len(), 1);
match &program.statements[0].node {
Statement::ValDecl(v) => {
assert_eq!(v.name.node, "x");
}
_ => panic!("expected ValDecl"),
}
}
#[test]
fn test_parse_import() {
let program = parse_str(r#"import "legacy.tf" as legacy"#).unwrap();
assert_eq!(program.statements.len(), 1);
match &program.statements[0].node {
Statement::Import(i) => {
assert_eq!(i.path.node, "legacy.tf");
assert_eq!(i.alias.as_ref().unwrap().node, "legacy");
}
_ => panic!("expected Import"),
}
}
#[test]
fn test_parse_member_access() {
let program = parse_str("val bucket = S3.createBucket(\"data\")").unwrap();
assert_eq!(program.statements.len(), 1);
}
// Expression form: unsafe("reason") { expr }.
#[test]
fn test_parse_unsafe_expr() {
let source = r#"val x = unsafe("ticket #123") { legacy.bucket }"#;
let program = parse_str(source).unwrap();
assert_eq!(program.statements.len(), 1);
match &program.statements[0].node {
Statement::ValDecl(decl) => match &decl.value.node {
Expr::Unsafe { reason, .. } => {
assert_eq!(reason.node, "ticket #123");
}
_ => panic!("expected Unsafe expression"),
},
_ => panic!("expected ValDecl"),
}
}
#[test]
fn test_parse_list() {
let program = parse_str(r#"val zones = ["a", "b", "c"]"#).unwrap();
assert_eq!(program.statements.len(), 1);
}
// Single-parameter lambda sugar: `x => x + 1`.
#[test]
fn test_parse_lambda() {
let source = r#"val f = x => x + 1"#;
let program = parse_str(source).unwrap();
match &program.statements[0].node {
Statement::ValDecl(decl) => match &decl.value.node {
Expr::Lambda { params, .. } => {
assert_eq!(params.len(), 1);
assert_eq!(params[0].node, "x");
}
_ => panic!("expected Lambda"),
},
_ => panic!("expected ValDecl"),
}
}
// Lambda nested as a call argument exercises the speculative lookahead.
#[test]
fn test_parse_lambda_in_call() {
let source = r#"val subnets = zones.map(z => Network.createSubnet(zone: z))"#;
let program = parse_str(source).unwrap();
assert_eq!(program.statements.len(), 1);
match &program.statements[0].node {
Statement::ValDecl(decl) => match &decl.value.node {
Expr::FuncCall { args, .. } => {
assert_eq!(args.len(), 1);
match &args[0].value.node {
Expr::Lambda { params, .. } => {
assert_eq!(params[0].node, "z");
}
_ => panic!("expected Lambda argument"),
}
}
_ => panic!("expected FuncCall"),
},
_ => panic!("expected ValDecl"),
}
}
// `*` binds tighter than `+`, so the root operator must be Add.
#[test]
fn test_parse_binary_expr() {
let source = "val x = 1 + 2 * 3";
let program = parse_str(source).unwrap();
match &program.statements[0].node {
Statement::ValDecl(decl) => {
match &decl.value.node {
Expr::Binary { op, .. } => {
assert_eq!(*op, BinaryOp::Add);
}
_ => panic!("expected Binary"),
}
}
_ => panic!("expected ValDecl"),
}
}
#[test]
fn test_parse_comparison() {
let source = "val cond = x == 5 && y < 10";
let program = parse_str(source).unwrap();
assert_eq!(program.statements.len(), 1);
}
#[test]
fn test_parse_unary() {
let source = "val neg = -x";
let program = parse_str(source).unwrap();
match &program.statements[0].node {
Statement::ValDecl(decl) => match &decl.value.node {
Expr::Unary { op, .. } => {
assert_eq!(*op, UnaryOp::Neg);
}
_ => panic!("expected Unary"),
},
_ => panic!("expected ValDecl"),
}
}
#[test]
fn test_parse_filter_with_lambda() {
let source = r#"val public = buckets.filter(b => b.isPublic)"#;
let program = parse_str(source).unwrap();
assert_eq!(program.statements.len(), 1);
}
// Parenthesized parameter list form: `(a, b) => ...`.
#[test]
fn test_parse_multi_param_lambda() {
let source = r#"val sum = (a, b) => a + b"#;
let program = parse_str(source).unwrap();
match &program.statements[0].node {
Statement::ValDecl(decl) => match &decl.value.node {
Expr::Lambda { params, .. } => {
assert_eq!(params.len(), 2);
assert_eq!(params[0].node, "a");
assert_eq!(params[1].node, "b");
}
_ => panic!("expected Lambda"),
},
_ => panic!("expected ValDecl"),
}
}
// The hcl body is captured as raw source text, not parsed.
#[test]
fn test_parse_hcl_block() {
let source = r#"hcl("legacy") { resource "foo" "bar" {} }"#;
let program = parse_str(source).unwrap();
match &program.statements[0].node {
Statement::HclBlock(hcl) => {
assert_eq!(hcl.reason.node, "legacy");
assert!(hcl.content.node.contains("resource \"foo\" \"bar\""));
}
_ => panic!("expected HclBlock"),
}
}
// Statement form of unsafe wrapping an hcl block.
#[test]
fn test_parse_unsafe_stmt() {
let source = r#"unsafe("bypass") { hcl("inner") { } }"#;
let program = parse_str(source).unwrap();
match &program.statements[0].node {
Statement::Unsafe(u) => {
assert_eq!(u.reason.node, "bypass");
assert_eq!(u.body.len(), 1);
match &u.body[0].node {
Statement::HclBlock(hcl) => {
assert_eq!(hcl.reason.node, "inner");
}
_ => panic!("expected HclBlock inside Unsafe"),
}
}
_ => panic!("expected Unsafe statement"),
}
}
}