use crate::encoding::Encoding;
use crate::{TermLexer, TermToken, TokenID, Value};
use arena_terms::{Arena, Assoc, Fixity, MAX_OPER_PREC, MIN_OPER_PREC, Term, View};
use parlex::{
LexerStats, ParlexError, Parser, ParserAction, ParserData, ParserDriver, ParserStats, Token,
};
use parser_data::{AmbigID, ParData, ProdID, StateID};
use std::marker::PhantomData;
use try_next::TryNextWithContext;
/// LALR parser tables generated into `OUT_DIR` by the crate's build script;
/// provides `ParData`, `ProdID`, `StateID`, and `AmbigID` used below.
pub mod parser_data {
    include!(concat!(env!("OUT_DIR"), "/parser_data.rs"));
}
/// Driver supplying the semantic actions for the generated term parser:
/// shift/reduce ambiguity resolution via operator precedence, and term
/// construction on reduction.
pub struct TermParserDriver<I> {
    /// Ties the driver to the token-input type `I` without storing one.
    _marker: PhantomData<I>,
    /// Scratch stack of completed sub-terms for variadic productions
    /// (argument lists, sequences). `Value::Index` tokens record where each
    /// in-progress sequence starts in this vector.
    terms: Vec<Term>,
}
impl<I> ParserDriver for TermParserDriver<I>
where
    I: TryNextWithContext<Arena, LexerStats, Item = TermToken, Error: std::fmt::Display + 'static>,
{
    type ParserData = ParData;
    type Token = TermToken;
    type Parser = Parser<I, Self, Self::Context>;
    type Context = Arena;

    /// Decides a shift/reduce conflict between a reducible operator
    /// production and the incoming operator token `tok2`, using the operator
    /// tables stored in `arena`.
    ///
    /// The generated ambiguity table is expected to list the shift action
    /// first and the reduce action second; any other layout is a bug in the
    /// table generator (hence the panics rather than recoverable errors).
    ///
    /// # Errors
    /// Returns a `ParlexError` for conflicts on unexpected productions, for
    /// chains of non-associative operators, and for associativity or
    /// precedence combinations that cannot be resolved without parentheses.
    fn resolve_ambiguity(
        &mut self,
        parser: &mut Self::Parser,
        arena: &mut Self::Context,
        ambig: <Self::ParserData as ParserData>::AmbigID,
        tok2: &Self::Token,
    ) -> Result<ParserAction<StateID, ProdID, AmbigID>, ParlexError> {
        let ambigs = ParData::lookup_ambig(ambig);
        // Invariant of the generated tables: entry 0 shifts, entry 1 reduces.
        let shift_action = ambigs[0];
        let ParserAction::Shift(_) = shift_action else {
            panic!("expected shift");
        };
        let reduce_action = ambigs[1];
        let ParserAction::Reduce(prod_id) = reduce_action else {
            panic!("expected reduce");
        };
        log::trace!(
            "Conflict between reducing {:?} and shifting {:?}",
            prod_id,
            tok2
        );
        // Locate the operator token (tok1) of the reducible production on the
        // token stack. Its offset from the top, and the fixity to look up,
        // depend on the production's shape. Conflicts on any other production
        // are unexpected and reported as errors.
        let (fixity1, tok1) = match prod_id {
            ProdID::Infix1 => {
                (Fixity::Infix, parser.tokens_peek(1))
            }
            ProdID::Infix2 => {
                (Fixity::Infix, parser.tokens_peek(3))
            }
            ProdID::Prefix1 => {
                (Fixity::Prefix, parser.tokens_peek(1))
            }
            ProdID::Prefix2 => {
                (Fixity::Prefix, parser.tokens_peek(3))
            }
            ProdID::Postfix1 => {
                (Fixity::Postfix, parser.tokens_peek(0))
            }
            ProdID::Postfix2 => {
                (Fixity::Postfix, parser.tokens_peek(2))
            }
            _ => {
                return Err(ParlexError {
                    message: format!(
                        "unexpected conflict: reduction of {:?} with shifting token {:?}",
                        prod_id, tok2
                    ),
                    span: tok2.span(),
                });
            }
        };
        let op_tab1 = arena.get_oper(tok1.op_tab_index);
        let op_tab2 = arena.get_oper(tok2.op_tab_index);
        assert!(op_tab1.is_oper());
        if op_tab2.is_oper() {
            // If tok1 has no definition for the fixity being reduced,
            // shifting is the only sensible option.
            let op_def1 = match op_tab1[fixity1] {
                Some(ref op_def1) => op_def1,
                None => return Ok(shift_action),
            };
            let prec1 = op_def1.prec;
            let assoc1 = op_def1.assoc;
            // tok2 may be defined as infix and/or postfix; compute the range
            // of its possible precedences. Missing entries fall back to the
            // extreme value that keeps the comparison conservative.
            let min_prec2 = std::cmp::min(
                op_tab2[Fixity::Infix]
                    .as_ref()
                    .map(|x| x.prec)
                    .unwrap_or(MAX_OPER_PREC),
                op_tab2[Fixity::Postfix]
                    .as_ref()
                    .map(|x| x.prec)
                    .unwrap_or(MAX_OPER_PREC),
            );
            let max_prec2 = std::cmp::max(
                op_tab2[Fixity::Infix]
                    .as_ref()
                    .map(|x| x.prec)
                    .unwrap_or(MIN_OPER_PREC),
                op_tab2[Fixity::Postfix]
                    .as_ref()
                    .map(|x| x.prec)
                    .unwrap_or(MIN_OPER_PREC),
            );
            // A strictly greater precedence value on the stacked operator
            // means it binds tighter: reduce it first. A strictly smaller
            // one lets tok2 bind tighter: shift.
            if prec1 > min_prec2 {
                Ok(reduce_action)
            } else if prec1 < max_prec2 {
                Ok(shift_action)
            } else if min_prec2 == max_prec2 && prec1 == min_prec2 {
                // Exact precedence tie: fall back to associativity, rejecting
                // any chain that involves a non-associative operator.
                if assoc1 == Assoc::None {
                    return Err(ParlexError {
                        message: format!(
                            "precedence conflict: cannot chain non-associative operator {:?}; use parenthesis",
                            tok1
                        ),
                        span: tok2.span(),
                    });
                }
                if op_tab2[Fixity::Infix]
                    .as_ref()
                    .is_some_and(|x| x.assoc == Assoc::None)
                    || op_tab2[Fixity::Postfix]
                        .as_ref()
                        .is_some_and(|x| x.assoc == Assoc::None)
                {
                    return Err(ParlexError {
                        message: format!(
                            "precedence conflict: cannot chain non-associative operator {:?}; use parenthesis",
                            tok2
                        ),
                        span: tok2.span(),
                    });
                }
                // Mixed left/right associativity at equal precedence cannot
                // be resolved deterministically.
                if op_tab2[Fixity::Infix]
                    .as_ref()
                    .is_some_and(|x| x.assoc != assoc1)
                    || op_tab2[Fixity::Postfix]
                        .as_ref()
                        .is_some_and(|x| x.assoc != assoc1)
                {
                    return Err(ParlexError {
                        message: format!(
                            "associativity conflict: cannot chain operators {:?} and {:?}; use parenthesis",
                            tok1, tok2
                        ),
                        span: tok2.span(),
                    });
                } else {
                    // Left-associative chains group leftward (reduce);
                    // right-associative chains group rightward (shift).
                    if assoc1 == Assoc::Left {
                        Ok(reduce_action)
                    } else {
                        Ok(shift_action)
                    }
                }
            } else {
                // tok2's precedence range brackets prec1 with no unique
                // winner; the grouping is genuinely ambiguous.
                return Err(ParlexError {
                    message: format!(
                        "precedence conflict: cannot chain operators {:?} and {:?}; use parenthesis",
                        tok1, tok2
                    ),
                    span: tok2.span(),
                });
            }
        } else {
            // tok2 is not an operator at all; no precedence question arises.
            Ok(shift_action)
        }
    }

    /// Applies the semantic action of production `prod_id`: pops the
    /// production's tokens from the parser's token stack, builds the
    /// corresponding term in `arena`, and pushes a result token back.
    ///
    /// Variadic productions (argument lists, list/tuple elements, sequences)
    /// stash their elements in `self.terms`; a `Value::Index` token records
    /// where each sequence starts, and `truncate(index)` reclaims the stash
    /// once the sequence has been consumed.
    fn reduce(
        &mut self,
        parser: &mut Self::Parser,
        arena: &mut Self::Context,
        prod_id: <Self::ParserData as ParserData>::ProdID,
        token: &Self::Token,
    ) -> Result<(), ParlexError> {
        match prod_id {
            ProdID::Start => {
                // The start production is never reduced through this driver.
                unreachable!()
            }
            ProdID::Term1 => {
                // term ::= expr — relabel the expression token as a term.
                let mut expr_tok = parser.tokens_pop();
                expr_tok.token_id = TokenID::Term;
                parser.tokens_push(expr_tok);
            }
            ProdID::Term2 => {
                // term ::= expr '.' — relabel and extend the span to cover
                // the terminating dot.
                let dot = parser.tokens_pop();
                let mut expr_tok = parser.tokens_pop();
                expr_tok.token_id = TokenID::Term;
                expr_tok.merge_span(&dot);
                parser.tokens_push(expr_tok);
            }
            ProdID::Term3 => {
                // Empty term: produce a valueless Term token at `token`.
                parser.tokens_push(TermToken::new(TokenID::Term, Value::None, token.span()));
            }
            ProdID::Term4 => {
                // Empty term consisting of a lone terminating dot.
                let dot = parser.tokens_pop();
                parser.tokens_push(TermToken::new(TokenID::Term, Value::None, dot.span()));
            }
            ProdID::Func => {
                // functor '(' args ')' — apply the functor to the stashed
                // arguments and normalize through the operator table.
                let right_paren = parser.tokens_pop();
                let index = usize::try_from(parser.tokens_pop().value)?;
                let mut func_tok = parser.tokens_pop();
                func_tok.merge_span(&right_paren);
                let span = func_tok.span();
                let op_tab_index = func_tok.op_tab_index;
                let functor = Term::try_from(func_tok.value)?;
                let vs = std::iter::once(&functor).chain(self.terms[index..].iter());
                let term = arena
                    .funcv(vs)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                self.terms.truncate(index);
                let term = arena
                    .normalize_term(term, Fixity::Fun, op_tab_index)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                parser.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), span));
            }
            ProdID::List => {
                // '[' elems ']' — build a proper list from the stash.
                let right_brack_tok = parser.tokens_pop();
                let seq_tok = parser.tokens_pop();
                let mut left_brack_tok = parser.tokens_pop();
                left_brack_tok.merge_span(&right_brack_tok);
                let index = usize::try_from(seq_tok.value)?;
                let term = arena.list(&self.terms[index..]);
                self.terms.truncate(index);
                parser.tokens_push(TermToken::new(
                    TokenID::Expr,
                    Value::Term(term),
                    left_brack_tok.span(),
                ));
            }
            ProdID::Nil => {
                // '[' ']' — the empty list.
                let right_brack_tok = parser.tokens_pop();
                let mut left_brack_tok = parser.tokens_pop();
                left_brack_tok.merge_span(&right_brack_tok);
                parser.tokens_push(TermToken::new(
                    TokenID::Expr,
                    Value::Term(Term::NIL),
                    left_brack_tok.span(),
                ));
            }
            ProdID::List2 => {
                // '[' elems '|' tail ']' — list with an explicit tail.
                let right_brack_tok = parser.tokens_pop();
                let tail = Term::try_from(parser.tokens_pop().value)?;
                parser.tokens_pop();
                let index = usize::try_from(parser.tokens_pop().value)?;
                let mut left_brack_tok = parser.tokens_pop();
                left_brack_tok.merge_span(&right_brack_tok);
                let term = arena.listc(&self.terms[index..], tail);
                self.terms.truncate(index);
                parser.tokens_push(TermToken::new(
                    TokenID::Expr,
                    Value::Term(term),
                    left_brack_tok.span(),
                ));
            }
            ProdID::Tuple => {
                // '(' elems ')' — a single parenthesized expression unwraps
                // to the expression itself; otherwise build a tuple.
                let right_paren_tok = parser.tokens_pop();
                let seq_tok = parser.tokens_pop();
                let mut left_paren_tok = parser.tokens_pop();
                left_paren_tok.merge_span(&right_paren_tok);
                let index = usize::try_from(seq_tok.value)?;
                let vs = &self.terms[index..];
                let term = if vs.len() == 1 {
                    vs[0]
                } else {
                    arena.tuple(vs)
                };
                self.terms.truncate(index);
                parser.tokens_push(TermToken::new(
                    TokenID::Expr,
                    Value::Term(term),
                    left_paren_tok.span(),
                ));
            }
            ProdID::Unit => {
                // '(' ')' — the unit value.
                let right_paren_tok = parser.tokens_pop();
                let mut left_paren_tok = parser.tokens_pop();
                left_paren_tok.merge_span(&right_paren_tok);
                parser.tokens_push(TermToken::new(
                    TokenID::Expr,
                    Value::Term(Term::UNIT),
                    left_paren_tok.span(),
                ));
            }
            ProdID::Var | ProdID::Int | ProdID::Real | ProdID::Date | ProdID::Str | ProdID::Bin => {
                // Literal tokens become expressions unchanged.
                let mut tok = parser.tokens_pop();
                tok.token_id = TokenID::Expr;
                parser.tokens_push(tok);
            }
            ProdID::Atom => {
                // A bare atom, normalized through its operator table entry.
                let atom_tok = parser.tokens_pop();
                let span = atom_tok.span();
                let op_tab_index = atom_tok.op_tab_index;
                let atom = Term::try_from(atom_tok.value)?;
                let term = arena
                    .normalize_term(atom, Fixity::Fun, op_tab_index)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                parser.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), span));
            }
            ProdID::Infix1 => {
                // expr oper expr — binary operator application.
                let expr2_tok = parser.tokens_pop();
                let oper_tok = parser.tokens_pop();
                let mut expr1_tok = parser.tokens_pop();
                expr1_tok.merge_span(&expr2_tok);
                let span = expr1_tok.span();
                let op_tab_index = oper_tok.op_tab_index;
                let expr2 = Term::try_from(expr2_tok.value)?;
                let oper = Term::try_from(oper_tok.value)?;
                let expr1 = Term::try_from(expr1_tok.value)?;
                let term = arena
                    .funcv([oper, expr1, expr2])
                    .map_err(|e| ParlexError::from_err(e, span))?;
                let term = arena
                    .normalize_term(term, Fixity::Infix, op_tab_index)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                parser.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), span));
            }
            ProdID::Infix2 => {
                // Infix application with extra trailing arguments collected
                // in the stash.
                let expr2_tok = parser.tokens_pop();
                parser.tokens_pop();
                let index = usize::try_from(parser.tokens_pop().value)?;
                let oper_tok = parser.tokens_pop();
                let mut expr1_tok = parser.tokens_pop();
                expr1_tok.merge_span(&expr2_tok);
                let span = expr1_tok.span();
                let op_tab_index = oper_tok.op_tab_index;
                let expr2 = Term::try_from(expr2_tok.value)?;
                let oper = Term::try_from(oper_tok.value)?;
                let expr1 = Term::try_from(expr1_tok.value)?;
                let xs = [oper, expr1, expr2];
                let vs = xs.iter().chain(self.terms[index..].iter());
                let term = arena
                    .funcv(vs)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                self.terms.truncate(index);
                let term = arena
                    .normalize_term(term, Fixity::Infix, op_tab_index)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                parser.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), span));
            }
            ProdID::Prefix1 => {
                // oper expr — prefix application. A '-' applied directly to
                // a numeric literal is folded into a negative literal rather
                // than kept as a function application.
                let expr1_tok = parser.tokens_pop();
                let mut oper_tok = parser.tokens_pop();
                oper_tok.merge_span(&expr1_tok);
                let span = oper_tok.span();
                let op_tab_index = oper_tok.op_tab_index;
                let expr1 = Term::try_from(expr1_tok.value)?;
                let oper = Term::try_from(oper_tok.value)?;
                let term = match oper
                    .view(arena)
                    .map_err(|e| ParlexError::from_err(e, span))?
                {
                    View::Atom(s)
                        if s == "-"
                            && matches!(
                                expr1
                                    .view(arena)
                                    .map_err(|e| ParlexError::from_err(e, span))?,
                                View::Int(_) | View::Real(_)
                            ) =>
                    {
                        match expr1
                            .view(arena)
                            .map_err(|e| ParlexError::from_err(e, span))?
                        {
                            View::Int(i) => arena.int(-i),
                            View::Real(r) => arena.real(-r),
                            _ => unreachable!(),
                        }
                    }
                    _ => {
                        let term = arena
                            .funcv([oper, expr1])
                            .map_err(|e| ParlexError::from_err(e, span))?;
                        arena
                            .normalize_term(term, Fixity::Prefix, op_tab_index)
                            .map_err(|e| ParlexError::from_err(e, span))?
                    }
                };
                parser.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), span));
            }
            ProdID::Prefix2 => {
                // Prefix application with extra trailing arguments from the
                // stash.
                let expr1_tok = parser.tokens_pop();
                parser.tokens_pop();
                let index = usize::try_from(parser.tokens_pop().value)?;
                let mut oper_tok = parser.tokens_pop();
                oper_tok.merge_span(&expr1_tok);
                let span = oper_tok.span();
                let op_tab_index = oper_tok.op_tab_index;
                let oper = Term::try_from(oper_tok.value)?;
                let expr1 = Term::try_from(expr1_tok.value)?;
                let xs = [oper, expr1];
                let vs = xs.iter().chain(self.terms[index..].iter());
                let term = arena
                    .funcv(vs)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                self.terms.truncate(index);
                let term = arena
                    .normalize_term(term, Fixity::Prefix, op_tab_index)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                parser.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), span));
            }
            ProdID::Postfix1 => {
                // expr oper — postfix application.
                let oper_tok = parser.tokens_pop();
                let mut expr1_tok = parser.tokens_pop();
                expr1_tok.merge_span(&oper_tok);
                let span = expr1_tok.span();
                let op_tab_index = oper_tok.op_tab_index;
                let oper = Term::try_from(oper_tok.value)?;
                let expr1 = Term::try_from(expr1_tok.value)?;
                let term = arena
                    .funcv([oper, expr1])
                    .map_err(|e| ParlexError::from_err(e, span))?;
                let term = arena
                    .normalize_term(term, Fixity::Postfix, op_tab_index)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                parser.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), span));
            }
            ProdID::Postfix2 => {
                // Postfix application with extra trailing arguments from the
                // stash.
                let right_paren_tok = parser.tokens_pop();
                let index = usize::try_from(parser.tokens_pop().value)?;
                let oper_tok = parser.tokens_pop();
                let mut expr1_tok = parser.tokens_pop();
                expr1_tok.merge_span(&right_paren_tok);
                let span = expr1_tok.span();
                let op_tab_index = oper_tok.op_tab_index;
                let oper = Term::try_from(oper_tok.value)?;
                let expr1 = Term::try_from(expr1_tok.value)?;
                let xs = [oper, expr1];
                let vs = xs.iter().chain(self.terms[index..].iter());
                let term = arena
                    .funcv(vs)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                self.terms.truncate(index);
                let term = arena
                    .normalize_term(term, Fixity::Postfix, op_tab_index)
                    .map_err(|e| ParlexError::from_err(e, span))?;
                parser.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), span));
            }
            ProdID::Seq1 => {
                // seq ::= bare_seq — relabel.
                let mut bare_seq_tok = parser.tokens_pop();
                bare_seq_tok.token_id = TokenID::Seq;
                parser.tokens_push(bare_seq_tok);
            }
            ProdID::Seq2 => {
                // seq ::= bare_seq separator — drop the trailing separator
                // and relabel.
                parser.tokens_pop();
                let mut bare_seq_tok = parser.tokens_pop();
                bare_seq_tok.token_id = TokenID::Seq;
                parser.tokens_push(bare_seq_tok);
            }
            ProdID::BareSeq1 => {
                // First element of a sequence: stash it and record the stash
                // start index in the token's value.
                let expr_tok = parser.tokens_pop();
                let span = expr_tok.span();
                let expr = Term::try_from(expr_tok.value)?;
                let index = self.terms.len();
                self.terms.push(expr);
                parser.tokens_push(TermToken::new(TokenID::BareSeq, Value::Index(index), span));
            }
            ProdID::BareSeq2 => {
                // Subsequent element: append to the stash; the BareSeq index
                // token already on the stack stays in place.
                let expr_tok = parser.tokens_pop();
                let expr = Term::try_from(expr_tok.value)?;
                parser.tokens_pop();
                self.terms.push(expr);
            }
        }
        Ok(())
    }
}
/// Streaming parser producing `TermToken`s from raw byte input: a
/// `TermLexer` feeding a table-driven `Parser` run by `TermParserDriver`.
pub struct TermTokenParser<I>
where
    I: TryNextWithContext<Arena, Item = u8, Error: std::fmt::Display + 'static>,
{
    // The lexer/driver pipeline; the arena is threaded through per call.
    parser: Parser<TermLexer<I>, TermParserDriver<TermLexer<I>>, Arena>,
}
impl<I> TermTokenParser<I>
where
    I: TryNextWithContext<Arena, Item = u8, Error: std::fmt::Display + 'static>,
{
    /// Builds a token-level parser over the given byte input.
    ///
    /// # Errors
    /// Fails if the lexer cannot be constructed for `input`/`encoding`.
    pub fn try_new(input: I, encoding: Encoding) -> Result<Self, ParlexError> {
        let driver = TermParserDriver {
            _marker: PhantomData,
            terms: Vec::new(),
        };
        Ok(Self {
            parser: Parser::new(TermLexer::try_new(input, encoding)?, driver),
        })
    }
}
/// Parses operator-definition terms from `defs_input` and registers each one
/// with the arena's operator tables.
///
/// # Errors
/// Propagates parse errors, and wraps arena definition errors (without a
/// source span) in a `ParlexError`.
pub fn define_opers<I>(arena: &mut Arena, defs_input: I, encoding: Encoding) -> Result<(), ParlexError>
where
    I: TryNextWithContext<Arena, Item = u8, Error: std::fmt::Display + 'static>,
{
    let mut defs_parser = TermParser::try_new(defs_input, encoding)?;
    loop {
        match defs_parser.try_next_with_context(arena)? {
            Some(term) => {
                arena
                    .define_opers(term)
                    .map_err(|e| ParlexError::from_err(e, None))?;
            }
            None => return Ok(()),
        }
    }
}
// Token-level iteration: each call yields the next `TermToken`, threading
// the arena through as parsing context.
impl<I> TryNextWithContext<Arena, (LexerStats, ParserStats)> for TermTokenParser<I>
where
    I: TryNextWithContext<Arena, Item = u8, Error: std::fmt::Display + 'static>,
{
    type Item = TermToken;
    type Error = ParlexError;

    /// Delegates to the wrapped lexer/parser pipeline.
    fn try_next_with_context(
        &mut self,
        context: &mut Arena,
    ) -> Result<Option<TermToken>, ParlexError> {
        self.parser.try_next_with_context(context)
    }

    /// Combined lexer and parser statistics from the pipeline.
    fn stats(&self) -> (LexerStats, ParserStats) {
        self.parser.stats()
    }
}
/// High-level parser yielding complete `Term`s from raw byte input, built
/// on top of the token-level `TermTokenParser`.
pub struct TermParser<I>
where
    I: TryNextWithContext<Arena, Item = u8, Error: std::fmt::Display + 'static>,
{
    // Exposed within the crate so sibling modules can reach the token layer.
    pub(crate) parser: TermTokenParser<I>,
}
impl<I> TermParser<I>
where
    I: TryNextWithContext<Arena, Item = u8, Error: std::fmt::Display + 'static>,
{
    /// Creates a term-level parser from raw byte input.
    ///
    /// # Errors
    /// Fails if the underlying token parser cannot be constructed.
    pub fn try_new(input: I, encoding: Encoding) -> Result<Self, ParlexError> {
        TermTokenParser::try_new(input, encoding).map(|parser| Self { parser })
    }
}
// Term-level iteration: pulls tokens from the token parser until a complete
// term is produced.
impl<I> TryNextWithContext<Arena, (LexerStats, ParserStats)> for TermParser<I>
where
    I: TryNextWithContext<Arena, Item = u8, Error: std::fmt::Display + 'static>,
{
    type Item = Term;
    type Error = ParlexError;

    /// Returns the next complete term, skipping valueless (`Value::None`)
    /// term tokens, or `None` at end of input.
    ///
    /// # Errors
    /// Propagates parser errors. A `Value::Index` token is internal
    /// bookkeeping that must never escape the token parser, so seeing one
    /// here is reported as an error.
    fn try_next_with_context(&mut self, context: &mut Arena) -> Result<Option<Term>, ParlexError> {
        while let Some(TermToken { value, .. }) = self.parser.try_next_with_context(context)? {
            match value {
                Value::Term(term) => return Ok(Some(term)),
                Value::None => continue,
                Value::Index(_) => {
                    return Err(ParlexError {
                        // `format!` with no interpolation was pointless
                        // (clippy::useless_format); a plain conversion does.
                        message: "index token not expected".to_string(),
                        span: None,
                    });
                }
            }
        }
        Ok(None)
    }

    /// Combined lexer and parser statistics from the underlying pipeline.
    fn stats(&self) -> (LexerStats, ParserStats) {
        self.parser.stats()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use try_next::IterInput;

    // Operator definitions used by most tests: comparison, arithmetic and
    // boolean operators, plus a postfix minus that is renamed on definition.
    const SAMPLE_DEFS: &str = r#"[
op(==(x,y),infix,350,none),
op(!=(x,y),infix,350,none),
op( <(x,y),infix,350,none),
op( >(x,y),infix,350,none),
op(<=(x,y),infix,350,none),
op(>=(x,y),infix,350,none),
op('+'(x,y),infix,380,left),
op('-'(x,y),infix,380,left),
op('-'(x),postfix,900,left, rename_to=some('postfix_minus')),
op('*'(x,y),infix,400,left),
op('/'(x,y),infix,400,left),
op('+'(x),prefix,800,right),
op(and(x,y),infix,300,left),
op(or(x,y),infix,250,left),
op(not(x),prefix,800,right),
]"#;

    // Helper: optionally installs operator definitions, then parses `s`
    // into all the terms it contains.
    fn parse(arena: &mut Arena, defs: Option<&str>, s: &str) -> Vec<Term> {
        let input = IterInput::from(s.bytes());
        let mut parser = TermParser::try_new(input, Encoding::Utf8).expect("cannot create parser");
        if let Some(defs) = defs {
            let defs_input = IterInput::from(defs.bytes());
            define_opers(arena, defs_input, Encoding::Utf8).expect("cannot define ops");
        }
        let ts = parser
            .try_collect_with_context(arena)
            .expect("parser error");
        dbg!(parser.stats());
        ts
    }

    // Empty terms (bare dots) are skipped; precedence groups '*' over '<='.
    #[test]
    fn one_term() {
        let _ = env_logger::builder().is_test(true).try_init();
        let arena = &mut Arena::try_with_default_opers().unwrap();
        let ts = parse(arena, Some(SAMPLE_DEFS), " . . 2 * 2 <= 5 . .");
        dbg!(&ts);
        let s = format!("{}", ts[0].display(arena));
        dbg!(&s);
        assert_eq!(ts.len(), 1);
        assert_eq!(s, "'<='('*'(2, 2), 5)");
    }

    // An interpolated string binds as a single operand of the outer '+'.
    #[test]
    fn string_interpolation_outer_paren_isolation() {
        let _ = env_logger::builder().is_test(true).try_init();
        let arena = &mut Arena::try_with_default_opers().unwrap();
        let ts = parse(arena, Some(SAMPLE_DEFS), r#""a{xx}b" + 1 ."#);
        assert_eq!(ts.len(), 1);
        let s = format!("{}", ts[0].display(arena));
        assert_eq!(s, r#"'+'('++'('++'("a", xx), "b"), 1)"#);
    }

    #[test]
    fn bare_string_unwraps_to_plain_string() {
        let _ = env_logger::builder().is_test(true).try_init();
        let arena = &mut Arena::try_with_default_opers().unwrap();
        let ts = parse(arena, None, r#""hello" ."#);
        assert_eq!(ts.len(), 1);
        assert_eq!(format!("{}", ts[0].display(arena)), r#""hello""#);
    }

    #[test]
    fn bare_strings_as_func_args() {
        let _ = env_logger::builder().is_test(true).try_init();
        let arena = &mut Arena::try_with_default_opers().unwrap();
        let ts = parse(arena, None, r#"foo("hello", "world") ."#);
        assert_eq!(ts.len(), 1);
        assert_eq!(
            format!("{}", ts[0].display(arena)),
            r#"foo("hello", "world")"#
        );
    }

    // Prefix '-' on a non-numeric operand stays a function application
    // rather than being folded into a literal.
    #[test]
    fn prefix_op_on_interpolated_string() {
        let _ = env_logger::builder().is_test(true).try_init();
        let arena = &mut Arena::try_with_default_opers().unwrap();
        let ts = parse(arena, Some(SAMPLE_DEFS), r#"- "a{xx}b" ."#);
        assert_eq!(ts.len(), 1);
        let s = format!("{}", ts[0].display(arena));
        assert_eq!(s, r#"'-'('++'('++'("a", xx), "b"))"#);
    }

    #[test]
    fn prefix_op_on_bare_string() {
        let _ = env_logger::builder().is_test(true).try_init();
        let arena = &mut Arena::try_with_default_opers().unwrap();
        let ts = parse(arena, None, r#"- "hello" ."#);
        assert_eq!(ts.len(), 1);
        assert_eq!(format!("{}", ts[0].display(arena)), r#"'-'("hello")"#);
    }

    // Without SAMPLE_DEFS the operators are undefined, so parsing panics
    // inside the helper's `expect`.
    #[test]
    #[should_panic]
    fn missing_ops() {
        let arena = &mut Arena::try_with_default_opers().unwrap();
        let _ts = parse(arena, None, "2 * 2 <= 5");
    }

    // Exercises lists with tails, dates, nested interpolation, and braced
    // raw-text blocks in one tuple.
    #[test]
    fn more_complicated_term() {
        let _ = env_logger::builder().is_test(true).try_init();
        let arena = &mut Arena::try_with_default_opers().unwrap();
        let x = "(
[(1, 2) | unit] ++ foo(baz(1e-9)),
date{2025-09-30T18:24:22.154Z},
\"aaa{
1 + 2
}bbb{
3 * 4
}ccc\",
{player = {pos = {x = 0, y = 0}, health = 100}},
)";
        let ts = parse(arena, Some(SAMPLE_DEFS), x);
        let s = format!("{}", ts[0].display(arena));
        assert_eq!(ts.len(), 1);
        assert_eq!(
            s,
            "('++'([(1, 2) | unit], foo(baz(0.000000001))), date{2025-09-30T18:24:22.154+00:00}, '++'('++'('++'('++'(\"aaa\", '+'(1, 2)), \"bbb\"), '*'(3, 4)), \"ccc\"), \"player = \\{pos = \\{x = 0, y = 0\\}, health = 100\\}\")"
        );
    }

    // Each vector is (input term text, expected display form or None to
    // expect the input back verbatim, expected raw string value or None
    // when the term is not a string). Every vector is parsed, displayed,
    // reparsed, and redisplayed to verify a stable round trip.
    #[test]
    fn string_roundtrip_vectors() {
        let _ = env_logger::builder().is_test(true).try_init();
        let vectors: Vec<(&str, Option<&str>, Option<&str>)> = vec![
            (r#""hello""#, None, Some("hello")),
            (r#""""#, None, Some("")),
            (r#""hello world""#, None, Some("hello world")),
            (r#""abc def ghi""#, None, Some("abc def ghi")),
            (r#""a\\b""#, None, Some("a\\b")),
            (r#""a\"b""#, None, Some("a\"b")),
            (r#""line1\nline2""#, None, Some("line1\nline2")),
            (r#""col1\tcol2""#, None, Some("col1\tcol2")),
            (r#""ret\r""#, None, Some("ret\r")),
            (r#""bell\a""#, None, Some("bell\x07")),
            (r#""bs\b""#, None, Some("bs\x08")),
            (r#""ff\f""#, None, Some("ff\x0C")),
            (r#""vt\v""#, None, Some("vt\x0B")),
            (r#""esc\e""#, None, Some("esc\x1B")),
            (r#""del\d""#, None, Some("del\x7F")),
            (r#""a\\b\\c""#, None, Some("a\\b\\c")),
            (r#""\\\\""#, None, Some("\\\\")),
            (r#""\\""#, None, Some("\\")),
            (r#""hello \{world\}""#, None, Some("hello {world}")),
            (r#""\{""#, None, Some("{")),
            (r#""\}""#, None, Some("}")),
            (r#""\{\}""#, None, Some("{}")),
            (r#""a\{b\}c""#, None, Some("a{b}c")),
            (r#""nested \{a \{b\} c\}""#, None, Some("nested {a {b} c}")),
            (r#""\\attrDef\{name\}\{value\}""#, None, Some("\\attrDef{name}{value}")),
            (r#""\\vDefine\{r_\}\{text\}""#, None, Some("\\vDefine{r_}{text}")),
            (r#""\x41""#, Some(r#""A""#), Some("A")),
            (r#""\x00""#, Some(r#""\x00""#), Some("\x00")),
            (r#""\x7E""#, Some(r#""~""#), Some("~")),
            (r#""\101""#, Some(r#""A""#), Some("A")),
            (r#""\0""#, Some(r#""\x00""#), Some("\x00")),
            (r#""\176""#, Some(r#""~""#), Some("~")),
            (r#""\^A""#, Some(r#""\x01""#), Some("\x01")),
            (r#""\^Z""#, Some(r#""\x1A""#), Some("\x1A")),
            (r#""tab\there\nnewline""#, None, Some("tab\there\nnewline")),
            (r#""path\\to\\file\{name\}""#, None, Some("path\\to\\file{name}")),
            (r#""say \"hello\" \{world\}""#, None, Some("say \"hello\" {world}")),
            ("hello", Some("hello"), None),
            ("'hello world'", None, None),
            ("'it\\'s'", None, None),
            ("42", Some("42"), None),
            ("-7", Some("-7"), None),
            ("3.14", Some("3.14"), None),
            ("0", Some("0"), None),
            ("0.0", Some("0.0"), None),
            ("[1, 2, 3]", Some("[1, 2, 3]"), None),
            ("[]", Some("nil"), None),
            (r#"["a", "b", "c"]"#, Some(r#"["a", "b", "c"]"#), None),
            ("foo(1, 2)", Some("foo(1, 2)"), None),
            (r#"f("hello \{world\}")"#, Some(r#"f("hello \{world\}")"#), None),
            ("{1, 2}", Some(r#""1, 2""#), Some("1, 2")),
            ("{1, 2, 3}", Some(r#""1, 2, 3""#), Some("1, 2, 3")),
            ("{hello {world} end}", Some(r#""hello \{world\} end""#), Some("hello {world} end")),
            (r#"" spaces ""#, None, Some(" spaces ")),
            (r#""\n\n\n""#, None, Some("\n\n\n")),
            (r#""\t\t""#, None, Some("\t\t")),
            (r#""abc\ndef\tghi""#, None, Some("abc\ndef\tghi")),
        ];
        let arena = &mut Arena::try_with_default_opers().unwrap();
        for (i, (term_str, expected_display, expected_raw)) in vectors.iter().enumerate() {
            let terms = parse(arena, None, &format!("{} .", term_str));
            assert!(
                !terms.is_empty(),
                "vector {}: failed to parse: {}",
                i, term_str
            );
            let term = terms[0];
            // Check the decoded raw value when the term is a string.
            if let Some(raw) = expected_raw {
                match term.view(arena).unwrap() {
                    View::Str(s) => {
                        assert_eq!(
                            s, *raw,
                            "vector {}: raw value mismatch for {}\n got: {:?}\n expected: {:?}",
                            i, term_str, s, raw
                        );
                    }
                    _ => {
                    }
                }
            }
            // Check the display form (defaults to the input text).
            let displayed = format!("{}", term.display(arena));
            let expected_disp = expected_display.unwrap_or(term_str);
            assert_eq!(
                displayed, expected_disp,
                "vector {}: display mismatch for {}\n got: {}\n expected: {}",
                i, term_str, displayed, expected_disp
            );
            // Reparse the displayed form; it must display identically again.
            let terms2 = parse(arena, None, &format!("{} .", displayed));
            assert!(
                !terms2.is_empty(),
                "vector {}: failed to reparse displayed: {}",
                i, displayed
            );
            let term2 = terms2[0];
            let redisplayed = format!("{}", term2.display(arena));
            assert_eq!(
                redisplayed, displayed,
                "vector {}: roundtrip display mismatch\n original: {}\n displayed: {}\n redisplayed: {}",
                i, term_str, displayed, redisplayed
            );
            // And the reparsed term must decode to the same raw value.
            if let Some(raw) = expected_raw {
                match term2.view(arena).unwrap() {
                    View::Str(s) => {
                        assert_eq!(
                            s, *raw,
                            "vector {}: roundtrip raw value mismatch\n got: {:?}\n expected: {:?}",
                            i, s, raw
                        );
                    }
                    _ => {}
                }
            }
        }
    }
}