1use crate::lexer::{TermLexer, TermToken, Value};
13use crate::oper::{Assoc, Fixity, MAX_OPER_PREC, MIN_OPER_PREC, OperDef, OperDefTab, OperDefs};
14use anyhow::{Context, Result, anyhow, bail};
15use arena_terms::{Arena, IntoTerm, Term, View, atom, func, list};
16use once_cell::sync::Lazy;
17use parlex::{Lexer, LexerCtx, LexerData, Token};
18use smartstring::alias::String;
19use std::iter::FusedIterator;
20use std::str::FromStr;
21use std::{fmt, mem};
22
23include!(concat!(env!("OUT_DIR"), "/parser_data.rs"));
24
/// Builds the bootstrap operator table used to parse operator-definition
/// files themselves.
///
/// Each entry is an `op(...)` term whose positional fields follow the
/// self-describing last entry below: signature, `type` (fixity), `prec`,
/// `assoc`, `rename_to`, `embed_type`. The table declares:
/// - prefix `-` (prec 800, right-assoc), so negated literals parse,
/// - infix `++` (prec 500, left-assoc),
/// - infix `=` (prec 100, right-assoc), used for `name = value` arguments,
/// - the `op` functor itself, with named defaults for `type`, `prec`,
///   `assoc`, `rename_to` and `embed_type`.
///
/// # Panics
/// Panics if [`OperDefs::try_from_ops`] rejects this hard-coded table,
/// which would indicate a bug in the table itself.
pub fn parser_oper_defs(arena: &mut Arena) -> OperDefs {
    let term = list![
        func!(
            "op";
            func!("-"; atom!("x")),
            atom!("prefix"),
            800,
            atom!("right"),
            atom!("none"),
            atom!("false"),
        ),
        func!(
            "op";
            func!("++"; atom!("x"), atom!("y")),
            atom!("infix"),
            500,
            atom!("left"),
            atom!("none"),
            atom!("false"),
        ),
        func!(
            "op";
            func!("="; atom!("x"), atom!("y")),
            atom!("infix"),
            100,
            atom!("right"),
            atom!("none"),
            atom!("false"),
        ),
        // The `op` operator itself, declared with named arguments and
        // defaults so later definitions may omit trailing fields.
        func!(
            "op";
            func!(
                "op";
                atom!("f"),
                func!("="; atom!("type"), atom!("fun")),
                func!("="; atom!("prec"), 0),
                func!("="; atom!("assoc"), atom!("none")),
                func!("="; atom!("rename_to"), atom!("none")),
                func!("="; atom!("embed_type"), atom!("false")),
            ),
            atom!("fun"),
            0,
            atom!("none"),
            atom!("none"),
            atom!("false"),
        ),
        => arena
    ];
    // Static, test-covered table: failure here is a programming error.
    OperDefs::try_from_ops(arena, term).unwrap()
}
76
/// Streaming parser that turns a fused byte iterator into [`Term`]s.
///
/// Wraps a [`TermLexer`] in a [`ParserCtx`] and keeps a scratch buffer of
/// already-reduced terms used while assembling sequences, lists, tuples and
/// function-argument vectors.
pub struct TermParser<I>
where
    I: FusedIterator<Item = u8>,
{
    // Parser context: lexer, generated parser tables, token stack, stats.
    ctx: ParserCtx<TermLexer<I>, <Self as Parser<Arena>>::ParserData, Arena>,
    // Scratch stack of reduced terms; `Value::Index` tokens record the start
    // index of a sequence, and the buffer is truncated back to that index
    // once the sequence is closed.
    terms: Vec<Term>,
}
84
85impl<I> TermParser<I>
86where
87 I: FusedIterator<Item = u8>,
88{
89 pub fn try_new(input: I, opers: Option<OperDefs>) -> Result<Self> {
90 let lexer = TermLexer::try_new(input, opers)?;
91 let ctx = ParserCtx::new(lexer);
92 Ok(Self {
93 ctx,
94 terms: Vec::new(),
95 })
96 }
97
98 pub fn try_collect_terms(&mut self, arena: &mut Arena) -> Result<Vec<Term>> {
99 let mut ts = Vec::new();
100 while let Some(t) = self.try_next_term(arena)? {
101 ts.push(t);
102 }
103 Ok(ts)
104 }
105
106 #[inline]
107 pub fn try_next_term(&mut self, arena: &mut Arena) -> Result<Option<Term>> {
108 while let Some(tok) = self.try_next(arena)? {
109 match tok.token_id {
110 TokenID::Term => match tok.value {
111 Value::None => {}
112 Value::Term(term) => return Ok(Some(term)),
113 value => bail!("Unexpected token value {:?}", value),
114 },
115 token_id => bail!("Unexpected token id {:?}", token_id),
116 }
117 }
118 Ok(None)
119 }
120
121 pub fn define_opers<J: FusedIterator<Item = u8>>(
122 &mut self,
123 arena: &mut Arena,
124 defs_input: J,
125 opers: Option<OperDefs>,
126 ) -> Result<()> {
127 let opers = match opers {
128 Some(opers) => opers,
129 None => parser_oper_defs(arena),
130 };
131
132 let defs_lexer = TermLexer::try_new(defs_input, Some(opers))?;
133 let defs_ctx = ParserCtx::new(defs_lexer);
134 let mut defs_parser = TermParser {
135 ctx: defs_ctx,
136 terms: Vec::new(),
137 };
138 while let Some(term) = defs_parser.try_next_term(arena)? {
139 log::trace!(
140 "Stats: {:?}, {:?}",
141 defs_parser.ctx().lexer.stats(),
142 defs_parser.stats()
143 );
144 defs_parser
145 .ctx_mut()
146 .lexer
147 .opers
148 .define_opers(arena, term)?;
149 }
150 let defs_opers = std::mem::take(&mut defs_parser.ctx_mut().lexer.opers);
151 self.ctx_mut().lexer.opers = defs_opers;
152
153 Ok(())
154 }
155
156 fn normalize_term(
157 &self,
158 arena: &mut Arena,
159 term: Term,
160 fixity: Fixity,
161 op_tab_index: Option<usize>,
162 ) -> Result<Term> {
163 match self.ctx().lexer.opers.get(op_tab_index)[fixity] {
164 Some(ref op_def) => {
165 let (functor, vs) = match term.view(arena)? {
166 View::Atom(_) => (term, &[] as &[Term]),
167 View::Func(_, functor, args) => {
168 if args.is_empty() {
169 bail!("invalid Func");
170 }
171 (*functor, args)
172 }
173 _ => {
174 return Ok(term);
175 }
176 };
177 let name = functor.atom_name(arena)?;
178
179 let n_required_args = OperDef::required_arity(fixity);
180 if vs.len() < n_required_args {
181 bail!(
182 "missing {} required arguments in term {:?}",
183 n_required_args - vs.len(),
184 name
185 );
186 }
187
188 let args = &op_def.args;
189 let mut xs: Vec<Option<Term>> = vec![None; args.len()];
190
191 for (i, value) in vs.iter().enumerate() {
192 if i < n_required_args {
193 xs[i] = Some(*value);
194 } else {
195 match value.view(arena)? {
196 View::Func(ar, functor, vs)
197 if vs.len() == 2 && functor.atom_name(ar)? == "=" =>
198 {
199 let arg_name = vs[0].atom_name(arena)?;
200
201 if let Some(pos) = args.iter().position(|x| x.name == arg_name) {
202 if xs[pos].is_none() {
203 xs[pos] = Some(vs[1]);
204 } else {
205 bail!(
206 "cannot redefine argument {:?} at position {} in {:?}",
207 arg_name,
208 pos,
209 name
210 );
211 }
212 } else {
213 bail!("invalid argument name {:?} in {:?}", arg_name, name);
214 }
215 }
216 _ => {
217 if xs[i].is_none() {
218 xs[i] = Some(*value);
219 } else {
220 bail!(
221 "cannot redefine argument {:?} at position {} in {:?}",
222 args[i].name,
223 i,
224 name
225 );
226 }
227 }
228 }
229 }
230 }
231
232 let vs: Option<Vec<_>> = xs
233 .into_iter()
234 .enumerate()
235 .map(|(i, x)| x.or(args[i].default))
236 .collect();
237 let mut vs = match vs {
238 Some(vs) => vs,
239 None => bail!("missing arguments in {:?}", name),
240 };
241
242 let rename_to = match op_def.rename_to {
243 Some(rename_to) => rename_to,
244 None => functor,
245 };
246
247 if op_def.embed_fixity {
248 vs.insert(0, arena.atom(String::from(fixity)));
249 }
250
251 if vs.is_empty() {
252 Ok(rename_to)
253 } else {
254 Ok(arena.funcv(std::iter::once(&rename_to).chain(vs.iter()))?)
255 }
256 }
257 None => match fixity {
258 Fixity::Fun => Ok(term),
259 _ => bail!("missing opdef for fixity {:?}", fixity),
260 },
261 }
262 }
263}
264
impl<I> Parser<Arena> for TermParser<I>
where
    I: FusedIterator<Item = u8>,
{
    type Lexer = TermLexer<I>;
    // `ParData` is generated into `parser_data.rs` by the build script.
    type ParserData = ParData;

    fn ctx(&self) -> &ParserCtx<Self::Lexer, Self::ParserData, Arena> {
        &self.ctx
    }

    fn ctx_mut(&mut self) -> &mut ParserCtx<Self::Lexer, Self::ParserData, Arena> {
        &mut self.ctx
    }

    fn stats(&self) -> ParserStats {
        self.ctx().stats.clone()
    }

    /// Resolves a shift/reduce conflict between an operator reduction and
    /// the incoming operator token `tok2` using operator precedence and
    /// associativity. Higher precedence of the reduced operator forces a
    /// reduce; higher precedence of the incoming operator forces a shift;
    /// equal precedence falls back to associativity, and any mismatch or
    /// non-associative chain is reported as an error.
    fn resolve_ambiguity(
        &mut self,
        _arena: &mut Arena,
        ambig: AmbigID,
        tok2: &TermToken,
    ) -> Result<Action> {
        let ambigs = ParData::lookup_ambig(ambig);

        // By construction of the generated tables, entry 0 is the shift
        // action and entry 1 the reduce action.
        let shift_action = ambigs[0];
        assert!(matches!(shift_action, Action::Shift(_)));

        let reduce_action = ambigs[1];
        assert!(matches!(reduce_action, Action::Reduce(_)));

        let Action::Reduce(prod) = reduce_action else {
            bail!("can't match reduce action")
        };

        log::trace!(
            "Conflict between reducing {:?} and shifting {:?}",
            prod,
            tok2
        );

        // Peek back into the token stack to find the operator token of the
        // production being reduced; the peek depth depends on each
        // production's shape (the `*2` variants carry an extra argument
        // sequence on the stack).
        let (fixity1, tok1) = match prod {
            ProdID::Infix1 => {
                (Fixity::Infix, self.tokens_peek(1))
            }
            ProdID::Infix2 => {
                (Fixity::Infix, self.tokens_peek(3))
            }
            ProdID::Prefix1 => {
                (Fixity::Prefix, self.tokens_peek(1))
            }
            ProdID::Prefix2 => {
                (Fixity::Prefix, self.tokens_peek(3))
            }
            ProdID::Postfix1 => {
                (Fixity::Postfix, self.tokens_peek(0))
            }
            ProdID::Postfix2 => {
                (Fixity::Postfix, self.tokens_peek(2))
            }
            _ => bail!(
                "unexpected conflict: reduction of {:?} with shifting token {:?}",
                prod,
                tok2
            ),
        };

        let op_tab1 = self.ctx().lexer.opers.get(tok1.op_tab_index);
        let op_tab2 = self.ctx().lexer.opers.get(tok2.op_tab_index);

        assert!(op_tab1.is_oper());

        if op_tab2.is_oper() {
            let op_def1 = match op_tab1[fixity1] {
                Some(ref op_def1) => op_def1,
                // No definition for this fixity: let the shift proceed and
                // fail later with a proper diagnostic if needed.
                None => return Ok(shift_action),
            };

            let prec1 = op_def1.prec;
            let assoc1 = op_def1.assoc;

            // The incoming token may be defined as infix and/or postfix;
            // compare against the range of its possible precedences.
            let min_prec2 = std::cmp::min(
                op_tab2[Fixity::Infix]
                    .as_ref()
                    .map(|x| x.prec)
                    .unwrap_or(MAX_OPER_PREC),
                op_tab2[Fixity::Postfix]
                    .as_ref()
                    .map(|x| x.prec)
                    .unwrap_or(MAX_OPER_PREC),
            );
            let max_prec2 = std::cmp::max(
                op_tab2[Fixity::Infix]
                    .as_ref()
                    .map(|x| x.prec)
                    .unwrap_or(MIN_OPER_PREC),
                op_tab2[Fixity::Postfix]
                    .as_ref()
                    .map(|x| x.prec)
                    .unwrap_or(MIN_OPER_PREC),
            );

            if prec1 > min_prec2 {
                // Reduced operator binds tighter in every reading: reduce.
                Ok(reduce_action)
            } else if prec1 < max_prec2 {
                // Incoming operator binds tighter in every reading: shift.
                Ok(shift_action)
            } else if min_prec2 == max_prec2 && prec1 == min_prec2 {
                // Equal precedence: decide by associativity.
                if assoc1 == Assoc::None {
                    bail!(
                        "precedence conflict: cannot chain non-associative operator {:?}; use parenthesis",
                        tok1
                    );
                }
                if op_tab2[Fixity::Infix]
                    .as_ref()
                    .is_some_and(|x| x.assoc == Assoc::None)
                    || op_tab2[Fixity::Postfix]
                        .as_ref()
                        .is_some_and(|x| x.assoc == Assoc::None)
                {
                    bail!(
                        "precedence conflict: cannot chain non-associative operator {:?}; use parenthesis",
                        tok2
                    );
                }
                if op_tab2[Fixity::Infix]
                    .as_ref()
                    .is_some_and(|x| x.assoc != assoc1)
                    || op_tab2[Fixity::Postfix]
                        .as_ref()
                        .is_some_and(|x| x.assoc != assoc1)
                {
                    bail!(
                        "associativity conflict: cannot chain operators {:?} and {:?}; use parenthesis",
                        tok1,
                        tok2
                    );
                } else {
                    // Same associativity on both sides: left-assoc groups to
                    // the left (reduce), right-assoc to the right (shift).
                    if assoc1 == Assoc::Left {
                        Ok(reduce_action)
                    } else {
                        Ok(shift_action)
                    }
                }
            } else {
                // Precedence ranges overlap ambiguously: refuse to guess.
                bail!(
                    "precedence conflict: cannot chain operators {:?} and {:?}; use parenthesis",
                    tok1,
                    tok2
                );
            }
        } else {
            // Incoming token is not an operator: plain shift.
            Ok(shift_action)
        }
    }

    /// Applies one grammar production: pops the production's tokens from the
    /// stack, builds the corresponding term (normalizing through the
    /// operator table where applicable), and pushes the result back.
    fn reduce(&mut self, arena: &mut Arena, prod: ProdID, token: &TermToken) -> Result<()> {
        match prod {
            ProdID::Start => {
                // The start production is never reduced explicitly.
                unreachable!()
            }

            // Expr → Term: relabel the token in place.
            ProdID::Term1 => {
                let mut expr_tok = self.tokens_pop()?;
                expr_tok.token_id = TokenID::Term;
                self.tokens_push(expr_tok);
            }

            // Expr followed by a terminator → Term.
            ProdID::Term2 => {
                self.tokens_pop()?;
                let mut expr_tok = self.tokens_pop()?;
                expr_tok.token_id = TokenID::Term;
                self.tokens_push(expr_tok);
            }

            // Empty term: push a valueless Term token (skipped by
            // `try_next_term`).
            ProdID::Term3 => {
                self.tokens_push(TermToken::new(TokenID::Term, Value::None, token.line_no));
            }

            // Lone terminator → empty Term.
            ProdID::Term4 => {
                self.tokens_pop()?;
                self.tokens_push(TermToken::new(TokenID::Term, Value::None, token.line_no));
            }

            // Function application: functor + collected argument terms.
            ProdID::Func => {
                self.tokens_pop()?;
                // Start index of this call's arguments in `self.terms`.
                let index = usize::try_from(self.tokens_pop()?.value)?;
                let func_tok = self.tokens_pop()?;
                let line_no = func_tok.line_no;
                let op_tab_index = func_tok.op_tab_index;
                let functor = Term::try_from(func_tok.value)?;

                let vs = std::iter::once(&functor).chain(self.terms[index..].iter());
                let term = arena.funcv(vs)?;
                // Release the consumed arguments from the scratch buffer.
                self.terms.truncate(index);

                let term = self.normalize_term(arena, term, Fixity::Fun, op_tab_index)?;

                self.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), line_no));
            }

            // `[ seq ]` list literal.
            ProdID::List => {
                self.tokens_pop()?;
                let seq_tok = self.tokens_pop()?;
                let left_brack_tok = self.tokens_pop()?;
                let index = usize::try_from(seq_tok.value)?;

                let term = arena.list(&self.terms[index..]);
                self.terms.truncate(index);

                self.tokens_push(TermToken::new(
                    TokenID::Expr,
                    Value::Term(term),
                    left_brack_tok.line_no,
                ));
            }

            // `[]` empty list.
            ProdID::Nil => {
                self.tokens_pop()?;
                let left_brack_tok = self.tokens_pop()?;
                self.tokens_push(TermToken::new(
                    TokenID::Expr,
                    Value::Term(Term::NIL),
                    left_brack_tok.line_no,
                ));
            }

            // `[ seq | tail ]` list with explicit tail.
            ProdID::List2 => {
                self.tokens_pop()?;
                let tail = Term::try_from(self.tokens_pop()?.value)?;
                self.tokens_pop()?;
                let index = usize::try_from(self.tokens_pop()?.value)?;
                let left_brack_tok = self.tokens_pop()?;

                let term = arena.listc(&self.terms[index..], tail);
                self.terms.truncate(index);

                self.tokens_push(TermToken::new(
                    TokenID::Expr,
                    Value::Term(term),
                    left_brack_tok.line_no,
                ));
            }

            // `( seq )`: a single element is just a parenthesized expr;
            // otherwise build a tuple.
            ProdID::Tuple => {
                self.tokens_pop()?;
                let seq_tok = self.tokens_pop()?;
                let left_paren_tok = self.tokens_pop()?;

                let index = usize::try_from(seq_tok.value)?;

                let vs = &self.terms[index..];
                let term = if vs.len() == 1 {
                    vs[0]
                } else {
                    arena.tuple(vs)
                };
                self.terms.truncate(index);

                self.tokens_push(TermToken::new(
                    TokenID::Expr,
                    Value::Term(term),
                    left_paren_tok.line_no,
                ));
            }

            // `()` unit literal.
            ProdID::Unit => {
                self.tokens_pop()?;
                let left_paren_tok = self.tokens_pop()?;
                self.tokens_push(TermToken::new(
                    TokenID::Expr,
                    Value::Term(Term::UNIT),
                    left_paren_tok.line_no,
                ));
            }

            // Literals and variables: relabel the token as an Expr.
            ProdID::Var | ProdID::Int | ProdID::Real | ProdID::Date | ProdID::Str | ProdID::Bin => {
                let mut tok = self.tokens_pop()?;
                tok.token_id = TokenID::Expr;
                self.tokens_push(tok);
            }

            // Bare atom: may still be normalized via a fun opdef
            // (defaults, rename_to, embed_fixity).
            ProdID::Atom => {
                let atom_tok = self.tokens_pop()?;
                let line_no = atom_tok.line_no;
                let op_tab_index = atom_tok.op_tab_index;

                let atom = Term::try_from(atom_tok.value)?;

                let term = self.normalize_term(arena, atom, Fixity::Fun, op_tab_index)?;

                self.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), line_no));
            }

            // `expr OP expr` infix application.
            ProdID::Infix1 => {
                let expr2_tok = self.tokens_pop()?;
                let oper_tok = self.tokens_pop()?;
                let expr1_tok = self.tokens_pop()?;
                let line_no = expr1_tok.line_no;
                let op_tab_index = oper_tok.op_tab_index;

                let expr2 = Term::try_from(expr2_tok.value)?;
                let oper = Term::try_from(oper_tok.value)?;
                let expr1 = Term::try_from(expr1_tok.value)?;

                let term = arena.funcv([oper, expr1, expr2])?;
                let term = self.normalize_term(arena, term, Fixity::Infix, op_tab_index)?;

                self.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), line_no));
            }

            // Infix application that also carries an extra argument
            // sequence collected in `self.terms`.
            ProdID::Infix2 => {
                let expr2_tok = self.tokens_pop()?;
                self.tokens_pop()?;
                let index = usize::try_from(self.tokens_pop()?.value)?;
                let oper_tok = self.tokens_pop()?;
                let expr1_tok = self.tokens_pop()?;
                let line_no = expr1_tok.line_no;
                let op_tab_index = oper_tok.op_tab_index;

                let expr2 = Term::try_from(expr2_tok.value)?;
                let oper = Term::try_from(oper_tok.value)?;
                let expr1 = Term::try_from(expr1_tok.value)?;

                let xs = [oper, expr1, expr2];
                let vs = xs.iter().chain(self.terms[index..].iter());
                let term = arena.funcv(vs)?;
                self.terms.truncate(index);

                let term = self.normalize_term(arena, term, Fixity::Infix, op_tab_index)?;

                self.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), line_no));
            }

            // `OP expr` prefix application.
            ProdID::Prefix1 => {
                let expr1_tok = self.tokens_pop()?;
                let oper_tok = self.tokens_pop()?;
                let line_no = oper_tok.line_no;
                let op_tab_index = oper_tok.op_tab_index;

                let expr1 = Term::try_from(expr1_tok.value)?;
                let oper = Term::try_from(oper_tok.value)?;

                // Special case: fold `-` applied to a numeric literal into a
                // negative literal instead of building a func term.
                let term = match oper.view(arena)? {
                    View::Atom(s)
                        if s == "-"
                            && matches!(expr1.view(arena)?, View::Int(_) | View::Real(_)) =>
                    {
                        match expr1.view(arena)? {
                            View::Int(i) => arena.int(-i),
                            View::Real(r) => arena.real(-r),
                            _ => unreachable!(),
                        }
                    }
                    _ => {
                        let term = arena.funcv([oper, expr1])?;
                        self.normalize_term(arena, term, Fixity::Prefix, op_tab_index)?
                    }
                };

                self.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), line_no));
            }

            // Prefix application with an extra argument sequence.
            ProdID::Prefix2 => {
                let expr1_tok = self.tokens_pop()?;
                self.tokens_pop()?;
                let index = usize::try_from(self.tokens_pop()?.value)?;
                let oper_tok = self.tokens_pop()?;
                let line_no = oper_tok.line_no;
                let op_tab_index = oper_tok.op_tab_index;

                let oper = Term::try_from(oper_tok.value)?;
                let expr1 = Term::try_from(expr1_tok.value)?;

                let xs = [oper, expr1];
                let vs = xs.iter().chain(self.terms[index..].iter());
                let term = arena.funcv(vs)?;
                self.terms.truncate(index);

                let term = self.normalize_term(arena, term, Fixity::Prefix, op_tab_index)?;

                self.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), line_no));
            }

            // `expr OP` postfix application.
            ProdID::Postfix1 => {
                let oper_tok = self.tokens_pop()?;
                let expr1_tok = self.tokens_pop()?;
                let line_no = expr1_tok.line_no;
                let op_tab_index = oper_tok.op_tab_index;

                let oper = Term::try_from(oper_tok.value)?;
                let expr1 = Term::try_from(expr1_tok.value)?;

                let term = arena.funcv([oper, expr1])?;
                let term = self.normalize_term(arena, term, Fixity::Postfix, op_tab_index)?;

                self.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), line_no));
            }

            // Postfix application with an extra argument sequence.
            ProdID::Postfix2 => {
                self.tokens_pop()?;
                let index = usize::try_from(self.tokens_pop()?.value)?;
                let oper_tok = self.tokens_pop()?;
                let expr1_tok = self.tokens_pop()?;
                let line_no = expr1_tok.line_no;
                let op_tab_index = oper_tok.op_tab_index;

                let oper = Term::try_from(oper_tok.value)?;
                let expr1 = Term::try_from(expr1_tok.value)?;

                let xs = [oper, expr1];
                let vs = xs.iter().chain(self.terms[index..].iter());
                let term = arena.funcv(vs)?;
                self.terms.truncate(index);

                let term = self.normalize_term(arena, term, Fixity::Postfix, op_tab_index)?;

                self.tokens_push(TermToken::new(TokenID::Expr, Value::Term(term), line_no));
            }

            // BareSeq → Seq: relabel in place.
            ProdID::Seq1 => {
                let mut bare_seq_tok = self.tokens_pop()?;
                bare_seq_tok.token_id = TokenID::Seq;
                self.tokens_push(bare_seq_tok);
            }

            // BareSeq followed by a trailing separator → Seq.
            ProdID::Seq2 => {
                self.tokens_pop()?;
                let mut bare_seq_tok = self.tokens_pop()?;
                bare_seq_tok.token_id = TokenID::Seq;
                self.tokens_push(bare_seq_tok);
            }

            // First element of a sequence: push the term into the scratch
            // buffer and record its start index on the token stack.
            ProdID::BareSeq1 => {
                let expr_tok = self.tokens_pop()?;
                let line_no = expr_tok.line_no;
                let expr = Term::try_from(expr_tok.value)?;

                let index = self.terms.len();
                self.terms.push(expr);

                self.tokens_push(TermToken::new(
                    TokenID::BareSeq,
                    Value::Index(index),
                    line_no,
                ));
            }

            // Subsequent element: append to the scratch buffer; the BareSeq
            // token (with the start index) stays on the stack.
            ProdID::BareSeq2 => {
                let expr_tok = self.tokens_pop()?;
                let expr = Term::try_from(expr_tok.value)?;
                self.tokens_pop()?;

                self.terms.push(expr);
            }
        }
        Ok(())
    }
}
760
#[cfg(test)]
mod tests {
    use super::*;

    /// Operator table used by the tests: comparison, arithmetic and boolean
    /// operators, plus a postfix `-` that is renamed during normalization.
    const SAMPLE_DEFS: &str = r#"[
op(==(x,y),infix,350,none),
op(!=(x,y),infix,350,none),
op( <(x,y),infix,350,none),
op( >(x,y),infix,350,none),
op(<=(x,y),infix,350,none),
op(>=(x,y),infix,350,none),
op('+'(x,y),infix,380,left),
op('-'(x,y),infix,380,left),
op('-'(x),postfix,900,left, rename_to=some('postfix_minus')),
op('*'(x,y),infix,400,left),
op('/'(x,y),infix,400,left),
op('+'(x),prefix,800,right),
op(and(x,y),infix,300,left),
op(or(x,y),infix,250,left),
op(not(x),prefix,800,right),
]"#;

    /// Parses `s` into terms, first installing operator definitions `defs`
    /// when provided (seeded with the bootstrap table).
    fn parse(arena: &mut Arena, defs: Option<&str>, s: &str) -> Result<Vec<Term>> {
        let mut parser = TermParser::try_new(s.bytes().fuse(), Some(parser_oper_defs(arena)))?;
        if let Some(defs) = defs {
            parser.define_opers(arena, defs.bytes().fuse(), None)?;
        }
        parser.try_collect_terms(arena)
    }

    /// Empty terminators are skipped and precedence groups `*` under `<=`.
    #[test]
    fn one_term() {
        let _ = env_logger::builder().is_test(true).try_init();
        let arena = &mut Arena::new();
        let ts = parse(arena, Some(SAMPLE_DEFS), " . . 2 * 2 <= 5 . .").unwrap();
        dbg!(&ts);
        let s = format!("{}", ts[0].display(arena));
        dbg!(&s);
        assert_eq!(ts.len(), 1);
        assert_eq!(s, "'<='('*'(2, 2), 5)");
    }

    /// Without SAMPLE_DEFS the `*`/`<=` operators are undeclared, so the
    /// `unwrap` must panic.
    #[test]
    #[should_panic]
    fn missing_ops() {
        let arena = &mut Arena::new();
        // Fix: don't bind the result — the former `let ts = …` produced an
        // unused-variable warning; the unwrap alone drives the panic.
        parse(arena, None, "2 * 2 <= 5").unwrap();
    }

    /// Exercises lists with tails, tuples, dates, string interpolation and
    /// verbatim `{…}` strings in a single round trip.
    #[test]
    fn more_complicated_term() {
        let _ = env_logger::builder().is_test(true).try_init();
        let arena = &mut Arena::new();
        let x = "(
[(1, 2) | unit] ++ foo(baz(1e-9)),
date{2025-09-30T18:24:22.154Z},
\"aaa{
1 + 2
}bbb{
3 * 4
}ccc\",
{player = {pos = {x = 0, y = 0}, health = 100}},
)";
        let ts = parse(arena, Some(SAMPLE_DEFS), x).unwrap();
        let s = format!("{}", ts[0].display(arena));
        assert_eq!(ts.len(), 1);
        assert_eq!(
            s,
            "('++'([(1, 2) | unit], foo(baz(0.000000001))), date(2025-09-30T18:24:22.154+00:00), '++'('++'('++'('++'(\"aaa\", '+'(1, 2)), \"bbb\"), '*'(3, 4)), \"ccc\"), \"player = {pos = {x = 0, y = 0}, health = 100}\")"
        );
    }
}