#![doc = include_str!("../README.md")]

use dst_factory::make_dst_factory;
use espy_eyes::{Lexer, Lexigram, Token};
use std::iter::Peekable;

#[cfg(test)]
mod tests;

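/// Errors that the parser can produce while building a syntax tree.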
#[derive(Debug, Eq, PartialEq)]
pub enum Error<'source> {
    Lexer(espy_eyes::Error<'source>),
    MissingToken {
        expected: &'static [Lexigram],
        actual: Option<Token<'source>>,
    },
    ExpectedExpression(Option<Token<'source>>),
    ExpectedStatementOrExpression(Token<'source>),
    UnexpectedCloseParen(Token<'source>),
    IncompleteExpression,
}

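/// A collection of errors attached to the syntax node that produced them.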
#[derive(Debug, Default, Eq, PartialEq)]
pub struct Diagnostics<'source> {
    pub errors: Vec<Error<'source>>,
}

impl<'source> Diagnostics<'source> {
    fn expect(
        &mut self,
        t: Option<espy_eyes::Result<'source>>,
        expected: &'static [Lexigram],
    ) -> Option<Token<'source>> {
        let actual = self.wrap(t);
        if actual.is_some_and(|actual| expected.contains(&actual.lexigram)) {
            actual
        } else {
            self.errors.push(Error::MissingToken { expected, actual });
            None
        }
    }

    fn expect_expression(
        &mut self,
        lexer: &mut Peekable<Lexer<'source>>,
    ) -> Option<Box<Expression<'source>>> {
        let expression = Expression::new(lexer);
        if expression.is_none() {
            self.errors.push(Error::ExpectedExpression(
                lexer.peek().copied().transpose().ok().flatten(),
            ));
        }
        expression
    }

    fn next_if(
        &mut self,
        lexer: &mut Peekable<Lexer<'source>>,
        expected: &'static [Lexigram],
    ) -> Option<Token<'source>> {
        self.expect(lexer.peek().copied(), expected).inspect(|_| {
            lexer.next();
        })
    }

    fn wrap(&mut self, t: Option<espy_eyes::Result<'source>>) -> Option<Token<'source>> {
        match t? {
            Ok(t) => Some(t),
            Err(e) => {
                let t = if let espy_eyes::Error {
                    origin,
                    kind: espy_eyes::ErrorKind::ReservedSymbol,
                } = e
                {
                    Some(Token {
                        origin,
                        lexigram: Lexigram::Ident,
                    })
                } else {
                    None
                };
                self.errors.push(Error::Lexer(e));
                t
            }
        }
    }
}

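/// A single element of a parsed expression.
///
/// `Expression::contents` stores nodes in postfix order: operands appear
/// before the operators that consume them.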
#[derive(Debug, Eq, PartialEq)]
pub enum Node<'source> {
    Unit(Token<'source>, Token<'source>),
    Bool(bool, Token<'source>),
    Number(Token<'source>),
    String(Token<'source>),
    Variable(Token<'source>),
    Block(Box<Block<'source>>),
    If(Box<If<'source>>),
    Match(Box<Match<'source>>),
    Enum(Box<Enum<'source>>),

    Pipe(Token<'source>),
    Call(Token<'source>),
    Bind(Token<'source>),
    Positive(Token<'source>),
    Negative(Token<'source>),
    Annotation(Box<Annotation<'source>>),
    Deref(Token<'source>),
    Mul(Token<'source>),
    Div(Token<'source>),
    Add(Token<'source>),
    Sub(Token<'source>),
    BitwiseAnd(Token<'source>),
    BitwiseOr(Token<'source>),
    BitwiseXor(Token<'source>),
    EqualTo(Token<'source>),
    NotEqualTo(Token<'source>),
    Greater(Token<'source>),
    GreaterEqual(Token<'source>),
    Lesser(Token<'source>),
    LesserEqual(Token<'source>),
    LogicalAnd(Token<'source>),
    LogicalOr(Token<'source>),
    Name {
        name: Token<'source>,
        colon_token: Token<'source>,
    },
    Field {
        dot_token: Token<'source>,
        index: Token<'source>,
    },
    Tuple(Token<'source>),
}

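/// An entry on the operator stack of the shunting-yard pass in
/// `Expression::new`, converted into a `Node` as it reaches the output.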
#[derive(Debug, Eq, PartialEq)]
enum Operation<'source> {
    Call(Token<'source>),
    Pipe(Token<'source>),
    Bind(Token<'source>),
    Positive(Token<'source>),
    Negative(Token<'source>),
    Annotation(Box<Annotation<'source>>),
    Deref(Token<'source>),
    Mul(Token<'source>),
    Div(Token<'source>),
    Add(Token<'source>),
    Sub(Token<'source>),
    BitwiseAnd(Token<'source>),
    BitwiseXor(Token<'source>),
    BitwiseOr(Token<'source>),
    EqualTo(Token<'source>),
    NotEqualTo(Token<'source>),
    Greater(Token<'source>),
    GreaterEqual(Token<'source>),
    Lesser(Token<'source>),
    LesserEqual(Token<'source>),
    LogicalAnd(Token<'source>),
    LogicalOr(Token<'source>),
    Name {
        name: Token<'source>,
        colon_token: Token<'source>,
    },
    Field {
        dot_token: Token<'source>,
        index: Token<'source>,
    },
    Tuple(Token<'source>),
    SubExpression(Token<'source>),
}

impl Operation<'_> {
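    /// The binding power of this operation; higher values bind more tightly.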
    fn precedence(&self) -> usize {
        match self {
            Operation::Field { .. } | Operation::Deref(_) => 13,
            Operation::Positive(_) | Operation::Negative(_) | Operation::Annotation(_) => 12,
            Operation::Mul(_) | Operation::Div(_) => 11,
            Operation::Add(_) | Operation::Sub(_) => 10,
            Operation::BitwiseAnd(_) => 9,
            Operation::BitwiseXor(_) => 8,
            Operation::BitwiseOr(_) => 7,
            Operation::EqualTo(_)
            | Operation::NotEqualTo(_)
            | Operation::Greater(_)
            | Operation::GreaterEqual(_)
            | Operation::Lesser(_)
            | Operation::LesserEqual(_) => 6,
            Operation::LogicalAnd(_) => 5,
            Operation::LogicalOr(_) => 4,
            Operation::Name { .. } => 3,
            Operation::Tuple(_) => 2,
            Operation::Pipe(_) | Operation::Call(_) | Operation::Bind(_) => 1,
            Operation::SubExpression(_) => 0,
        }
    }

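    /// Whether operators of equal precedence group from left to right.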
    fn left_associative(&self) -> bool {
        match self {
            Operation::Field { .. }
            | Operation::Positive(_)
            | Operation::Negative(_)
            | Operation::Annotation(_)
            | Operation::Deref(_)
            | Operation::Mul(_)
            | Operation::Div(_)
            | Operation::Add(_)
            | Operation::Sub(_)
            | Operation::BitwiseAnd(_)
            | Operation::BitwiseXor(_)
            | Operation::BitwiseOr(_)
            | Operation::EqualTo(_)
            | Operation::NotEqualTo(_)
            | Operation::Greater(_)
            | Operation::GreaterEqual(_)
            | Operation::Lesser(_)
            | Operation::LesserEqual(_)
            | Operation::LogicalAnd(_)
            | Operation::LogicalOr(_)
            | Operation::Tuple(_)
            | Operation::SubExpression(_)
            | Operation::Call(_)
            | Operation::Bind(_)
            | Operation::Pipe(_) => true,
            Operation::Name { .. } => false,
        }
    }
}

impl<'source> From<Operation<'source>> for Node<'source> {
    fn from(op: Operation<'source>) -> Self {
        match op {
            Operation::Field { dot_token, index } => Node::Field { dot_token, index },
            Operation::Pipe(t) => Node::Pipe(t),
            Operation::Bind(t) => Node::Bind(t),
            Operation::Call(t) => Node::Call(t),
            Operation::Positive(t) => Node::Positive(t),
            Operation::Negative(t) => Node::Negative(t),
            Operation::Annotation(t) => Node::Annotation(t),
            Operation::Deref(t) => Node::Deref(t),
            Operation::Mul(t) => Node::Mul(t),
            Operation::Div(t) => Node::Div(t),
            Operation::Add(t) => Node::Add(t),
            Operation::Sub(t) => Node::Sub(t),
            Operation::BitwiseAnd(t) => Node::BitwiseAnd(t),
            Operation::BitwiseXor(t) => Node::BitwiseXor(t),
            Operation::BitwiseOr(t) => Node::BitwiseOr(t),
            Operation::EqualTo(t) => Node::EqualTo(t),
            Operation::NotEqualTo(t) => Node::NotEqualTo(t),
            Operation::Greater(t) => Node::Greater(t),
            Operation::GreaterEqual(t) => Node::GreaterEqual(t),
            Operation::Lesser(t) => Node::Lesser(t),
            Operation::LesserEqual(t) => Node::LesserEqual(t),
            Operation::LogicalAnd(t) => Node::LogicalAnd(t),
            Operation::LogicalOr(t) => Node::LogicalOr(t),
            Operation::Name { name, colon_token } => Node::Name { name, colon_token },
            Operation::Tuple(t) => Node::Tuple(t),
            Operation::SubExpression(_) => {
                panic!("sub expressions may not enter the output stack")
            }
        }
    }
}

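/// A parsed expression: a flat list of `Node`s in postfix order, delimited
/// by `first_token` and `last_token` in the source. The `make_dst_factory`
/// attribute generates the `Expression::build` constructor used throughout
/// this module for the unsized `contents` tail.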
#[derive(Debug, Eq, PartialEq)]
#[make_dst_factory(pub)]
pub struct Expression<'source> {
    pub first_token: Option<Token<'source>>,
    pub last_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
    pub contents: [Node<'source>],
}

impl<'source> Expression<'source> {
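    /// Parses an expression via the shunting-yard algorithm, returning
    /// `None` if the lexer yields no expression at all.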
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Option<Box<Self>> {
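        // Lexigrams which may directly precede a unary operator: after any
        // of these (or at the very start of input), the next token is in
        // "unary position" rather than binary/call position.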
        const UNARY_POSITION: &[Lexigram] = &[
            Lexigram::Plus,
            Lexigram::Minus,
            Lexigram::Star,
            Lexigram::Slash,
            Lexigram::Ampersand,
            Lexigram::Caret,
            Lexigram::Pipe,
            Lexigram::DoubleEqual,
            Lexigram::BangEqual,
            Lexigram::Greater,
            Lexigram::GreaterEqual,
            Lexigram::Lesser,
            Lexigram::LesserEqual,
            Lexigram::And,
            Lexigram::Or,
            Lexigram::Comma,
            Lexigram::Colon,
            Lexigram::Triangle,
            Lexigram::OpenParen,
            Lexigram::CloseSquare,
        ];
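        // Lexigrams that legally end an expression; a trailing comma before
        // one of these is allowed and does not produce a tuple.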
        const EXPRESSION_TERMINATORS: &[Lexigram] = &[
            Lexigram::CloseParen,
            Lexigram::CloseBrace,
            Lexigram::Then,
            Lexigram::End,
            Lexigram::SingleEqual,
            Lexigram::Semicolon,
        ];
        let first_token = lexer.peek().copied().transpose().ok().flatten();
        let mut last_token: Option<Token> = None;
        let mut diagnostics = Diagnostics::default();
        let mut contents = Vec::new();
        let mut stack = Vec::new();
        // Pop every pending operator (but not open parens) onto the output.
        let flush = |output: &mut Vec<Node<'source>>, stack: &mut Vec<Operation<'source>>| {
            while let Some(op) = stack.pop_if(|x| !matches!(x, Operation::SubExpression(_))) {
                output.push(op.into());
            }
        };
        // Pop operators that bind at least as tightly as `operator` (more
        // tightly, for right-associative operators), then push it.
        let push_with_precedence =
            |output: &mut Vec<Node<'source>>,
             stack: &mut Vec<Operation<'source>>,
             operator: Operation<'source>| {
                while let Some(op) = stack.pop_if(|x| {
                    if operator.left_associative() {
                        x.precedence() >= operator.precedence()
                    } else {
                        x.precedence() > operator.precedence()
                    }
                }) {
                    output.push(op.into());
                }
                stack.push(operator);
            };
        loop {
            let unary_position = last_token.is_none_or(|t| UNARY_POSITION.contains(&t.lexigram));
            let t = diagnostics.wrap(lexer.peek().copied());
            // Matches `Some(Token { lexigram: Lexigram::$lexi, .. })`,
            // optionally binding the whole token to `$name`.
            macro_rules! lexi {
                ($($name:ident)? @ $lexi:ident) => {
                    Some($($name @)? Token {
                        lexigram: Lexigram::$lexi,
                        ..
                    })
                };
            }
            // Shorthand for pushing an operation with precedence handling.
            macro_rules! op {
                ($op:ident($inner:expr)) => {
                    push_with_precedence(&mut contents, &mut stack, Operation::$op($inner))
                };
            }
            match t {
                lexi!(number @ Number) => {
                    if !unary_position {
                        op!(Call(number));
                    }
                    contents.push(Node::Number(number));
                }
                lexi!(string @ String) => {
                    if !unary_position {
                        op!(Call(string));
                    }
                    contents.push(Node::String(string));
                }
                lexi!(discard @ Discard) => {
                    if !unary_position {
                        op!(Call(discard));
                    }
                    last_token = lexer.next().transpose().ok().flatten();
                    let colon_token = diagnostics.next_if(lexer, &[Lexigram::Colon]);
                    if let Some(colon_token) = colon_token {
                        last_token = Some(colon_token);
                        push_with_precedence(
                            &mut contents,
                            &mut stack,
                            Operation::Name {
                                name: discard,
                                colon_token,
                            },
                        );
                    }
                    continue;
                }
                lexi!(ident @ Ident) => {
                    if !unary_position {
                        op!(Call(ident));
                    }
                    last_token = lexer.next().transpose().ok().flatten();
                    if let Some(Ok(
                        colon_token @ Token {
                            lexigram: Lexigram::Colon,
                            ..
                        },
                    )) = lexer.peek().copied()
                    {
                        last_token = lexer.next().transpose().ok().flatten();
                        push_with_precedence(
                            &mut contents,
                            &mut stack,
                            Operation::Name {
                                name: ident,
                                colon_token,
                            },
                        );
                    } else {
                        contents.push(Node::Variable(ident));
                    }
                    continue;
                }
                lexi!(t @ True) => {
                    if !unary_position {
                        op!(Call(t));
                    }
                    contents.push(Node::Bool(true, t));
                }
                lexi!(t @ False) => {
                    if !unary_position {
                        op!(Call(t));
                    }
                    contents.push(Node::Bool(false, t));
                }
                lexi!(t @ OpenParen) => {
                    if !unary_position {
                        op!(Call(t));
                    }
                    stack.push(Operation::SubExpression(t));
                }
                lexi!(t @ OpenBrace) => {
                    if !unary_position {
                        op!(Call(t));
                    }
                    lexer.next();
                    contents.push(Node::Block(Block::child(&mut *lexer)));
                    diagnostics.expect(lexer.peek().copied(), &[Lexigram::CloseBrace]);
                }

                lexi!(t @ Plus) if unary_position => op!(Positive(t)),
                lexi!(t @ Minus) if unary_position => op!(Negative(t)),
                lexi!(t @ At) if unary_position => {
                    lexer.next();
                    let annotation = Annotation::new(t, lexer);
                    last_token = annotation.close_square;
                    op!(Annotation(Box::new(annotation)));
                    continue;
                }
                lexi!(dot_token @ Dot) if !unary_position => {
                    last_token = lexer.next().transpose().ok().flatten();
                    if let Some(index) =
                        diagnostics.next_if(lexer, &[Lexigram::Ident, Lexigram::Number])
                    {
                        last_token = Some(index);
                        push_with_precedence(
                            &mut contents,
                            &mut stack,
                            Operation::Field { dot_token, index },
                        );
                    }
                    continue;
                }
                lexi!(t @ DotStar) if !unary_position => op!(Deref(t)),
                lexi!(t @ Bang) if !unary_position => op!(Bind(t)),
                lexi!(t @ Plus) if !unary_position => op!(Add(t)),
                lexi!(t @ Minus) if !unary_position => op!(Sub(t)),
                lexi!(t @ Star) if !unary_position => op!(Mul(t)),
                lexi!(t @ Slash) if !unary_position => op!(Div(t)),
                lexi!(t @ Ampersand) if !unary_position => op!(BitwiseAnd(t)),
                lexi!(t @ Caret) if !unary_position => op!(BitwiseXor(t)),
                lexi!(t @ Pipe) if !unary_position => op!(BitwiseOr(t)),
                lexi!(t @ DoubleEqual) if !unary_position => op!(EqualTo(t)),
                lexi!(t @ BangEqual) if !unary_position => op!(NotEqualTo(t)),
                lexi!(t @ Greater) if !unary_position => op!(Greater(t)),
                lexi!(t @ GreaterEqual) if !unary_position => op!(GreaterEqual(t)),
                lexi!(t @ Lesser) if !unary_position => op!(Lesser(t)),
                lexi!(t @ LesserEqual) if !unary_position => op!(LesserEqual(t)),
                lexi!(t @ And) if !unary_position => op!(LogicalAnd(t)),
                lexi!(t @ Or) if !unary_position => op!(LogicalOr(t)),
                lexi!(t @ Triangle) if !unary_position => op!(Pipe(t)),
                lexi!(t @ Comma) if !unary_position => {
                    let potential_last_token = lexer.next().transpose().ok().flatten();
                    // A trailing comma before an expression terminator is
                    // permitted and does not create a tuple.
                    if !lexer
                        .peek()
                        .copied()
                        .transpose()
                        .ok()
                        .flatten()
                        .is_none_or(|t| EXPRESSION_TERMINATORS.contains(&t.lexigram))
                    {
                        op!(Tuple(t));
                        last_token = potential_last_token;
                    }
                    continue;
                }
                lexi!( @ If) => contents.push(If::from(&mut *lexer).into()),
                lexi!( @ Match) => contents.push(Match::new(&mut *lexer).into()),
                lexi!( @ Enum) => contents.push(Enum::from(&mut *lexer).into()),
                lexi!(t @ CloseParen) if unary_position => {
                    // `()` directly after an open paren is the unit literal;
                    // any other close paren in unary position is an error.
                    if let Some(
                        last_token @ Token {
                            lexigram: Lexigram::OpenParen,
                            ..
                        },
                    ) = last_token
                    {
                        contents.push(Node::Unit(last_token, t));
                    } else {
                        diagnostics.errors.push(Error::IncompleteExpression);
                    }
                    if !matches!(stack.pop(), Some(Operation::SubExpression(_))) {
                        diagnostics.errors.push(Error::UnexpectedCloseParen(t));
                    }
                }
                lexi!(t @ CloseParen) if !unary_position => {
                    flush(&mut contents, &mut stack);
                    if !matches!(stack.pop(), Some(Operation::SubExpression(_))) {
                        diagnostics.errors.push(Error::UnexpectedCloseParen(t));
                    }
                }
                _ => {
                    if unary_position {
                        if !contents.is_empty() || !stack.is_empty() {
                            diagnostics.errors.push(Error::IncompleteExpression);
                        }
                    } else {
                        flush(&mut contents, &mut stack);
                        if !stack.is_empty() {
                            diagnostics.errors.push(Error::IncompleteExpression);
                        }
                    }
                    if contents.is_empty() && diagnostics.errors.is_empty() {
                        return None;
                    }
                    return Some(Expression::build(
                        first_token,
                        last_token,
                        diagnostics,
                        contents,
                    ));
                }
            }
            last_token = lexer.next().transpose().ok().flatten();
        }
    }
}

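/// An `@name[ ... ]` annotation attached to an expression.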
#[derive(Debug, Eq, PartialEq)]
pub struct Annotation<'source> {
    pub at_sign: Token<'source>,
    pub name: Option<Token<'source>>,
    pub open_square: Option<Token<'source>>,
    pub tokens: Box<[Token<'source>]>,
    pub close_square: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Annotation<'source> {
    fn new(at_sign: Token<'source>, lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let name = diagnostics.next_if(lexer, &[Lexigram::Ident]);
        let open_square = diagnostics.next_if(lexer, &[Lexigram::OpenSquare]);
        let mut tokens = Vec::new();
        let mut square_level = 0;
        // Collect every token up to the matching close square bracket,
        // tracking nesting so inner brackets are captured verbatim.
        loop {
            let token = match diagnostics.wrap(lexer.peek().copied()) {
                Some(
                    token @ Token {
                        lexigram: Lexigram::OpenSquare,
                        ..
                    },
                ) => {
                    square_level += 1;
                    token
                }
                Some(
                    token @ Token {
                        lexigram: Lexigram::CloseSquare,
                        ..
                    },
                ) => {
                    if square_level > 0 {
                        square_level -= 1;
                        token
                    } else {
                        break;
                    }
                }
                Some(token) => token,
                None => break,
            };
            lexer.next();
            tokens.push(token);
        }
        let close_square = diagnostics.next_if(lexer, &[Lexigram::CloseSquare]);
        Self {
            at_sign,
            name,
            open_square,
            tokens: tokens.into_boxed_slice(),
            close_square,
            diagnostics,
        }
    }
}

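/// An `if ... then ... else ... end` conditional expression.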
#[derive(Debug, Eq, PartialEq)]
pub struct If<'source> {
    pub if_token: Token<'source>,
    pub condition: Option<Box<Expression<'source>>>,
    pub then_token: Option<Token<'source>>,
    pub first: Box<Block<'source>>,
    pub else_token: Option<Token<'source>>,
    pub else_kind: Option<Token<'source>>,
    pub second: Box<Block<'source>>,
    pub end_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> From<If<'source>> for Node<'source> {
    fn from(if_block: If<'source>) -> Self {
        Self::If(Box::new(if_block))
    }
}

impl<'source> From<&mut Peekable<Lexer<'source>>> for If<'source> {
    fn from(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let if_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let mut diagnostics = Diagnostics::default();
        let condition = diagnostics.expect_expression(lexer);
        let then_token = diagnostics.next_if(lexer, &[Lexigram::Then]);
        let first = Block::child(&mut *lexer);
        let (second, else_token, else_kind) = if let else_token @ Some(Token {
            lexigram: Lexigram::Else,
            ..
        }) = diagnostics.wrap(lexer.peek().copied())
        {
            lexer.next();
            // An `else` may be followed by `then` (a plain else block)
            // or by another `if` (an else-if chain).
            let (second, else_kind) = match diagnostics.wrap(lexer.peek().copied()) {
                else_kind @ Some(Token {
                    lexigram: Lexigram::Then,
                    ..
                }) => {
                    lexer.next();
                    (Block::child(&mut *lexer), else_kind)
                }
                else_kind @ Some(Token {
                    lexigram: Lexigram::If,
                    ..
                }) => (
                    Block::build(
                        Expression::build(
                            None,
                            None,
                            Diagnostics::default(),
                            [Self::from(&mut *lexer).into()],
                        )
                        .into(),
                        Diagnostics::default(),
                        [],
                    ),
                    else_kind,
                ),
                _ => {
                    diagnostics.expect(lexer.peek().copied(), &[Lexigram::Then, Lexigram::If]);
                    (Box::default(), None)
                }
            };
            (second, else_token, else_kind)
        } else {
            (Box::default(), None, None)
        };
        let end_token = diagnostics.expect(lexer.peek().copied(), &[Lexigram::End]);
        Self {
            if_token,
            condition,
            then_token,
            first,
            else_token,
            else_kind,
            second,
            end_token,
            diagnostics,
        }
    }
}

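/// One arm of a `match` expression, including its optional `let` binding.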
#[derive(Debug, Eq, PartialEq)]
pub struct MatchCase<'source> {
    pub let_token: Option<Token<'source>>,
    pub binding: Option<Binding<'source>>,
    pub equals_token: Option<Token<'source>>,
    pub case: Option<Box<Expression<'source>>>,
    pub arrow_token: Option<Token<'source>>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
}

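/// A `match ... then ... end` expression and its arms.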
#[derive(Debug, Eq, PartialEq)]
#[make_dst_factory(pub)]
pub struct Match<'source> {
    pub match_token: Token<'source>,
    pub expression: Option<Box<Expression<'source>>>,
    pub then_token: Option<Token<'source>>,
    pub end_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
    pub cases: [MatchCase<'source>],
}

impl<'source> From<Box<Match<'source>>> for Node<'source> {
    fn from(match_block: Box<Match<'source>>) -> Self {
        Self::Match(match_block)
    }
}

impl<'source> Match<'source> {
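    /// Parses a `match` expression; assumes the caller has peeked a `match`
    /// token. The closing `end` is expected but left for the caller to
    /// consume.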
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Box<Self> {
        let match_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let mut diagnostics = Diagnostics::default();

        let expression = diagnostics.expect_expression(lexer);
        let then_token = diagnostics.next_if(lexer, &[Lexigram::Then]);
        let mut cases = Vec::new();

        loop {
            // Each arm is either `let binding = case => expression;`
            // or a bare `case => expression;`.
            let (let_token, binding, equals_token, case) = if let let_token @ Some(Token {
                lexigram: Lexigram::Let,
                ..
            }) =
                diagnostics.wrap(lexer.peek().copied())
            {
                lexer.next();
                let binding = Binding::new(lexer)
                    .map_err(|e| diagnostics.errors.push(e))
                    .ok();
                let (equals_token, case) = if let equals_token @ Some(Token {
                    lexigram: Lexigram::SingleEqual,
                    ..
                }) = diagnostics.wrap(lexer.peek().copied())
                {
                    lexer.next();
                    let case = diagnostics.expect_expression(lexer);
                    (equals_token, case)
                } else {
                    (None, None)
                };
                (let_token, binding, equals_token, case)
            } else {
                let case = diagnostics.expect_expression(lexer);
                (None, None, None, case)
            };
            let arrow_token = diagnostics.next_if(lexer, &[Lexigram::DoubleArrow]);
            let expression = diagnostics.expect_expression(lexer);
            let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);
            cases.push(MatchCase {
                let_token,
                binding,
                equals_token,
                case,
                arrow_token,
                expression,
                semicolon_token,
            });
            if semicolon_token.is_none()
                || diagnostics
                    .wrap(lexer.peek().copied())
                    .is_some_and(|t| t.lexigram == Lexigram::End)
            {
                break;
            }
        }
        let end_token = diagnostics.expect(lexer.peek().copied(), &[Lexigram::End]);
        Match::build(
            match_token,
            expression,
            then_token,
            end_token,
            diagnostics,
            cases,
        )
    }
}

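/// An `enum ... end` declaration listing its variants as an expression.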
#[derive(Debug, Eq, PartialEq)]
pub struct Enum<'source> {
    pub enum_token: Token<'source>,
    pub variants: Option<Box<Expression<'source>>>,
    pub end_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> From<Enum<'source>> for Node<'source> {
    fn from(enum_block: Enum<'source>) -> Self {
        Self::Enum(Box::new(enum_block))
    }
}

impl<'source> From<&mut Peekable<Lexer<'source>>> for Enum<'source> {
    fn from(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let enum_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let mut diagnostics = Diagnostics::default();
        let variants = diagnostics.expect_expression(lexer);
        let end_token = diagnostics.expect(lexer.peek().copied(), &[Lexigram::End]);
        Self {
            enum_token,
            variants,
            end_token,
            diagnostics,
        }
    }
}

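/// A single statement within a block.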
#[derive(Debug, Eq, PartialEq)]
pub enum Statement<'source> {
    Sequence(Sequence<'source>),
    Let(Let<'source>),
    Rebind(Rebind<'source>),
    Set(Set<'source>),
    Use(Use<'source>),
}

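/// A `let binding = expression;` statement.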
#[derive(Debug, Eq, PartialEq)]
pub struct Let<'source> {
    pub let_token: Token<'source>,
    pub binding: Option<Binding<'source>>,
    pub equals_token: Option<Token<'source>>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Let<'source> {
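    /// Parses the remainder of a `let` statement; the caller has already
    /// consumed `let_token`.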
    pub fn new(let_token: Token<'source>, lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let binding = Binding::new(lexer)
            .map_err(|e| diagnostics.errors.push(e))
            .ok();
        let equals_token = diagnostics.next_if(lexer, &[Lexigram::SingleEqual]);
        let expression = diagnostics.expect_expression(lexer);
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);

        Let {
            let_token,
            binding,
            equals_token,
            expression,
            semicolon_token,
            diagnostics,
        }
    }
}

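/// A single identifier (plus trailing comma, if any) in a rebind list.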
#[derive(Debug, Eq, PartialEq)]
pub struct RebindSubject<'source> {
    pub ident_token: Option<Token<'source>>,
    pub comma_token: Option<Token<'source>>,
}

#[derive(Debug, Eq, PartialEq)]
pub enum RebindBy<'source> {
    Glob {
        star_token: Token<'source>,
    },
    Identifiers {
        bindings: Box<[RebindSubject<'source>]>,
    },
}

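/// A `let ^ ...;` statement that rebinds existing names, either by glob
/// (`*`) or by an explicit identifier list.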
#[derive(Debug, Eq, PartialEq)]
pub struct Rebind<'source> {
    pub let_token: Token<'source>,
    pub caret_token: Token<'source>,
    pub by: RebindBy<'source>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Rebind<'source> {
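    /// Parses the remainder of a rebind statement; the caller has already
    /// consumed `let_token` and peeked the caret.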
    pub fn new(let_token: Token<'source>, lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let caret_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let by = if let Some(
            star_token @ Token {
                lexigram: Lexigram::Star,
                ..
            },
        ) = diagnostics.wrap(lexer.peek().copied())
        {
            lexer.next();
            RebindBy::Glob { star_token }
        } else {
            let mut bindings = Vec::new();
            if let Some(ident_token) = diagnostics.next_if(lexer, &[Lexigram::Ident]) {
                bindings.push(RebindSubject {
                    ident_token: Some(ident_token),
                    comma_token: None,
                });
            }
            while let comma_token @ Some(Token {
                lexigram: Lexigram::Comma,
                ..
            }) = diagnostics.wrap(lexer.peek().copied())
            {
                lexer.next();
                bindings.push(RebindSubject {
                    ident_token: diagnostics.next_if(lexer, &[Lexigram::Ident]),
                    comma_token,
                });
            }
            RebindBy::Identifiers {
                bindings: bindings.into_boxed_slice(),
            }
        };
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);
        Rebind {
            let_token,
            caret_token,
            by,
            semicolon_token,
            diagnostics,
        }
    }
}

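/// An expression statement terminated by a semicolon.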
#[derive(Debug, Eq, PartialEq)]
pub struct Sequence<'source> {
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Token<'source>,
}

impl<'source> Sequence<'source> {
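    /// Parses an expression and, if a semicolon follows, wraps it in a
    /// `Sequence`; otherwise the bare expression is returned as the error
    /// value so the caller can treat it as a block result.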
    pub fn try_sequence(
        lexer: &mut Peekable<Lexer<'source>>,
    ) -> Result<Self, Option<Box<Expression<'source>>>> {
        let expression = Expression::new(&mut *lexer);
        if let Some(Ok(
            semicolon_token @ Token {
                lexigram: Lexigram::Semicolon,
                ..
            },
        )) = lexer.peek().copied()
        {
            lexer.next();
            Ok(Sequence {
                expression,
                semicolon_token,
            })
        } else {
            Err(expression)
        }
    }
}

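/// A `set target = expression;` statement.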
#[derive(Debug, Eq, PartialEq)]
pub struct Set<'source> {
    pub set_token: Token<'source>,
    pub target: Option<Box<Expression<'source>>>,
    pub equals_token: Option<Token<'source>>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Set<'source> {
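    /// Parses a `set` statement; assumes the caller has peeked a `set` token.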
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let set_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let target = diagnostics.expect_expression(lexer);
        let equals_token = diagnostics.next_if(lexer, &[Lexigram::SingleEqual]);
        let expression = diagnostics.expect_expression(lexer);
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);

        Set {
            set_token,
            target,
            equals_token,
            expression,
            semicolon_token,
            diagnostics,
        }
    }
}

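/// A `use expression;` statement.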
#[derive(Debug, Eq, PartialEq)]
pub struct Use<'source> {
    pub use_token: Token<'source>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Use<'source> {
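    /// Parses a `use` statement; assumes the caller has peeked a `use` token.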
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let use_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let expression = diagnostics.expect_expression(lexer);
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);

        Self {
            use_token,
            expression,
            semicolon_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct NumericBinding<'source> {
    pub binding: Binding<'source>,
    pub comma_token: Option<Token<'source>>,
}

#[derive(Debug, Eq, PartialEq)]
pub struct NamedBinding<'source> {
    pub field: Token<'source>,
    pub binding: Option<NamedSubBinding<'source>>,
    pub comma_token: Option<Token<'source>>,
}

#[derive(Debug, Eq, PartialEq)]
pub struct NamedSubBinding<'source> {
    pub colon_token: Token<'source>,
    pub binding: Binding<'source>,
}

#[derive(Debug, Eq, PartialEq)]
pub enum BindingMethod<'source> {
    Single(Token<'source>),
    Numeric {
        open_paren: Token<'source>,
        bindings: Box<[NumericBinding<'source>]>,
        close_paren: Option<Token<'source>>,
    },
    Named {
        open_brace: Token<'source>,
        bindings: Box<[NamedBinding<'source>]>,
        close_brace: Option<Token<'source>>,
    },
}

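/// A pattern that binds names: a single identifier, a parenthesized tuple
/// of bindings, or a braced set of named fields.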
#[derive(Debug, Eq, PartialEq)]
pub struct Binding<'source> {
    pub method: BindingMethod<'source>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Binding<'source> {
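    /// Parses a binding pattern, failing with `Error::MissingToken` if the
    /// next token cannot begin one.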
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Result<Self, Error<'source>> {
        match lexer.peek().copied().transpose().map_err(Error::Lexer)? {
            Some(
                t @ Token {
                    lexigram: Lexigram::Ident | Lexigram::Discard,
                    ..
                },
            ) => {
                lexer.next();
                Ok(Binding {
                    method: BindingMethod::Single(t),
                    diagnostics: Diagnostics::default(),
                })
            }
            Some(
                open_paren @ Token {
                    lexigram: Lexigram::OpenParen,
                    ..
                },
            ) => {
                let mut diagnostics = Diagnostics::default();
                let mut bindings = Vec::new();
                lexer.next();
                loop {
                    let t = diagnostics.wrap(lexer.peek().copied());
                    if let Some(Token {
                        lexigram: Lexigram::CloseParen,
                        ..
                    }) = t
                    {
                        break;
                    }
                    if let Ok(binding) = Binding::new(lexer) {
                        let comma_token = diagnostics
                            .wrap(lexer.peek().copied())
                            .filter(|t| t.lexigram == Lexigram::Comma);
                        bindings.push(NumericBinding {
                            binding,
                            comma_token,
                        });
                        if comma_token.is_some() {
                            lexer.next();
                        } else {
                            break;
                        }
                    } else {
                        diagnostics.errors.push(Error::MissingToken {
                            expected: &[
                                Lexigram::Ident,
                                Lexigram::Discard,
                                Lexigram::OpenParen,
                                Lexigram::OpenBrace,
                                Lexigram::CloseParen,
                            ],
                            actual: t,
                        });
                        break;
                    }
                }
                let close_paren = diagnostics.next_if(lexer, &[Lexigram::CloseParen]);
                Ok(Binding {
                    method: BindingMethod::Numeric {
                        open_paren,
                        bindings: bindings.into_boxed_slice(),
                        close_paren,
                    },
                    diagnostics,
                })
            }
            Some(
                open_brace @ Token {
                    lexigram: Lexigram::OpenBrace,
                    ..
                },
            ) => {
                let mut diagnostics = Diagnostics::default();
                let mut bindings = Vec::new();
                lexer.next();
                loop {
                    match diagnostics.wrap(lexer.peek().copied()) {
                        Some(Token {
                            lexigram: Lexigram::CloseBrace,
                            ..
                        }) => break,
                        Some(
                            field @ Token {
                                lexigram: Lexigram::Ident,
                                ..
                            },
                        ) => {
                            lexer.next();
                            // A field may be followed by `: binding` (a
                            // sub-binding), by a comma, or by nothing.
                            match diagnostics.wrap(lexer.peek().copied()) {
                                Some(
                                    colon_token @ Token {
                                        lexigram: Lexigram::Colon,
                                        ..
                                    },
                                ) => {
                                    lexer.next();
                                    match Binding::new(lexer) {
                                        Ok(binding) => {
                                            let comma_token = diagnostics
                                                .wrap(lexer.peek().copied())
                                                .filter(|t| t.lexigram == Lexigram::Comma);
                                            bindings.push(NamedBinding {
                                                field,
                                                binding: Some(NamedSubBinding {
                                                    colon_token,
                                                    binding,
                                                }),
                                                comma_token,
                                            });
                                            if comma_token.is_some() {
                                                lexer.next();
                                            } else {
                                                break;
                                            }
                                        }
                                        Err(e) => {
                                            diagnostics.errors.push(e);
                                            break;
                                        }
                                    }
                                }
                                comma_token @ Some(Token {
                                    lexigram: Lexigram::Comma,
                                    ..
                                }) => {
                                    lexer.next();
                                    bindings.push(NamedBinding {
                                        field,
                                        binding: None,
                                        comma_token,
                                    });
                                }
                                _ => {
                                    bindings.push(NamedBinding {
                                        field,
                                        binding: None,
                                        comma_token: None,
                                    });
                                    break;
                                }
                            }
                        }
                        actual => {
                            diagnostics.errors.push(Error::MissingToken {
                                expected: &[Lexigram::Ident, Lexigram::CloseBrace],
                                actual,
                            });
                            break;
                        }
                    }
                }
                let close_brace = diagnostics.next_if(lexer, &[Lexigram::CloseBrace]);
                Ok(Binding {
                    method: BindingMethod::Named {
                        open_brace,
                        bindings: bindings.into_boxed_slice(),
                        close_brace,
                    },
                    diagnostics,
                })
            }
            actual => Err(Error::MissingToken {
                expected: &[
                    Lexigram::Ident,
                    Lexigram::Discard,
                    Lexigram::OpenParen,
                    Lexigram::OpenBrace,
                ],
                actual,
            }),
        }
    }
}

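/// The body of a `with` function: either a block, or `Never` for a
/// declaration with no body (no trailing semicolon).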
#[derive(Debug, Eq, PartialEq)]
pub enum FunctionBody<'source> {
    Block(Box<Block<'source>>),
    Never,
}

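/// A function introduced by `with`, including its argument binding,
/// optional input and output expressions, and body.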
#[derive(Debug, Eq, PartialEq)]
pub struct Function<'source> {
    pub with_token: Token<'source>,
    pub argument: Option<Binding<'source>>,
    pub colon_token: Option<Token<'source>>,
    pub input: Option<Box<Expression<'source>>>,
    pub single_arrow_token: Option<Token<'source>>,
    pub output: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub body: FunctionBody<'source>,
    pub diagnostics: Diagnostics<'source>,
}

#[derive(Debug, Eq, PartialEq)]
#[expect(
    clippy::large_enum_variant,
    reason = "this is already inside of a (very large) boxed block"
)]
pub enum BlockResult<'source> {
    Expression(Option<Box<Expression<'source>>>),
    Function(Function<'source>),
}

impl BlockResult<'_> {
    #[must_use]
    pub fn is_empty(&self) -> bool {
        match self {
            BlockResult::Expression(expression) => expression.is_none(),
            BlockResult::Function(_) => false,
        }
    }
}

impl Default for BlockResult<'_> {
    fn default() -> Self {
        Self::Expression(None)
    }
}

impl<'source> From<Box<Expression<'source>>> for BlockResult<'source> {
    fn from(expression: Box<Expression<'source>>) -> Self {
        Self::Expression(Some(expression))
    }
}

impl<'source> From<Function<'source>> for BlockResult<'source> {
    fn from(function: Function<'source>) -> Self {
        Self::Function(function)
    }
}

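/// A sequence of statements ending in a result (a trailing expression or a
/// function). The `make_dst_factory` attribute generates `Block::build`.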
#[derive(Debug, Eq, PartialEq)]
#[make_dst_factory(pub)]
pub struct Block<'source> {
    pub result: BlockResult<'source>,
    pub diagnostics: Diagnostics<'source>,
    pub statements: [Statement<'source>],
}

impl Default for Box<Block<'_>> {
    fn default() -> Self {
        Block::build(BlockResult::Expression(None), Diagnostics::default(), [])
    }
}

impl<'source> Block<'source> {
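    /// Parses a top-level block, reporting any trailing tokens as errors.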
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Box<Self> {
        Self::parse(lexer, true)
    }

    /// Parses a nested block, leaving its terminator for the caller.
    fn child(lexer: &mut Peekable<Lexer<'source>>) -> Box<Self> {
        Self::parse(lexer, false)
    }

    fn parse(lexer: &mut Peekable<Lexer<'source>>, root: bool) -> Box<Self> {
        let mut diagnostics = Diagnostics::default();
        let mut statements = Vec::new();
        let result = loop {
            let statement = match diagnostics.wrap(lexer.peek().copied()) {
                Some(
                    let_token @ Token {
                        lexigram: Lexigram::Let,
                        ..
                    },
                ) => {
                    lexer.next();
                    // `let ^` is a rebind; a plain `let` is a binding.
                    if let Some(Ok(Token {
                        lexigram: Lexigram::Caret,
                        ..
                    })) = lexer.peek()
                    {
                        Statement::Rebind(Rebind::new(let_token, lexer))
                    } else {
                        Statement::Let(Let::new(let_token, lexer))
                    }
                }
                Some(Token {
                    lexigram: Lexigram::Set,
                    ..
                }) => Statement::Set(Set::new(lexer)),
                Some(Token {
                    lexigram: Lexigram::Use,
                    ..
                }) => Statement::Use(Use::new(lexer)),
                Some(
                    with_token @ Token {
                        lexigram: Lexigram::With,
                        ..
                    },
                ) => {
                    lexer.next();
                    let mut st_diagnostics = Diagnostics::default();
                    let argument = Binding::new(lexer)
                        .map_err(|e| st_diagnostics.errors.push(e))
                        .ok();
                    let (colon_token, input) = if let Some(
                        t @ Token {
                            lexigram: Lexigram::Colon,
                            ..
                        },
                    ) = st_diagnostics.wrap(lexer.peek().copied())
                    {
                        lexer.next();
                        (Some(t), diagnostics.expect_expression(lexer))
                    } else {
                        (None, None)
                    };
                    let (single_arrow_token, output) = if let Some(
                        t @ Token {
                            lexigram: Lexigram::SingleArrow,
                            ..
                        },
                    ) = st_diagnostics.wrap(lexer.peek().copied())
                    {
                        lexer.next();
                        (Some(t), diagnostics.expect_expression(lexer))
                    } else {
                        (None, None)
                    };
                    let (semicolon_token, body) = if let Some(
                        t @ Token {
                            lexigram: Lexigram::Semicolon,
                            ..
                        },
                    ) = st_diagnostics.wrap(lexer.peek().copied())
                    {
                        lexer.next();
                        (
                            Some(t),
                            FunctionBody::Block(Block::parse(&mut *lexer, root)),
                        )
                    } else {
                        (None, FunctionBody::Never)
                    };

                    break Function {
                        with_token,
                        argument,
                        colon_token,
                        input,
                        single_arrow_token,
                        output,
                        semicolon_token,
                        body,
                        diagnostics: st_diagnostics,
                    }
                    .into();
                }
                _ => match Sequence::try_sequence(&mut *lexer) {
                    Ok(sequence) => Statement::Sequence(sequence),
                    Err(expression) => {
                        break BlockResult::Expression(expression);
                    }
                },
            };
            statements.push(statement);
        };
        if root && let Some(t) = lexer.peek().copied().transpose().ok().flatten() {
            diagnostics
                .errors
                .push(Error::ExpectedStatementOrExpression(t));
        }
        Self::build(result, diagnostics, statements)
    }
}
1517}