wagon_parser/parser/
rhs.rs

1use crate::parser::Span;
2use std::fmt::Display;
3use std::matches;
4
5use super::SpannableNode;
6use super::{Parse, LexerBridge, ParseResult, Tokens, WagParseError, chunk::Chunk, expression::Expression, Peek, Spannable};
7use super::helpers::between;
8
9use wagon_lexer::{productions::Productions, math::Math};
10
11use wagon_macros::new_unspanned;
12
#[derive(PartialEq, Debug, Eq, Hash)]
#[new_unspanned]
/// A right-hand side (AKA alternative) of a rule.
///
/// Any `Rhs` optionally has an expression that evaluates its weight, enclosed by `[]`.
///
/// After the weight, it has a list of chunks (which may be empty).
///
/// # Grammar
/// <span><pre>
/// [Rhs] -> Weight? [Chunk]* `"|"` [Rhs]
///	    |  Weight? [Chunk]* `";"`
///	    ;
/// Weight -> "[" [Expression] "]";
/// </pre></span>
pub struct Rhs {
	/// The weight expression of this alternative.
	pub weight: Option<SpannableNode<Expression>>,
	/// The chunks of this alternative.
	pub chunks: Vec<SpannableNode<Chunk>>,
}
34
35impl Parse for Rhs {
36	fn parse(lexer: &mut LexerBridge) -> ParseResult<Self> {
37		Ok(Self {weight: Self::parse_weight(lexer)?, chunks: Self::parse_chunks(lexer)?})
38	}
39}
40
41impl Rhs {
42
43	fn parse_weight(lexer: &mut LexerBridge) -> ParseResult<Option<SpannableNode<Expression>>> {
44		match lexer.peek() {
45			Some(Ok(Tokens::ProductionToken(Productions::LBr))) => Ok(Some(between(lexer, &Tokens::ProductionToken(Productions::LBr), &Tokens::MathToken(Math::RBr))?)),
46			_ => Ok(None)
47		}
48	}
49
50	fn parse_chunks(lexer: &mut LexerBridge) -> ParseResult<Vec<SpannableNode<Chunk>>> {
51		let mut resp = Vec::new();
52		if lexer.peek() == Some(&Ok(Tokens::ProductionToken(Productions::Semi))) { // If we immediately encounter a ;, this is an empty rule
53			resp.push(SpannableNode::new(Chunk::empty(), lexer.span()));
54		} else {
55			resp.push(SpannableNode::parse(lexer)?);
56			let mut check = lexer.peek();
57			while check.is_some() && !matches!(check, Some(&Ok(Tokens::ProductionToken(Productions::Alternative)) | &Ok(Tokens::ProductionToken(Productions::Semi)))) {
58				if matches!(check, Some(Err(_))) {
59					return Err(WagParseError::Fatal((lexer.span(), "An unknown error occurred during tokenizing".to_string())))
60				}
61				resp.push(SpannableNode::parse(lexer)?);
62				check = lexer.peek();
63			}
64		}
65		Ok(resp)
66	}
67
68	#[cfg(test)]
69	/// Automatically create an empty rule with no weight.
70	pub(crate) fn empty() -> Self {
71		Self {
72            weight: None,
73            chunks: vec![
74                Chunk::empty().into()
75            ]
76        }
77	}
78
79	/// Automatically create a spanned empty rule with no weight.
80	pub(crate) fn empty_spanned(span: Span) -> SpannableNode<Self> {
81		SpannableNode::new(Self {
82			weight: None,
83			chunks: vec![
84				Chunk::empty_spanned(span.clone())
85			]
86		}, span)
87	}
88
89	// #[cfg(test)]
90	// /// Automatically create a rule which is just an ident. See [`Chunk::simple_ident`].
91	// pub(crate) fn simple_ident(ident: &str) -> Self {
92	// 	Self {
93	// 		weight: None,
94	// 		chunks: vec![
95	// 			Chunk::simple_ident(ident).into()
96	// 		]
97	// 	}
98	// }
99
100	#[cfg(test)]
101	/// Automatically create a rule which is just a terminal. See [`Chunk::simple_terminal`].
102	pub(crate) fn simple_terminal(term: &str) -> Self {
103		Self {
104			weight: None,
105			chunks: vec![Chunk::simple_terminal(term).into()],
106		}
107	}
108}
109
110use itertools::Itertools;
111impl Display for Rhs {
112    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
113        if let Some(weight) = &self.weight {
114        	write!(f, "[{weight}] ")?;
115        }
116        write!(f, "{}", self.chunks.iter().join(" "))
117    }
118}