wgsl_parser/comment.rs

use gramatika::{DebugLisp, Parse, ParseStreamer, Span, Spanned, SpannedError, Substr, Token as _};

use crate::{token::comment_start, ParseStream, Text, Token, TokenKind};

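/// A parsed comment: either a single-line `//` comment or a `/* ... */` block comment.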
#[derive(DebugLisp)]
pub enum Comment {
	Line(LineComment),
	Block(BlockComment),
}

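/// A `//` comment: the `//` start token plus the text that follows it on the same line.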
#[derive(DebugLisp)]
pub struct LineComment {
	pub start: Token,
	pub text: Text,
}

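/// A `/* ... */` comment. Block comments nest, so `children` may contain nested
/// `BlockComment`s interleaved with plain text.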
#[derive(DebugLisp)]
pub struct BlockComment {
	pub start: Token,
	pub children: Vec<BlockCommentChild>,
	pub end: Token,
}

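/// A single child of a block comment: either a run of plain text or a nested block comment.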
#[derive(DebugLisp)]
pub enum BlockCommentChild {
	Text(Text),
	Comment(BlockComment),
}

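// Peeks at the next token to decide which comment form to parse: `//` starts a
// line comment, `/*` starts a block comment; anything else is an error.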
impl Parse for Comment {
	type Stream = ParseStream;

	fn parse(input: &mut Self::Stream) -> gramatika::Result<Self> {
		use TokenKind::*;

		match input.peek() {
			Some(token) => match token.as_matchable() {
				(CommentStart, "//", _) => Ok(Comment::Line(input.parse()?)),
				(CommentStart, "/*", _) => Ok(Comment::Block(input.parse()?)),
				(_, _, span) => Err(SpannedError {
					message: "Expected `//` or `/*`".into(),
					source: input.source(),
					span: Some(span),
				}),
			},
			None => Err(SpannedError {
				message: "Unexpected end of input".into(),
				source: input.source(),
				span: input.prev().map(|token| token.span()),
			}),
		}
	}
}

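// Consumes the `//` token, then collects everything up to (but not including)
// the first token on a later line. An empty comment yields a zero-length `Text`
// span immediately after the `//`.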
impl Parse for LineComment {
	type Stream = ParseStream;

	fn parse(input: &mut Self::Stream) -> gramatika::Result<Self> {
		let start = input.consume(comment_start!["//"])?;
		let start_span = start.span();
		let text = match text_until(input, |peeked| {
			peeked.span().start.line > start_span.start.line
		}) {
			Some(text) => text,
			None => {
				let content = Substr::new();
				let span = Span {
					start: start_span.end,
					end: start_span.end,
				};
				Text(content, span)
			}
		};

		Ok(Self { start, text })
	}
}

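// Consumes the `/*` token, then loops: a nested `/*` recurses into another
// `BlockComment`, `*/` closes this one, and anything else is collected as text.
// Hitting end-of-input before `*/` is an error.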
impl Parse for BlockComment {
	type Stream = ParseStream;

	fn parse(input: &mut Self::Stream) -> gramatika::Result<Self> {
		let start = input.consume(comment_start!["/*"])?;
		let start_span = start.span();
		let mut children: Vec<BlockCommentChild> = vec![];
		loop {
			use TokenKind::*;
			match input.peek().map(|peeked| peeked.as_matchable()) {
				Some((CommentStart, "/*", _)) => {
					children.push(BlockCommentChild::Comment(input.parse()?));
				}
				Some((CommentEnd, "*/", _)) => {
					let end = input.next().unwrap();
					break Ok(Self {
						start,
						children,
						end,
					});
				}
				Some(_) => {
					let text = match text_until(input, |peeked| {
						matches!(
							peeked.as_matchable(),
							(CommentStart, "/*", _) | (CommentEnd, _, _)
						)
					}) {
						Some(text) => text,
						None => {
							let content = Substr::new();
							let span = Span {
								start: start_span.end,
								end: start_span.end,
							};
							Text(content, span)
						}
					};
					children.push(BlockCommentChild::Text(text));
				}
				None => {
					return Err(SpannedError {
						message: "Unexpected end of input".into(),
						source: input.source(),
						span: input.prev().map(|token| token.span()),
					});
				}
			}
		}
	}
}

impl Spanned for Comment {
	fn span(&self) -> Span {
		match self {
			Comment::Line(inner) => inner.span(),
			Comment::Block(inner) => inner.span(),
		}
	}
}

impl Spanned for BlockComment {
	fn span(&self) -> Span {
		self.start.span().through(self.end.span())
	}
}

impl Spanned for LineComment {
	fn span(&self) -> Span {
		self.start.span().through(self.text.span())
	}
}

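/// Greedily consumes tokens as plain text, concatenating them into a single
/// `Text`, until `end_condition` matches the peeked token or the stream is
/// exhausted. Returns `None` if no tokens were consumed.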
fn text_until<F>(input: &mut ParseStream, end_condition: F) -> Option<Text>
where F: Fn(&Token) -> bool {
	let mut result: Option<Text> = None;
	loop {
		match input.peek() {
			Some(token) if !end_condition(token) => {
				let kind = token.kind();
				let token = input.consume_as(kind, Token::plain).unwrap();
				result = match result.take() {
					Some(text) => Some(text.concat(token)),
					None => Some(Text::from(token)),
				}
			}
			_ => {
				break result;
			}
		}
	}
}