// claw_parser/lib.rs

1#![allow(clippy::should_implement_trait)]
2#![allow(clippy::while_let_loop)]
3#![allow(clippy::while_let_on_iterator)]
4
5mod component;
6mod expressions;
7mod lexer;
8mod names;
9mod statements;
10mod types;
11
12use std::sync::Arc;
13
14use crate::lexer::{Token, TokenData};
15use ast::{component::Component, Span};
16use claw_ast as ast;
17use claw_common::Source;
18
19use miette::{Diagnostic, NamedSource, SourceSpan};
20use thiserror::Error;
21
22use component::parse_component;
23
24pub use lexer::tokenize;
25
/// Errors produced while parsing a token stream into an AST.
///
/// Derives miette's [`Diagnostic`] so that variants carrying a `src` and
/// `span` render as annotated source-code snippets.
#[derive(Error, Debug, Diagnostic)]
pub enum ParserError {
    /// Generic parse failure pointing at the offending span of source.
    #[error("Failed to parse")]
    Base {
        #[source_code]
        src: Source,
        #[label("Unable to parse this code")]
        span: SourceSpan,
    },
    /// A token was encountered that does not fit the grammar at this position.
    #[error("{description}")]
    UnexpectedToken {
        #[source_code]
        src: Source,
        #[label("Found {token:?}")]
        span: SourceSpan,
        /// Human-readable description of what was expected instead.
        description: String,
        /// The token actually found.
        token: Token,
    },
    /// The token stream ended while more input was required.
    #[error("End of input reached")]
    EndOfInput,
    /// A recognized construct that the parser does not implement yet.
    #[error("Feature {feature} not supported yet at {token:?}")]
    NotYetSupported { feature: String, token: Token },
}
49
50pub fn parse(src: Source, tokens: Vec<TokenData>) -> Result<Component, ParserError> {
51    let mut input = ParseInput::new(src.clone(), tokens);
52    parse_component(src, &mut input)
53}
54
/// Cursor over a lexed token stream, tracking the current parse position.
#[derive(Debug, Clone)]
pub struct ParseInput {
    // Handle to the original source text, used when building diagnostics.
    src: Source,
    // The full token stream produced by the lexer.
    tokens: Vec<TokenData>,
    // Index of the next token to be consumed.
    index: usize,
}
61
62impl ParseInput {
63    pub fn new(src: Source, tokens: Vec<TokenData>) -> Self {
64        ParseInput {
65            src,
66            tokens,
67            index: 0,
68        }
69    }
70
71    pub fn unsupported_error(&self, feature: &str) -> ParserError {
72        ParserError::NotYetSupported {
73            feature: feature.to_string(),
74            token: self.tokens[self.index].token.clone(),
75        }
76    }
77
78    pub fn unexpected_token(&self, description: &str) -> ParserError {
79        let data = &self.tokens[self.index - 1];
80        ParserError::UnexpectedToken {
81            src: self.src.clone(),
82            span: data.span,
83            description: description.to_string(),
84            token: data.token.clone(),
85        }
86    }
87
88    pub fn get_source(&self) -> Source {
89        self.src.clone()
90    }
91
92    pub fn has(&self, num: usize) -> bool {
93        self.index + num <= self.tokens.len()
94    }
95
96    pub fn done(&self) -> bool {
97        self.index >= self.tokens.len()
98    }
99
100    pub fn peek(&self) -> Result<&TokenData, ParserError> {
101        self.tokens.get(self.index).ok_or(ParserError::EndOfInput)
102    }
103
104    pub fn peekn(&self, n: usize) -> Option<&Token> {
105        self.tokens.get(self.index + n).map(|t| &t.token)
106    }
107
108    pub fn next(&mut self) -> Result<&TokenData, ParserError> {
109        let result = self.tokens.get(self.index);
110        self.index += 1;
111        result.ok_or(ParserError::EndOfInput)
112    }
113
114    pub fn assert_next(&mut self, token: Token, description: &str) -> Result<Span, ParserError> {
115        let next = self.next()?;
116        if next.token == token {
117            Ok(next.span)
118        } else {
119            Err(self.unexpected_token(description))
120        }
121    }
122
123    pub fn next_if(&mut self, token: Token) -> Option<Span> {
124        {
125            let next = self.peek().ok()?;
126            if next.token != token {
127                return None;
128            }
129        }
130        Some(self.next().ok()?.span)
131    }
132
133    pub fn slice_next(&mut self, num: usize) -> Result<&[TokenData], ParserError> {
134        if self.has(num) {
135            let result = &self.tokens[self.index..self.index + num];
136            self.index += num;
137            Ok(result)
138        } else {
139            Err(ParserError::EndOfInput)
140        }
141    }
142}
143
144pub fn make_input(source: &str) -> (Source, ParseInput) {
145    let src = Arc::new(NamedSource::new("test", source.to_string()));
146    let tokens = crate::lexer::tokenize(src.clone(), source).unwrap();
147    (src.clone(), ParseInput::new(src, tokens))
148}
149
150pub fn make_span(start: usize, len: usize) -> Span {
151    Span::new(start.into(), len)
152}
153
#[cfg(test)]
mod tests {
    use super::*;

    /// `peek` must be idempotent: repeated calls return the same token
    /// until `next` advances the cursor.
    #[test]
    fn test_peek() {
        let (_src, mut input) = make_input("export func");
        for _ in 0..3 {
            assert_eq!(input.peek().unwrap().token, Token::Export);
        }
        input.next().unwrap();
        for _ in 0..3 {
            assert_eq!(input.peek().unwrap().token, Token::Func);
        }
    }

    /// `peekn` looks ahead relative to the current cursor position.
    #[test]
    fn test_peekn() {
        let (_src, mut input) = make_input("export func () -> {}");
        for (n, expected) in [Token::Export, Token::Func, Token::LParen].iter().enumerate() {
            assert_eq!(input.peekn(n).unwrap(), expected);
        }
        input.next().unwrap();
        for (n, expected) in [Token::Func, Token::LParen, Token::RParen].iter().enumerate() {
            assert_eq!(input.peekn(n).unwrap(), expected);
        }
    }
}
181}