#![allow(clippy::should_implement_trait)]
#![allow(clippy::while_let_loop)]
#![allow(clippy::while_let_on_iterator)]

mod component;
mod expressions;
mod lexer;
mod statements;
mod types;

use std::sync::Arc;

use crate::lexer::{Token, TokenData};
use ast::{component::Component, Span};
use claw_ast as ast;
use claw_common::Source;

use miette::{Diagnostic, NamedSource, SourceSpan};
use thiserror::Error;

use component::parse_component;

pub use lexer::tokenize;

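/// Errors produced while parsing a lexed token stream into an AST.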
#[derive(Error, Debug, Diagnostic)]
pub enum ParserError {
    #[error("Failed to parse")]
    Base {
        #[source_code]
        src: Source,
        #[label("Unable to parse this code")]
        span: SourceSpan,
    },
    #[error("{description}")]
    UnexpectedToken {
        #[source_code]
        src: Source,
        #[label("Found {token:?}")]
        span: SourceSpan,
        description: String,
        token: Token,
    },
    #[error("End of input reached")]
    EndOfInput,
    #[error("Feature {feature} not supported yet at {token:?}")]
    NotYetSupported { feature: String, token: Token },
}

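/// Parse a full token stream into a [`Component`].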
pub fn parse(src: Source, tokens: Vec<TokenData>) -> Result<Component, ParserError> {
    let mut input = ParseInput::new(src.clone(), tokens);
    parse_component(src, &mut input)
}

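/// A cursor over a lexed token stream, tracking the current position and
/// keeping a handle to the source for error reporting.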
#[derive(Debug, Clone)]
pub struct ParseInput {
    src: Source,
    tokens: Vec<TokenData>,
    index: usize,
}

impl ParseInput {
    pub fn new(src: Source, tokens: Vec<TokenData>) -> Self {
        ParseInput {
            src,
            tokens,
            index: 0,
        }
    }

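    /// Constructs a [`ParserError::NotYetSupported`] for `feature` at the
    /// current token. Panics if the cursor is already past the end of input.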
    pub fn unsupported_error(&self, feature: &str) -> ParserError {
        ParserError::NotYetSupported {
            feature: feature.to_string(),
            token: self.tokens[self.index].token.clone(),
        }
    }

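    /// Constructs a [`ParserError::UnexpectedToken`] pointing at the most
    /// recently consumed token, so call this after `next` has advanced past
    /// the offending token. Panics if nothing has been consumed yet.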
    pub fn unexpected_token(&self, description: &str) -> ParserError {
        let data = &self.tokens[self.index - 1];
        ParserError::UnexpectedToken {
            src: self.src.clone(),
            span: data.span,
            description: description.to_string(),
            token: data.token.clone(),
        }
    }

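    /// Returns a handle to the source being parsed.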
    pub fn get_source(&self) -> Source {
        self.src.clone()
    }

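    /// Returns true if at least `num` more tokens remain.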
    pub fn has(&self, num: usize) -> bool {
        self.index + num <= self.tokens.len()
    }

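    /// Returns true once every token has been consumed.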
    pub fn done(&self) -> bool {
        self.index >= self.tokens.len()
    }

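    /// Returns the current token without advancing the cursor.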
    pub fn peek(&self) -> Result<&TokenData, ParserError> {
        self.tokens.get(self.index).ok_or(ParserError::EndOfInput)
    }

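    /// Returns the token `n` positions past the cursor without advancing it;
    /// `peekn(0)` looks at the same token as `peek`.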
    pub fn peekn(&self, n: usize) -> Option<&Token> {
        self.tokens.get(self.index + n).map(|t| &t.token)
    }

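    /// Consumes and returns the current token. Note that the cursor advances
    /// even when the input is already exhausted.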
    pub fn next(&mut self) -> Result<&TokenData, ParserError> {
        let result = self.tokens.get(self.index);
        self.index += 1;
        result.ok_or(ParserError::EndOfInput)
    }

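    /// Consumes the next token and returns its span if it matches `token`;
    /// otherwise returns a [`ParserError::UnexpectedToken`] built from
    /// `description`.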
    pub fn assert_next(&mut self, token: Token, description: &str) -> Result<Span, ParserError> {
        let next = self.next()?;
        if next.token == token {
            Ok(next.span)
        } else {
            Err(self.unexpected_token(description))
        }
    }

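    /// Consumes the next token and returns its span only if it matches
    /// `token`; otherwise leaves the cursor in place and returns `None`.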
    pub fn next_if(&mut self, token: Token) -> Option<Span> {
        if self.peek().ok()?.token != token {
            return None;
        }
        Some(self.next().ok()?.span)
    }

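    /// Consumes the next `num` tokens and returns them as a slice, or
    /// [`ParserError::EndOfInput`] if fewer than `num` remain.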
    pub fn slice_next(&mut self, num: usize) -> Result<&[TokenData], ParserError> {
        if self.has(num) {
            let result = &self.tokens[self.index..self.index + num];
            self.index += num;
            Ok(result)
        } else {
            Err(ParserError::EndOfInput)
        }
    }
}

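/// Test helper: lex `source` into tokens and wrap them in a fresh
/// [`ParseInput`]. Panics if lexing fails.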
pub fn make_input(source: &str) -> (Source, ParseInput) {
    let src = Arc::new(NamedSource::new("test", source.to_string()));
    let tokens = crate::lexer::tokenize(src.clone(), source).unwrap();
    (src.clone(), ParseInput::new(src, tokens))
}

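/// Test helper: build a [`Span`] from a start offset and a length.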
pub fn make_span(start: usize, len: usize) -> Span {
    Span::new(start.into(), len)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_peek() {
        let (_src, mut input) = make_input("export func");
        assert_eq!(input.peek().unwrap().token, Token::Export);
        assert_eq!(input.peek().unwrap().token, Token::Export);
        assert_eq!(input.peek().unwrap().token, Token::Export);
        input.next().unwrap();
        assert_eq!(input.peek().unwrap().token, Token::Func);
        assert_eq!(input.peek().unwrap().token, Token::Func);
        assert_eq!(input.peek().unwrap().token, Token::Func);
    }

    #[test]
    fn test_peekn() {
        let (_src, mut input) = make_input("export func () -> {}");
        assert_eq!(input.peekn(0).unwrap(), &Token::Export);
        assert_eq!(input.peekn(1).unwrap(), &Token::Func);
        assert_eq!(input.peekn(2).unwrap(), &Token::LParen);
        input.next().unwrap();
        assert_eq!(input.peekn(0).unwrap(), &Token::Func);
        assert_eq!(input.peekn(1).unwrap(), &Token::LParen);
        assert_eq!(input.peekn(2).unwrap(), &Token::RParen);
    }
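
    // A sketch of coverage for the conditional-consume helpers, reusing the
    // token variants exercised above; it assumes "export func" lexes to
    // exactly [Export, Func], as test_peek does.
    #[test]
    fn test_next_if_and_assert_next() {
        let (_src, mut input) = make_input("export func");
        // No match: the cursor must stay put.
        assert!(input.next_if(Token::Func).is_none());
        assert_eq!(input.peek().unwrap().token, Token::Export);
        // Match: the token is consumed and its span returned.
        assert!(input.next_if(Token::Export).is_some());
        // assert_next consumes the matching token or errors with `description`.
        assert!(input.assert_next(Token::Func, "expected 'func'").is_ok());
        assert!(input.done());
    }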
}