use std::str::Utf8Error;

use nom_supreme::error::ErrorTree;
use thiserror::Error;

use kodept_core::code_point::CodePoint;
use kodept_core::structure::span::Span;

use crate::lexer::{token, Token};
use crate::token_match::TokenMatch;

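/// Errors that can occur while turning source text into tokens.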
#[derive(Error, Debug)]
pub enum TokenizeError {
    #[error("Error while tokenizing: {0}")]
    Parse(#[from] ErrorTree<String>),
    #[error("Error while reading: {0}")]
    IO(#[from] std::io::Error),
    #[error("Found not utf-8 byte at {pos}", pos = _0.valid_up_to())]
    Utf8(#[from] Utf8Error),
}

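/// Streaming tokenizer over a source string. Yields one [`TokenMatch`]
/// per call to [`Iterator::next`], tracking the current byte offset
/// along with a row/column position.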
pub struct Tokenizer<'t> {
    buffer: &'t str,
    pos: usize,
    row: u32,
    col: u16,
}

impl<'t> Tokenizer<'t> {
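    /// Creates a tokenizer positioned at the start of the input
    /// (byte offset 0, row 1, column 1).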
    #[must_use]
    pub const fn new(reader: &'t str) -> Self {
        Self {
            buffer: reader,
            pos: 0,
            row: 1,
            col: 1,
        }
    }

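    /// Consumes the tokenizer, collecting every remaining token into a
    /// `Vec` trimmed to its exact length.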
    pub fn into_vec(self) -> Vec<TokenMatch<'t>> {
        let mut vec = self.collect::<Vec<_>>();
        vec.shrink_to_fit();
        vec
    }
}

impl<'t> Iterator for Tokenizer<'t> {
    type Item = TokenMatch<'t>;

    fn next(&mut self) -> Option<Self::Item> {
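        // The whole buffer has been consumed: the stream is exhausted.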
        if self.buffer[self.pos..].is_empty() {
            return None;
        }

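        // Try to lex one token from the remaining input. If the parser
        // fails, swallow everything that is left as a single `Unknown`
        // token so iteration still terminates.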
        let (rest, token) = match token(&self.buffer[self.pos..]) {
            Ok(x) => x,
            Err(_) => ("", Token::Unknown),
        };

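        // The parser hands back the unconsumed tail, so the matched
        // length is whatever disappeared from the front of the input.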
        let matched_length = self.buffer.len() - rest.len() - self.pos;
        let original_text: &str = &self.buffer[self.pos..self.pos + matched_length];
        let token_match =
            TokenMatch::new(token, Span::new(CodePoint::new(matched_length, self.pos)));

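        // Update the human-readable position (a newline resets the
        // column), then advance the byte offset past the matched text.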
        for ch in original_text.chars() {
            if ch == '\n' {
                self.row += 1;
                self.col = 1;
            } else {
                self.col += 1;
            }
        }
        self.pos += matched_length;

        Some(token_match)
    }
}

#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use kodept_core::code_point::CodePoint;

    use crate::lexer::{
        Identifier::*, Ignore::*, Keyword::*, MathOperator::*, Operator::*, Symbol::*,
    };
    use crate::tokenizer::Tokenizer;

    #[test]
    fn test_tokenizer_simple() {
        let input = " fun foo(x: Int, y: Int) => \n  x + y";
        let tokenizer = Tokenizer::new(input);
        let spans: Vec<_> = tokenizer.collect();

        assert_eq!(spans.len(), 26);
        assert_eq!(
            spans.iter().map(|it| it.token.clone()).collect::<Vec<_>>(),
            vec![
                Whitespace.into(),
                Fun.into(),
                Whitespace.into(),
                Identifier("foo").into(),
                LParen.into(),
                Identifier("x").into(),
                Colon.into(),
                Whitespace.into(),
                Type("Int").into(),
                Comma.into(),
                Whitespace.into(),
                Identifier("y").into(),
                Colon.into(),
                Whitespace.into(),
                Type("Int").into(),
                RParen.into(),
                Whitespace.into(),
                Flow.into(),
                Whitespace.into(),
                Newline.into(),
                Whitespace.into(),
                Identifier("x").into(),
                Whitespace.into(),
                Math(Plus).into(),
                Whitespace.into(),
                Identifier("y").into()
            ]
        );
        assert_eq!(spans.get(20).unwrap().span.point, CodePoint::new(2, 29));
    }
}