use crate::lexer::Token;
use crate::lexer::rules::{MatchResult, LexerRule, WordChar, TokenError};
use crate::lexer::rules::strmatcher::StrMatcher;


/// Similar to MultiCharRule, but also requires that the keyword start at a word boundary.
#[derive(Clone)]
pub struct KeywordRule {
    result: Token,
    matcher: StrMatcher<'static>,
}

impl KeywordRule {
    pub fn new(result: Token, target: &'static str) -> Self {
        debug_assert!(!target.is_empty());
        
        KeywordRule {
            result,
            matcher: StrMatcher::case_sensitive(target),
        }
    }
}

impl LexerRule for KeywordRule {
    fn reset(&mut self) {
        self.matcher.reset();
    }
    
    fn current_state(&self) -> MatchResult {
        self.matcher.last_match_result()
    }
    
    fn try_match(&mut self, prev: Option<char>, next: char) -> MatchResult {
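        // Only check the boundary before the first character is consumed;
        // once the matcher has made progress, `prev` lies inside the keyword itself.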
        if self.matcher.count() == 0 {
            let at_word_boundary = match prev {
                Some(ch) => !ch.is_word_alphanumeric(),
                None => true,
            };
            if !at_word_boundary {
                return MatchResult::NoMatch; // keywords must begin at a word boundary
            }
        }
        
        self.matcher.try_match(next)
    }
    
    fn get_token(&self) -> Result<Token, TokenError> {
        debug_assert!(self.current_state().is_complete_match());
        Ok(self.result.clone())
    }
}
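
// A minimal behavioural sketch of this rule, compiled only under `cfg(test)`.
// It assumes `Token` has a unit variant named `Let` (hypothetical; substitute
// whichever keyword variant the lexer actually defines) and that alphanumeric
// characters count as word characters for `is_word_alphanumeric()`.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn keyword_requires_word_boundary() {
        // Feed "let" one character at a time, as the lexer's dispatch loop would.
        // At the start of input (prev == None) the rule should complete.
        let mut rule = KeywordRule::new(Token::Let, "let");
        let mut prev = None;
        for ch in "let".chars() {
            rule.try_match(prev, ch);
            prev = Some(ch);
        }
        assert!(rule.current_state().is_complete_match());
        assert!(rule.get_token().is_ok());

        // The same "let" preceded by a word character (e.g. the 't' in "outlet")
        // must be rejected on the very first character.
        rule.reset();
        let result = rule.try_match(Some('t'), 'l');
        assert!(matches!(result, MatchResult::NoMatch));
    }
}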