use crate::data::ContentReader;
use crate::syntax::{Symbols, Tokenizer};
use crate::{Position, Source, Token};
/// Tokenizes `source` from the beginning and collects every token that
/// starts strictly before `cursor`.
///
/// Tokenization stops as soon as a token begins at or after `cursor`, or
/// when the input is exhausted. If the tokenizer reports an error at any
/// point, an empty vector is returned instead of a partial result.
pub(crate) fn tokenize_input(symbols: &Symbols, source: &Source, cursor: Position) -> Vec<Token> {
    let contents = source.contents();
    let mut tokenizer = Tokenizer::new(symbols, source, ContentReader::new(&contents));
    let mut collected = Vec::new();
    // Pull tokens one at a time; bail out entirely on a tokenizer error.
    while let Some(token) = match tokenizer.pop() {
        Ok(next) => next,
        Err(_) => return Vec::new(),
    } {
        // The first token that begins at or past the cursor is excluded.
        if token.pos.start() >= cursor {
            break;
        }
        collected.push(token);
    }
    collected
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::syntax::test::Code;
    use crate::syntax::Kind::*;
    use assert_matches::assert_matches;

    // Pattern shorthand: match a token by its `kind`, ignoring position.
    macro_rules! kind {
        ($kind: pat) => {
            crate::syntax::Token { kind: $kind, .. }
        };
    }

    #[test]
    fn tokenizing_an_empty_input() {
        let code = Code::new("");
        let tokens = tokenize_input(&code.symbols, code.source(), Position::new(0, 0));
        assert!(tokens.is_empty());
    }

    #[test]
    fn tokenizing_stops_at_the_cursors_position() {
        let code = Code::new("use ieee.std_logic_1164.all");

        // Tokenize up to the end position of the given substring.
        let tokens_up_to = |substr: &str| {
            let cursor = code.s1(substr).pos().end();
            tokenize_input(&code.symbols, code.source(), cursor)
        };

        // Cursor in the middle of an identifier: the identifier itself is
        // still included, but nothing after it.
        assert_matches!(
            tokens_up_to("std_logic_11")[..],
            [kind!(Use), kind!(Identifier), kind!(Dot), kind!(Identifier)]
        );
        // Cursor exactly at the end of the identifier: same result.
        assert_matches!(
            tokens_up_to("std_logic_1164")[..],
            [kind!(Use), kind!(Identifier), kind!(Dot), kind!(Identifier)]
        );
        // Cursor just past the dot: the dot token is included.
        assert_matches!(
            tokens_up_to("std_logic_1164.")[..],
            [
                kind!(Use),
                kind!(Identifier),
                kind!(Dot),
                kind!(Identifier),
                kind!(Dot)
            ]
        );
        // Cursor at the end of the input: every token is included.
        assert_matches!(
            tokens_up_to("std_logic_1164.all")[..],
            [
                kind!(Use),
                kind!(Identifier),
                kind!(Dot),
                kind!(Identifier),
                kind!(Dot),
                kind!(All)
            ]
        );
    }
}