mod cursor;
mod error;
mod scanner;
mod span;
mod token;
pub use error::{LexerError, LexerErrorKind, LexerResult};
pub use scanner::Lexer;
pub use span::{BytePos, Position, SourceFile, SourceId, Span};
pub use token::{
is_integer_only_suffix, is_valid_float_suffix, is_valid_int_suffix, validate_numeric_suffix,
Delimiter, DocComment, DocCommentKind, DocComments, IntBase, InterpolatedPart, Keyword,
LiteralKind, NumericSuffixKind, NumericSuffixValidation, Token, TokenKind, FLOAT_SUFFIXES,
INTEGER_SUFFIXES, NUMERIC_SUFFIXES,
};
/// Tokenizes `source` into a flat token stream, using an anonymous
/// (unnamed) source file for span bookkeeping.
///
/// # Errors
///
/// Returns a [`LexerError`] (via [`LexerResult`]) when lexing fails.
pub fn tokenize(source: &str) -> LexerResult<Vec<Token>> {
    // The anonymous `SourceFile` temporary lives until the end of this
    // expression statement, so borrowing it for the `Lexer` is sound; the
    // returned tokens do not borrow the file (it was a local in the
    // original formulation as well).
    Lexer::new(&SourceFile::anonymous(source)).tokenize()
}
/// Tokenizes `source`, attributing spans to `filename` so diagnostics
/// can report the originating file.
///
/// # Errors
///
/// Returns a [`LexerError`] (via [`LexerResult`]) when lexing fails.
pub fn tokenize_file(filename: &str, source: &str) -> LexerResult<Vec<Token>> {
    // Named `SourceFile` temporary is borrowed only for the duration of
    // this expression; the token vector is owned by the caller.
    Lexer::new(&SourceFile::new(filename, source)).tokenize()
}
/// Tokenizes `source` (as an anonymous source file) and additionally
/// collects doc comments, returning both.
///
/// # Errors
///
/// Returns a [`LexerError`] (via [`LexerResult`]) when lexing fails.
pub fn tokenize_with_docs(source: &str) -> LexerResult<(Vec<Token>, DocComments)> {
    // Same shape as `tokenize`, but delegates to the doc-collecting
    // entry point on `Lexer`.
    Lexer::new(&SourceFile::anonymous(source)).tokenize_with_docs()
}
/// Tokenizes `source` attributed to `filename`, additionally collecting
/// doc comments, and returns both.
///
/// # Errors
///
/// Returns a [`LexerError`] (via [`LexerResult`]) when lexing fails.
pub fn tokenize_file_with_docs(
    filename: &str,
    source: &str,
) -> LexerResult<(Vec<Token>, DocComments)> {
    // Named-file variant of `tokenize_with_docs`; the `SourceFile`
    // temporary is only borrowed for this expression.
    Lexer::new(&SourceFile::new(filename, source)).tokenize_with_docs()
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke test: a simple `let` binding lexes successfully and yields
    /// at least the four visible tokens (`let`, `x`, `=`, `42`); the
    /// lower bound tolerates any trailing marker the lexer may append.
    #[test]
    fn test_basic_tokenization() {
        let tokens = tokenize("let x = 42").unwrap();
        assert!(tokens.len() >= 4);
    }
}