Function sqparse::tokenize

source ·
pub fn tokenize(
    val: &str,
    flavor: Flavor
) -> Result<Vec<TokenItem<'_>>, LexerError<'_>>
Expand description

Parses an input string into a list of tokens.

Example

// Tokenize a small Respawn-flavor Squirrel script and count the tokens.
use sqparse::{Flavor, tokenize};

let script = r#"
global function MyFunction

struct {
    int a
} file

string function MyFunction( List<number> values ) {
    values.push(1 + 2)
}
"#;

// This input is well-formed, so the lexer cannot fail here.
let token_list = tokenize(script, Flavor::SquirrelRespawn).unwrap();
assert_eq!(token_list.len(), 29);
Examples found in repository?
examples/print_lexer_error.rs (line 5)
3
4
5
6
7
8
/// Lexes a deliberately malformed example script and pretty-prints the
/// resulting lexer error against the original source text.
fn main() {
    let script = include_str!("print_lexer_error_script.nut");

    // The script is expected to contain a lexer error, so failure is the
    // success path here.
    let err = tokenize(script, Flavor::SquirrelRespawn).unwrap_err();
    println!("{}", err.display(script));
}
More examples
Hide additional examples
examples/print_ast.rs (line 5)
3
4
5
6
7
8
9
/// Tokenizes and parses an example script, then dumps the resulting AST
/// using pretty Debug formatting.
fn main() {
    let script = include_str!("print_ast_script.nut");

    let token_list = tokenize(script, Flavor::SquirrelRespawn).unwrap();
    let tree = parse(&token_list).unwrap();
    println!("{tree:#?}");
}
examples/print_parser_error.rs (line 5)
3
4
5
6
7
8
9
/// Tokenizes an example script that fails to parse, then pretty-prints the
/// parser error against the source text and token list.
fn main() {
    let script = include_str!("print_parser_error_script.nut");

    // Lexing succeeds; the failure this example demonstrates happens in parse.
    let token_list = tokenize(script, Flavor::SquirrelRespawn).unwrap();
    let err = parse(&token_list).unwrap_err();
    println!("{}", err.display(script, &token_list));
}
examples/dryrun.rs (line 41)
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
/// Recursively tokenizes and parses every `.nut`/`.gnut` file under the path
/// given as the first CLI argument, timing each phase per file and reporting
/// aggregate throughput at the end.
///
/// Exits with status 1 on a usage error or on the first lex/parse failure;
/// unreadable files are reported and skipped.
fn main() {
    let mut args = std::env::args();
    let exe = args.next().unwrap();

    // First positional argument: file or directory to process.
    let base_path = match args.next() {
        Some(arg) => PathBuf::from(arg),
        None => {
            eprintln!("Usage: {exe} [path]");
            eprintln!();
            eprintln!("Provide a path to a file to parse that file, or a path to a directory to");
            eprintln!("recursively parse all .nut and .gnut files in the directory");
            std::process::exit(1);
        }
    };

    let mut total_size_bytes = 0;
    let mut total_lex_secs = 0.;
    let mut total_parse_secs = 0.;

    visit(&base_path, &mut |path| {
        // Only Squirrel script files are of interest; silently skip the rest.
        let extension = path.extension().and_then(|val| val.to_str());
        if !matches!(extension, Some("nut") | Some("gnut")) {
            return;
        }

        println!("{}", path.display());

        let file_text = match std::fs::read_to_string(path) {
            Ok(text) => text,
            Err(err) => {
                // Unreadable files (permissions, non-UTF-8, ...) are reported
                // but do not abort the whole run.
                println!("  could not read: {err}");
                return;
            }
        };

        let lex_start = Instant::now();
        let tokens = match tokenize(&file_text, Flavor::SquirrelRespawn) {
            Ok(tokens) => tokens,
            Err(err) => {
                eprintln!("{}", err.display(&file_text));
                std::process::exit(1);
            }
        };
        let lex_secs = lex_start.elapsed().as_secs_f64();
        println!("  tokenize: {lex_secs}s");

        let parse_start = Instant::now();
        if let Err(err) = parse(&tokens) {
            eprintln!("{}", err.display(&file_text, &tokens));
            std::process::exit(1);
        }
        let parse_secs = parse_start.elapsed().as_secs_f64();
        println!("  parse: {parse_secs}s");

        // str::len() already is the byte length; no need to build a byte
        // iterator just to take its len() (was `file_text.bytes().len()`).
        total_size_bytes += file_text.len();
        total_lex_secs += lex_secs;
        total_parse_secs += parse_secs;
    });

    let total_mb = total_size_bytes as f64 / 1048576.;
    // Guard against division by zero (printing NaN/inf) when no matching
    // files were found under `base_path`.
    let lex_rate = if total_lex_secs > 0. { total_mb / total_lex_secs } else { 0. };
    let parse_rate = if total_parse_secs > 0. { total_mb / total_parse_secs } else { 0. };
    println!("Finished!");
    println!("Tokenize: {total_lex_secs:.4}s, {lex_rate:.2} MB/s");
    println!("Parse: {total_parse_secs:.4}s, {parse_rate:.2} MB/s");
}