//! AST and parser for Kailua.
//!
//! Kailua parses a single source file in this order:
//!
//! 1. The file contents are read into `kailua_env::Source` in the caller-dependent manner.
//!    This step is flexible enough that the caller is free to make the contents up.
//!    `Source` emits a single span for the entire file.
//!
//! 2. The **lexer** (`kailua_syntax::lex::Lexer`) receives the span and `Source` and
//!    yields a list of tokens (`kailua_syntax::lex::Tok`) with the associated span.
//!
//!    Tokens produced by the lexer include ordinary Lua tokens,
//!    Kailua-specific meta comments (e.g. `--#` = `DashDashHash`),
//!    newlines at the end of meta comments, normal comments and the end of file.
//!    Therefore they are sufficient for highlighting any Lua or Kailua code.
//!
//!    Later steps rely on Kailua-specific tokens to produce Kailua-specific AST nodes.
//!    Care should be taken when tokens are generated by other means.
//!
//! 3. The **nesting analyzer** (`kailua_syntax::lex::Nest`) adds additional information
//!    to the spanned tokens so that the parser can recover from errors.
//!
//!    The "nesting" is roughly a range of tokens that should be skipped on a parsing error.
//!    The nesting can be, well, nested and roughly gives an outline of the source code.
//!
//!    The resulting tokens with nesting information (`kailua_syntax::lex::NestedToken`) have
//!    no actual information about the nesting. Instead, the compact data to distinguish
//!    different nestings is calculated and recorded.
//!
//! 4. The **parser** (`kailua_syntax::Parser`) converts a series of tokens
//!    with nesting information into the chunk (`kailua_syntax::ast::Chunk`).
//!
//!    The chunk also contains a list of spanned scopes, names in each scope,
//!    globally assigned names and additional hints for each token.
//!    This allows for basic analyses without even touching the type checker.

#[macro_use] extern crate parse_generics_shim;
#[macro_use] extern crate kailua_diag;
#[macro_use] extern crate log;
extern crate kailua_env;

use kailua_env::{Source, Span};
use kailua_diag::Report;

pub use string::{Str, Name};
pub use lex::{Lexer, Nest, Tok, NestedToken};
pub use ast::Chunk;
pub use parser::Parser;

pub mod lang;
mod message;
pub mod lex;
pub mod string;
pub mod ast;
mod parser;

/// A one-off function to parse a chunk from a given span in the `Source`.
///
/// Most parsing errors can be recovered, so the caller should also determine if
/// it can continue in spite of reported errors.
/// `kailua_diag::report::TrackMaxKind` is useful for this.
pub fn parse_chunk(source: &Source, span: Span, report: &Report) -> kailua_diag::Result<Chunk> {
    // Resolve the span to a token source; a span not backed by any file is a hard error.
    match source.iter_from_span(span) {
        Some(mut iter) => {
            // Pipeline: raw characters -> tokens -> nesting-annotated tokens -> chunk.
            let mut lexer = Lexer::new(&mut iter, &report);
            let mut nest = Nest::new(&mut lexer);
            Parser::new(&mut nest, &report).into_chunk()
        }
        None => {
            use kailua_diag::Reporter;
            report.fatal(span, message::NoFileForSpan {}).done()
        }
    }
}