// sas_lexer/lib.rs

//! # SAS Lexer
//! A lexer for the SAS programming language.
//!
//! ## Usage
//! ```rust
//! use sas_lexer::{lex_program, LexResult, TokenIdx};
//!
//! let source = "data mydata; set mydataset; run;";
//!
//! let LexResult { buffer, .. } = lex_program(&source).unwrap();
//!
//! let tokens: Vec<TokenIdx> = buffer.iter_tokens().collect();
//!
//! for token in tokens {
//!     println!("{:?}", buffer.get_token_raw_text(token, &source));
//! }
//! ```
//!
//! ## Features
//!
//! * `macro_sep`: Enables a special virtual `MacroSep` token that is emitted between open code and macro statements when there is no "natural" separator, or when a semicolon is missing between two macro statements (a coding error). This may be used by a downstream parser as a reliable terminating token for dynamic open code, allowing it to avoid lookaheads. Dynamic means that the statement has macro statements in it, like `data %if cond %then %do; t1 %end; %else %do; t2 %end;;`
//! * `serde`: Enables serialization and deserialization of the `ResolvedTokenInfo` struct using the `serde` library. For an example of usage, see the Python bindings crate `sas-lexer-py`.
//! * `opti_stats`: Enables some additional statistics during lexing, used for performance tuning. Not intended for general use.
//!
//! ## License
//!
//! Licensed under the Affero GPL v3 license.

29#![cfg_attr(rustc_nightly, feature(vec_push_within_capacity))]
30
31mod lexer;
32
33pub use lexer::buffer::{
34    Payload, ResolvedTokenInfo, TokenIdx, TokenInfo, TokenInfoIter, TokenizedBuffer,
35};
36pub use lexer::channel::TokenChannel;
37pub use lexer::error;
38pub use lexer::token_type::TokenType;
39pub use lexer::{lex_program, LexResult};