Logos

Trait Logos

Source
pub trait Logos<'source>: Sized {
    type Extras;
    type Source: Source + ?Sized + 'source;
    type Error: Default + Clone + PartialEq + Debug + 'source;

    // Required method
    fn lex(
        lexer: &mut Lexer<'source, Self>,
    ) -> Option<Result<Self, Self::Error>>;

    // Provided methods
    fn lexer(source: &'source Self::Source) -> Lexer<'source, Self> 
       where Self::Extras: Default { ... }
    fn lexer_with_extras(
        source: &'source Self::Source,
        extras: Self::Extras,
    ) -> Lexer<'source, Self>  { ... }
}
Expand description

Trait implemented for an enum representing all tokens. You should never have to implement it manually, use the #[derive(Logos)] attribute on your enum.

Required Associated Types§

Source

type Extras

Associated type Extras for the particular lexer. This can be set using #[logos(extras = MyExtras)] and accessed inside callbacks.

Source

type Source: Source + ?Sized + 'source

Source type this token can be lexed from. This will default to str, unless one of the defined patterns explicitly uses non-unicode byte values or byte slices, in which case that implementation will use [u8].

Source

type Error: Default + Clone + PartialEq + Debug + 'source

Error type returned by the lexer. This can be set using #[logos(error = MyError)]. Defaults to () if not set.

Required Methods§

Source

fn lex(lexer: &mut Lexer<'source, Self>) -> Option<Result<Self, Self::Error>>

The heart of Logos. Called by the Lexer. The implementation for this function is generated by the logos-derive crate.

Provided Methods§

Source

fn lexer(source: &'source Self::Source) -> Lexer<'source, Self>
where Self::Extras: Default,

Create a new instance of a Lexer that will produce tokens implementing this Logos.

Examples found in repository?
examples/extras.rs (line 63)
59fn main() {
60    let src = fs::read_to_string(env::args().nth(1).expect("Expected file argument"))
61        .expect("Failed to read file");
62
63    let mut lex = Token::lexer(src.as_str());
64
65    while let Some(token) = lex.next() {
66        if let Ok(Token::Word((line, column))) = token {
67            println!("Word '{}' found at ({}, {})", lex.slice(), line, column);
68        }
69    }
70}
More examples
Hide additional examples
examples/string-interpolation.rs (lines 123-131)
122fn main() {
123    let mut lex = VariableDefinitionContext::lexer(
124        "\
125        name = 'Mark'\n\
126        greeting = 'Hi ${name}!'\n\
127        surname = 'Scott'\n\
128        greeting2 = 'Hi ${name ' ' surname}!'\n\
129        greeting3 = 'Hi ${name ' ${surname}!'}!'\n\
130        ",
131    );
132    test_variable_definition("name", "Mark", lex.next());
133    test_variable_definition("greeting", "Hi Mark!", lex.next());
134    test_variable_definition("surname", "Scott", lex.next());
135    test_variable_definition("greeting2", "Hi Mark Scott!", lex.next());
136    test_variable_definition("greeting3", "Hi Mark Scott!!", lex.next());
137}
examples/custom_error.rs (line 52)
49fn main() {
50    // 256 overflows u8, since u8's max value is 255.
51    // 'é' is not a valid ascii letter.
52    let mut lex = Token::lexer("Hello 256 Jérome");
53
54    assert_eq!(lex.next(), Some(Ok(Token::Word)));
55    assert_eq!(lex.slice(), "Hello");
56
57    assert_eq!(
58        lex.next(),
59        Some(Err(LexingError::InvalidInteger(
60            "overflow error".to_owned()
61        )))
62    );
63    assert_eq!(lex.slice(), "256");
64
65    assert_eq!(lex.next(), Some(Ok(Token::Word)));
66    assert_eq!(lex.slice(), "J");
67
68    assert_eq!(lex.next(), Some(Err(LexingError::NonAsciiCharacter('é'))));
69    assert_eq!(lex.slice(), "é");
70
71    assert_eq!(lex.next(), Some(Ok(Token::Word)));
72    assert_eq!(lex.slice(), "rome");
73
74    assert_eq!(lex.next(), None);
75}
examples/json.rs (line 209)
205fn main() {
206    let filename = env::args().nth(1).expect("Expected file argument");
207    let src = fs::read_to_string(&filename).expect("Failed to read file");
208
209    let mut lexer = Token::lexer(src.as_str());
210
211    match parse_value(&mut lexer) {
212        Ok(value) => println!("{:#?}", value),
213        Err((msg, span)) => {
214            use ariadne::{ColorGenerator, Label, Report, ReportKind, Source};
215
216            let mut colors = ColorGenerator::new();
217
218            let a = colors.next();
219
220            Report::build(ReportKind::Error, &filename, 12)
221                .with_message("Invalid JSON".to_string())
222                .with_label(
223                    Label::new((&filename, span))
224                        .with_message(msg)
225                        .with_color(a),
226                )
227                .finish()
228                .eprint((&filename, Source::from(src)))
229                .unwrap();
230        }
231    }
232}
examples/json_borrowed.rs (line 205)
201fn main() {
202    let filename = env::args().nth(1).expect("Expected file argument");
203    let src = fs::read_to_string(&filename).expect("Failed to read file");
204
205    let mut lexer = Token::lexer(src.as_str());
206
207    match parse_value(&mut lexer) {
208        Ok(value) => println!("{:#?}", value),
209        Err((msg, span)) => {
210            use ariadne::{ColorGenerator, Label, Report, ReportKind, Source};
211
212            let mut colors = ColorGenerator::new();
213
214            let a = colors.next();
215
216            Report::build(ReportKind::Error, &filename, 12)
217                .with_message("Invalid JSON".to_string())
218                .with_label(
219                    Label::new((&filename, span))
220                        .with_message(msg)
221                        .with_color(a),
222                )
223                .finish()
224                .eprint((&filename, Source::from(src)))
225                .unwrap();
226        }
227    }
228}
examples/calculator.rs (line 140)
133fn main() {
134    //reads the input expression from the command line
135    let input = env::args()
136        .nth(1)
137        .expect("Expected expression argument (e.g. `1 + 7 * (3 - 4) / 5`)");
138
139    //creates a lexer instance from the input
140    let lexer = Token::lexer(&input);
141
142    //splits the input into tokens, using the lexer
143    let mut tokens = vec![];
144    for (token, span) in lexer.spanned() {
145        match token {
146            Ok(token) => tokens.push(token),
147            Err(e) => {
148                println!("lexer error at {:?}: {}", span, e);
149                return;
150            }
151        }
152    }
153
154    //parses the tokens to construct an AST
155    let ast = match parser().parse(&tokens).into_result() {
156        Ok(expr) => {
157            println!("[AST]\n{:#?}", expr);
158            expr
159        }
160        Err(e) => {
161            println!("parse error: {:#?}", e);
162            return;
163        }
164    };
165
166    //evaluates the AST to get the result
167    println!("\n[result]\n{}", ast.eval());
168}
Source

fn lexer_with_extras(source: &'source Self::Source, extras: Self::Extras) -> Lexer<'source, Self>

Create a new instance of a Lexer with the provided Extras that will produce tokens implementing this Logos.

Dyn Compatibility§

This trait is not dyn compatible.

In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.

Implementors§