Struct logos::Lexer

Source
pub struct Lexer<'source, Token: Logos<'source>> {
    pub extras: Token::Extras,
    /* private fields */
}

Lexer is the main struct of the crate that allows you to read through a Source and produce tokens for enums implementing the Logos trait.

Fields§

§extras: Token::Extras

Extras associated with the Token.

Implementations§

Source§

impl<'source, Token: Logos<'source>> Lexer<'source, Token>

Source

pub fn new(source: &'source Token::Source) -> Self
where Token::Extras: Default,

Create a new Lexer.

Due to type inference, it might be more ergonomic to construct it by calling Token::lexer on any Token with derived Logos.
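A minimal sketch for illustration (the Token enum below is a made-up example, not part of the crate), showing that Lexer::new and Token::lexer build the same lexer:

use logos::{Lexer, Logos};

#[derive(Logos, Debug, PartialEq)]
enum Token {
    #[token("+")]
    Plus,
}

// Both construct a lexer over the same source; `Token::lexer`
// avoids having to spell out the `Lexer<Token>` type.
let explicit: Lexer<Token> = Lexer::new("1 + 2");
let inferred = Token::lexer("1 + 2");

assert_eq!(explicit.source(), inferred.source());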

Source

pub fn with_extras( source: &'source Token::Source, extras: Token::Extras, ) -> Self

Create a new Lexer with the provided Extras.

Due to type inference, it might be more ergonomic to construct it by calling Token::lexer_with_extras on any Token with derived Logos.
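A sketch under stated assumptions: the Token enum and its u32 newline counter are illustrative, not part of the crate; only lexer_with_extras and the extras field come from logos.

use logos::Logos;

#[derive(Logos, Debug, PartialEq)]
#[logos(extras = u32)]
enum Token {
    // Count newlines in the lexer's extras.
    #[token("\n", |lex| lex.extras += 1)]
    Newline,

    #[regex("[a-z]+")]
    Word,
}

// Start the counter at 10 instead of `u32::default()`.
let mut lex = Token::lexer_with_extras("one\ntwo", 10);
while lex.next().is_some() {}

assert_eq!(lex.extras, 11);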

Source

pub fn source(&self) -> &'source Token::Source

Source from which this Lexer is reading tokens.
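A brief sketch (with a made-up Token type) showing that source returns the full original input regardless of how far the lexer has advanced:

use logos::Logos;

#[derive(Logos, Debug, PartialEq)]
enum Token {
    #[token("fn")]
    Fn,
}

let mut lex = Token::lexer("fn main() {}");
assert_eq!(lex.next(), Some(Ok(Token::Fn)));

// The full input, not just the unlexed part.
assert_eq!(lex.source(), "fn main() {}");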

Source

pub fn spanned(self) -> SpannedIter<'source, Token>

Wrap the Lexer in an Iterator that produces tuples of (Result<Token, Token::Error>, Span).

§Example
use logos::Logos;

#[derive(Debug, PartialEq, Clone, Default)]
enum LexingError {
    NumberParseError,
    #[default]
    Other
}

impl From<std::num::ParseIntError> for LexingError {
    fn from(_: std::num::ParseIntError) -> Self {
        LexingError::NumberParseError
    }
}

impl From<std::num::ParseFloatError> for LexingError {
    fn from(_: std::num::ParseFloatError) -> Self {
        LexingError::NumberParseError
    }
}

#[derive(Logos, Debug, PartialEq)]
#[logos(error = LexingError)]
enum Example {
    #[regex(r"[ \n\t\f]+", logos::skip)]
    Ignored,

    #[regex("-?[0-9]+", |lex| lex.slice().parse())]
    Integer(i64),

    #[regex("-?[0-9]+\\.[0-9]+", |lex| lex.slice().parse())]
    Float(f64),
}

let tokens: Vec<_> = Example::lexer("42 3.14 -5 f").spanned().collect();

assert_eq!(
    tokens,
    &[
        (Ok(Example::Integer(42)), 0..2),
        (Ok(Example::Float(3.14)), 3..7),
        (Ok(Example::Integer(-5)), 8..10),
        (Err(LexingError::Other), 11..12), // 'f' is not a recognized token
    ],
);
Examples found in repository
examples/calculator.rs (line 148)
fn main() {
    //reads the input expression from the command line
    let input = env::args()
        .nth(1)
        .expect("Expected expression argument (e.g. `1 + 7 * (3 - 4) / 5`)");

    //creates a lexer instance from the input
    let lexer = Token::lexer(&input);

    //splits the input into tokens, using the lexer
    let mut tokens = vec![];
    for (token, span) in lexer.spanned() {
        match token {
            Ok(token) => tokens.push(token),
            Err(e) => {
                println!("lexer error at {:?}: {}", span, e);
                return;
            }
        }
    }

    //parses the tokens to construct an AST
    let ast = match parser().parse(tokens) {
        Ok(expr) => {
            println!("[AST]\n{:#?}", expr);
            expr
        }
        Err(e) => {
            println!("parse error: {:#?}", e);
            return;
        }
    };

    //evaluates the AST to get the result
    println!("\n[result]\n{}", ast.eval());
}
Source

pub fn span(&self) -> Span

Get the range for the current token in Source.

Examples found in repository
examples/extras.rs (line 35)
fn newline_callback(lex: &mut Lexer<Token>) -> Skip {
    lex.extras.0 += 1;
    lex.extras.1 = lex.span().end;
    Skip
}

/// Compute the line and column position for the current word.
fn word_callback(lex: &mut Lexer<Token>) -> (usize, usize) {
    let line = lex.extras.0;
    let column = lex.span().start - lex.extras.1;

    (line, column)
}
More examples
examples/json.rs (line 99)
fn parse_value(lexer: &mut Lexer<'_, Token>) -> Result<Value> {
    if let Some(token) = lexer.next() {
        match token {
            Ok(Token::Bool(b)) => Ok(Value::Bool(b)),
            Ok(Token::BraceOpen) => parse_object(lexer),
            Ok(Token::BracketOpen) => parse_array(lexer),
            Ok(Token::Null) => Ok(Value::Null),
            Ok(Token::Number(n)) => Ok(Value::Number(n)),
            Ok(Token::String(s)) => Ok(Value::String(s)),
            _ => Err((
                "unexpected token here (context: value)".to_owned(),
                lexer.span(),
            )),
        }
    } else {
        Err(("empty values are not allowed".to_owned(), lexer.span()))
    }
}
/* ANCHOR_END: value */

/* ANCHOR: array */
/// Parse a token stream into an array and return when
/// a valid terminator is found.
///
/// > NOTE: we assume '[' was consumed.
fn parse_array(lexer: &mut Lexer<'_, Token>) -> Result<Value> {
    let mut array = Vec::new();
    let span = lexer.span();
    let mut awaits_comma = false;
    let mut awaits_value = false;

    while let Some(token) = lexer.next() {
        match token {
            Ok(Token::Bool(b)) if !awaits_comma => {
                array.push(Value::Bool(b));
                awaits_value = false;
            }
            Ok(Token::BraceOpen) if !awaits_comma => {
                let object = parse_object(lexer)?;
                array.push(object);
                awaits_value = false;
            }
            Ok(Token::BracketOpen) if !awaits_comma => {
                let sub_array = parse_array(lexer)?;
                array.push(sub_array);
                awaits_value = false;
            }
            Ok(Token::BracketClose) if !awaits_value => return Ok(Value::Array(array)),
            Ok(Token::Comma) if awaits_comma => awaits_value = true,
            Ok(Token::Null) if !awaits_comma => {
                array.push(Value::Null);
                awaits_value = false
            }
            Ok(Token::Number(n)) if !awaits_comma => {
                array.push(Value::Number(n));
                awaits_value = false;
            }
            Ok(Token::String(s)) if !awaits_comma => {
                array.push(Value::String(s));
                awaits_value = false;
            }
            _ => {
                return Err((
                    "unexpected token here (context: array)".to_owned(),
                    lexer.span(),
                ))
            }
        }
        awaits_comma = !awaits_value;
    }
    Err(("unmatched opening bracket defined here".to_owned(), span))
}
/* ANCHOR_END: array */

/* ANCHOR: object */
/// Parse a token stream into an object and return when
/// a valid terminator is found.
///
/// > NOTE: we assume '{' was consumed.
fn parse_object(lexer: &mut Lexer<'_, Token>) -> Result<Value> {
    let mut map = HashMap::new();
    let span = lexer.span();
    let mut awaits_comma = false;
    let mut awaits_key = false;

    while let Some(token) = lexer.next() {
        match token {
            Ok(Token::BraceClose) if !awaits_key => return Ok(Value::Object(map)),
            Ok(Token::Comma) if awaits_comma => awaits_key = true,
            Ok(Token::String(key)) if !awaits_comma => {
                match lexer.next() {
                    Some(Ok(Token::Colon)) => (),
                    _ => {
                        return Err((
                            "unexpected token here, expecting ':'".to_owned(),
                            lexer.span(),
                        ))
                    }
                }
                let value = parse_value(lexer)?;
                map.insert(key, value);
                awaits_key = false;
            }
            _ => {
                return Err((
                    "unexpected token here (context: object)".to_owned(),
                    lexer.span(),
                ))
            }
        }
        awaits_comma = !awaits_key;
    }
    Err(("unmatched opening brace defined here".to_owned(), span))
}
examples/json_borrowed.rs (line 95)
fn parse_value<'source>(lexer: &mut Lexer<'source, Token<'source>>) -> Result<Value<'source>> {
    if let Some(token) = lexer.next() {
        match token {
            Ok(Token::Bool(b)) => Ok(Value::Bool(b)),
            Ok(Token::BraceOpen) => parse_object(lexer),
            Ok(Token::BracketOpen) => parse_array(lexer),
            Ok(Token::Null) => Ok(Value::Null),
            Ok(Token::Number(n)) => Ok(Value::Number(n)),
            Ok(Token::String(s)) => Ok(Value::String(s)),
            _ => Err((
                "unexpected token here (context: value)".to_owned(),
                lexer.span(),
            )),
        }
    } else {
        Err(("empty values are not allowed".to_owned(), lexer.span()))
    }
}
/* ANCHOR_END: value */

/* ANCHOR: array */
/// Parse a token stream into an array and return when
/// a valid terminator is found.
///
/// > NOTE: we assume '[' was consumed.
fn parse_array<'source>(lexer: &mut Lexer<'source, Token<'source>>) -> Result<Value<'source>> {
    let mut array = Vec::new();
    let span = lexer.span();
    let mut awaits_comma = false;
    let mut awaits_value = false;

    while let Some(token) = lexer.next() {
        match token {
            Ok(Token::Bool(b)) if !awaits_comma => {
                array.push(Value::Bool(b));
                awaits_value = false;
            }
            Ok(Token::BraceOpen) if !awaits_comma => {
                let object = parse_object(lexer)?;
                array.push(object);
                awaits_value = false;
            }
            Ok(Token::BracketOpen) if !awaits_comma => {
                let sub_array = parse_array(lexer)?;
                array.push(sub_array);
                awaits_value = false;
            }
            Ok(Token::BracketClose) if !awaits_value => return Ok(Value::Array(array)),
            Ok(Token::Comma) if awaits_comma => awaits_value = true,
            Ok(Token::Null) if !awaits_comma => {
                array.push(Value::Null);
                awaits_value = false
            }
            Ok(Token::Number(n)) if !awaits_comma => {
                array.push(Value::Number(n));
                awaits_value = false;
            }
            Ok(Token::String(s)) if !awaits_comma => {
                array.push(Value::String(s));
                awaits_value = false;
            }
            _ => {
                return Err((
                    "unexpected token here (context: array)".to_owned(),
                    lexer.span(),
                ))
            }
        }
        awaits_comma = !awaits_value;
    }
    Err(("unmatched opening bracket defined here".to_owned(), span))
}
/* ANCHOR_END: array */

/* ANCHOR: object */
/// Parse a token stream into an object and return when
/// a valid terminator is found.
///
/// > NOTE: we assume '{' was consumed.
fn parse_object<'source>(lexer: &mut Lexer<'source, Token<'source>>) -> Result<Value<'source>> {
    let mut map = HashMap::new();
    let span = lexer.span();
    let mut awaits_comma = false;
    let mut awaits_key = false;

    while let Some(token) = lexer.next() {
        match token {
            Ok(Token::BraceClose) if !awaits_key => return Ok(Value::Object(map)),
            Ok(Token::Comma) if awaits_comma => awaits_key = true,
            Ok(Token::String(key)) if !awaits_comma => {
                match lexer.next() {
                    Some(Ok(Token::Colon)) => (),
                    _ => {
                        return Err((
                            "unexpected token here, expecting ':'".to_owned(),
                            lexer.span(),
                        ))
                    }
                }
                let value = parse_value(lexer)?;
                map.insert(key, value);
                awaits_key = false;
            }
            _ => {
                return Err((
                    "unexpected token here (context: object)".to_owned(),
                    lexer.span(),
                ))
            }
        }
        awaits_comma = !awaits_key;
    }
    Err(("unmatched opening brace defined here".to_owned(), span))
}
Source

pub fn slice(&self) -> <Token::Source as Source>::Slice<'source>

Get a string slice of the current token.

Examples found in repository
examples/extras.rs (line 69)
fn main() {
    let src = fs::read_to_string(env::args().nth(1).expect("Expected file argument"))
        .expect("Failed to read file");

    let mut lex = Token::lexer(src.as_str());

    while let Some(token) = lex.next() {
        if let Ok(Token::Word((line, column))) = token {
            println!("Word '{}' found at ({}, {})", lex.slice(), line, column);
        }
    }
}
More examples
examples/custom_error.rs (line 48)
fn main() {
    // 256 overflows u8, since u8's max value is 255.
    // 'é' is not a valid ascii letter.
    let mut lex = Token::lexer("Hello 256 Jérome");

    assert_eq!(lex.next(), Some(Ok(Token::Word)));
    assert_eq!(lex.slice(), "Hello");

    assert_eq!(
        lex.next(),
        Some(Err(LexingError::InvalidInteger(
            "overflow error".to_owned()
        )))
    );
    assert_eq!(lex.slice(), "256");

    assert_eq!(lex.next(), Some(Ok(Token::Word)));
    assert_eq!(lex.slice(), "J");

    assert_eq!(lex.next(), Some(Err(LexingError::NonAsciiCharacter)));
    assert_eq!(lex.slice(), "é");

    assert_eq!(lex.next(), Some(Ok(Token::Word)));
    assert_eq!(lex.slice(), "rome");

    assert_eq!(lex.next(), None);
}
Source

pub fn remainder(&self) -> <Token::Source as Source>::Slice<'source>

Get a slice of the remaining source, starting at the end of the current token.
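A brief sketch, assuming a made-up Token type, contrasting the current token with the untouched rest of the input:

use logos::Logos;

#[derive(Logos, Debug, PartialEq)]
enum Token {
    #[token("let")]
    Let,
}

let mut lex = Token::lexer("let x = 5;");
assert_eq!(lex.next(), Some(Ok(Token::Let)));

// Everything past the end of the current token, not yet lexed.
assert_eq!(lex.remainder(), " x = 5;");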

Source

pub fn morph<Token2>(self) -> Lexer<'source, Token2>
where Token2: Logos<'source, Source = Token::Source>, Token::Extras: Into<Token2::Extras>,

Turn this lexer into a lexer for a new token type.

The new lexer continues to point at the same span as the current lexer, and the current token becomes the error token of the new token type.
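A sketch of switching token types mid-stream; the Outer and Inner enums are illustrative assumptions that share str as their Source and the default () extras, as morph requires:

use logos::{Lexer, Logos};

#[derive(Logos, Debug, PartialEq)]
enum Outer {
    #[token("\"")]
    StartString,
}

#[derive(Logos, Debug, PartialEq)]
enum Inner {
    #[regex(r#"[^"\\]+"#)]
    Text,

    #[token("\"")]
    EndString,
}

let mut outer = Outer::lexer(r#""hello""#);
assert_eq!(outer.next(), Some(Ok(Outer::StartString)));

// Continue lexing the same input from the same position,
// but with the Inner token type.
let mut inner: Lexer<Inner> = outer.morph();
assert_eq!(inner.next(), Some(Ok(Inner::Text)));
assert_eq!(inner.next(), Some(Ok(Inner::EndString)));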

Source

pub fn bump(&mut self, n: usize)

Bumps the end of the currently lexed token by n bytes.

§Panics

Panics if adding n to the current offset would place the Lexer beyond the last byte, or in the middle of a UTF-8 code point (the UTF-8 restriction does not apply when lexing raw &[u8]).
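A brief sketch, with a made-up Word token, of extending the current match by hand (something more typically done inside a callback):

use logos::Logos;

#[derive(Logos, Debug, PartialEq)]
enum Token {
    #[regex("[a-z]+")]
    Word,
}

let mut lex = Token::lexer("hello, world");
assert_eq!(lex.next(), Some(Ok(Token::Word)));
assert_eq!(lex.slice(), "hello");

// Extend the current token over the following ", ".
lex.bump(2);
assert_eq!(lex.slice(), "hello, ");
assert_eq!(lex.span(), 0..7);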

Trait Implementations§

Source§

impl<'source, Token> Clone for Lexer<'source, Token>
where Token: Logos<'source> + Clone, Token::Extras: Clone,

Source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · Source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
Source§

impl<'source, Token> Debug for Lexer<'source, Token>
where Token: Logos<'source>, Token::Source: Debug, Token::Extras: Debug,

Source§

fn fmt(&self, fmt: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
Source§

impl<'source, Token> Iterator for Lexer<'source, Token>
where Token: Logos<'source>,

Source§

type Item = Result<Token, <Token as Logos<'source>>::Error>

The type of the elements being iterated over.
Source§

fn next(&mut self) -> Option<Result<Token, Token::Error>>

Advances the iterator and returns the next value. Read more
Source§

fn next_chunk<const N: usize>( &mut self, ) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · Source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
1.0.0 · Source§

fn count(self) -> usize
where Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · Source§

fn last(self) -> Option<Self::Item>
where Self: Sized,

Consumes the iterator, returning the last element. Read more
Source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · Source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · Source§

fn step_by(self, step: usize) -> StepBy<Self>
where Self: Sized,

Creates an iterator starting at the same point, but stepping by the given amount at each iteration. Read more
1.0.0 · Source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where Self: Sized, U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · Source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where Self: Sized, U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
Source§

fn intersperse(self, separator: Self::Item) -> Intersperse<Self>
where Self: Sized, Self::Item: Clone,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places a copy of separator between adjacent items of the original iterator. Read more
Source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where Self: Sized, G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator between adjacent items of the original iterator. Read more
1.0.0 · Source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where Self: Sized, F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each element. Read more
1.21.0 · Source§

fn for_each<F>(self, f: F)
where Self: Sized, F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · Source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where Self: Sized, P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element should be yielded. Read more
1.0.0 · Source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where Self: Sized, F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · Source§

fn enumerate(self) -> Enumerate<Self>
where Self: Sized,

Creates an iterator which gives the current iteration count as well as the next value. Read more
1.0.0 · Source§

fn peekable(self) -> Peekable<Self>
where Self: Sized,

Creates an iterator which can use the peek and peek_mut methods to look at the next element of the iterator without consuming it. See their documentation for more information. Read more
1.0.0 · Source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where Self: Sized, P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · Source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where Self: Sized, P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · Source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where Self: Sized, P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · Source§

fn skip(self, n: usize) -> Skip<Self>
where Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · Source§

fn take(self, n: usize) -> Take<Self>
where Self: Sized,

Creates an iterator that yields the first n elements, or fewer if the underlying iterator ends sooner. Read more
1.0.0 · Source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but unlike fold, produces a new iterator. Read more
1.0.0 · Source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where Self: Sized, U: IntoIterator, F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
1.29.0 · Source§

fn flatten(self) -> Flatten<Self>
where Self: Sized, Self::Item: IntoIterator,

Creates an iterator that flattens nested structure. Read more
Source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where Self: Sized, F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over self and returns an iterator over the outputs of f. Like slice::windows(), the windows during mapping overlap as well. Read more
1.0.0 · Source§

fn fuse(self) -> Fuse<Self>
where Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · Source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where Self: Sized, F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · Source§

fn by_ref(&mut self) -> &mut Self
where Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · Source§

fn collect<B>(self) -> B
where B: FromIterator<Self::Item>, Self: Sized,

Transforms an iterator into a collection. Read more
Source§

fn try_collect<B>( &mut self, ) -> <<Self::Item as Try>::Residual as Residual<B>>::TryType
where Self: Sized, Self::Item: Try, <Self::Item as Try>::Residual: Residual<B>, B: FromIterator<<Self::Item as Try>::Output>,

🔬This is a nightly-only experimental API. (iterator_try_collect)
Fallibly transforms an iterator into a collection, short circuiting if a failure is encountered. Read more
Source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where E: Extend<Self::Item>, Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · Source§

fn partition<B, F>(self, f: F) -> (B, B)
where Self: Sized, B: Default + Extend<Self::Item>, F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
Source§

fn is_partitioned<P>(self, predicate: P) -> bool
where Self: Sized, P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, such that all those that return true precede all those that return false. Read more
1.27.0 · Source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Output = B>,

An iterator method that applies a function as long as it returns successfully, producing a single, final value. Read more
1.27.0 · Source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where Self: Sized, F: FnMut(Self::Item) -> R, R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the iterator, stopping at the first error and returning that error. Read more
1.0.0 · Source§

fn fold<B, F>(self, init: B, f: F) -> B
where Self: Sized, F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, returning the final result. Read more
1.51.0 · Source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where Self: Sized, F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing operation. Read more
Source§

fn try_reduce<R>( &mut self, f: impl FnMut(Self::Item, Self::Item) -> R, ) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where Self: Sized, R: Try<Output = Self::Item>, <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · Source§

fn all<F>(&mut self, f: F) -> bool
where Self: Sized, F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · Source§

fn any<F>(&mut self, f: F) -> bool
where Self: Sized, F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · Source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where Self: Sized, P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · Source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where Self: Sized, F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns the first non-none result. Read more
Source§

fn try_find<R>( &mut self, f: impl FnMut(&Self::Item) -> R, ) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where Self: Sized, R: Try<Output = bool>, <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns the first true result or the first error. Read more
1.0.0 · Source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where Self: Sized, P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.0.0 · Source§

fn max(self) -> Option<Self::Item>
where Self: Sized, Self::Item: Ord,

Returns the maximum element of an iterator. Read more
1.0.0 · Source§

fn min(self) -> Option<Self::Item>
where Self: Sized, Self::Item: Ord,

Returns the minimum element of an iterator. Read more
1.6.0 · Source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where B: Ord, Self: Sized, F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the specified function. Read more
1.15.0 · Source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the specified comparison function. Read more
1.6.0 · Source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where B: Ord, Self: Sized, F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the specified function. Read more
1.15.0 · Source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the specified comparison function. Read more
1.0.0 · Source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where FromA: Default + Extend<A>, FromB: Default + Extend<B>, Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · Source§

fn copied<'a, T>(self) -> Copied<Self>
where T: 'a + Copy, Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · Source§

fn cloned<'a, T>(self) -> Cloned<Self>
where T: 'a + Clone, Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
1.0.0 · Source§

fn cycle(self) -> Cycle<Self>
where Self: Sized + Clone,

Repeats an iterator endlessly. Read more
Source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · Source§

fn sum<S>(self) -> S
where Self: Sized, S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · Source§

fn product<P>(self) -> P
where Self: Sized, P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
1.5.0 · Source§

fn cmp<I>(self, other: I) -> Ordering
where I: IntoIterator<Item = Self::Item>, Self::Item: Ord, Self: Sized,

Lexicographically compares the elements of this Iterator with those of another. Read more
Source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where Self: Sized, I: IntoIterator, F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those of another with respect to the specified comparison function. Read more
1.5.0 · Source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where I: IntoIterator, Self::Item: PartialOrd<<I as IntoIterator>::Item>, Self: Sized,

Lexicographically compares the PartialOrd elements of this Iterator with those of another. The comparison works like short-circuit evaluation, returning a result without comparing the remaining elements. As soon as an order can be determined, the evaluation stops and a result is returned. Read more
Source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where Self: Sized, I: IntoIterator, F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those of another with respect to the specified comparison function. Read more
1.5.0 · Source§

fn eq<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialEq<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are equal to those of another. Read more
Source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where Self: Sized, I: IntoIterator, F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of another with respect to the specified equality function. Read more
1.5.0 · Source§

fn ne<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialEq<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are not equal to those of another. Read more
1.5.0 · Source§

fn lt<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialOrd<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are lexicographically less than those of another. Read more
1.5.0 · Source§

fn le<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialOrd<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are lexicographically less or equal to those of another. Read more
1.5.0 · Source§

fn gt<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialOrd<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are lexicographically greater than those of another. Read more
1.5.0 · Source§

fn ge<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialOrd<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are lexicographically greater than or equal to those of another. Read more
1.82.0 · Source§

fn is_sorted(self) -> bool
where Self: Sized, Self::Item: PartialOrd,

Checks if the elements of this iterator are sorted. Read more
1.82.0 · Source§

fn is_sorted_by<F>(self, compare: F) -> bool
where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> bool,

Checks if the elements of this iterator are sorted using the given comparator function. Read more
1.82.0 · Source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where Self: Sized, F: FnMut(Self::Item) -> K, K: PartialOrd,

Checks if the elements of this iterator are sorted using the given key extraction function. Read more

Auto Trait Implementations§

§

impl<'source, Token> Freeze for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: Freeze, Token: Freeze, <Token as Logos<'source>>::Error: Freeze,

§

impl<'source, Token> RefUnwindSafe for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: RefUnwindSafe, <Token as Logos<'source>>::Source: RefUnwindSafe, Token: RefUnwindSafe, <Token as Logos<'source>>::Error: RefUnwindSafe,

§

impl<'source, Token> Send for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: Send, <Token as Logos<'source>>::Source: Sync, Token: Send, <Token as Logos<'source>>::Error: Send,

§

impl<'source, Token> Sync for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: Sync, <Token as Logos<'source>>::Source: Sync, Token: Sync, <Token as Logos<'source>>::Error: Sync,

§

impl<'source, Token> Unpin for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: Unpin, Token: Unpin, <Token as Logos<'source>>::Error: Unpin,

§

impl<'source, Token> UnwindSafe for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: UnwindSafe, <Token as Logos<'source>>::Source: RefUnwindSafe, Token: UnwindSafe, <Token as Logos<'source>>::Error: UnwindSafe,

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> CloneToUninit for T
where T: Clone,

Source§

unsafe fn clone_to_uninit(&self, dst: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dst. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<I> IntoIterator for I
where I: Iterator,

Source§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
Source§

type IntoIter = I

Which kind of iterator are we turning this into?
Source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
Source§

impl<T> ToOwned for T
where T: Clone,

Source§

type Owned = T

The resulting type after obtaining ownership.
Source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
Source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.