Lexer

Struct Lexer 

Source
pub struct Lexer<'source, Token: Logos<'source>> {
    pub extras: Token::Extras,
    /* private fields */
}
Expand description

Lexer is the main struct of the crate that allows you to read through a Source and produce tokens for enums implementing the Logos trait.

Fields§

§extras: Token::Extras

Extras associated with the Token.

Implementations§

Source§

impl<'source, Token: Logos<'source>> Lexer<'source, Token>

Source

pub fn new(source: &'source Token::Source) -> Self
where Token::Extras: Default,

Create a new Lexer.

Due to type inference, it might be more ergonomic to construct it by calling Token::lexer on any Token with derived Logos.

Source

pub fn with_extras( source: &'source Token::Source, extras: Token::Extras, ) -> Self

Create a new Lexer with the provided Extras.

Due to type inference, it might be more ergonomic to construct it by calling Token::lexer_with_extras on any Token with derived Logos.

Source

pub fn source(&self) -> &'source Token::Source

Source from which this Lexer is reading tokens.

Source

pub fn spanned(self) -> SpannedIter<'source, Token>

Wrap the Lexer in an Iterator that produces tuples of (Token, Span).

§Example
use logos::Logos;

#[derive(Debug, PartialEq, Clone, Default)]
enum LexingError {
    NumberParseError,
    #[default]
    Other
}

impl From<std::num::ParseIntError> for LexingError {
    fn from(_: std::num::ParseIntError) -> Self {
        LexingError::NumberParseError
    }
}

impl From<std::num::ParseFloatError> for LexingError {
    fn from(_: std::num::ParseFloatError) -> Self {
        LexingError::NumberParseError
    }
}

#[derive(Logos, Debug, PartialEq)]
#[logos(error = LexingError)]
enum Example {
    #[regex(r"[ \n\t\f]+", logos::skip)]
    Ignored,

    #[regex("-?[0-9]+", |lex| lex.slice().parse())]
    Integer(i64),

    #[regex("-?[0-9]+\\.[0-9]+", |lex| lex.slice().parse())]
    Float(f64),
}

let tokens: Vec<_> = Example::lexer("42 3.14 -5 f").spanned().collect();

assert_eq!(
    tokens,
    &[
        (Ok(Example::Integer(42)), 0..2),
        (Ok(Example::Float(3.14)), 3..7),
        (Ok(Example::Integer(-5)), 8..10),
        (Err(LexingError::Other), 11..12), // 'f' is not a recognized token
    ],
);
Examples found in repository?
examples/calculator.rs (line 144)
fn main() {
    //reads the input expression from the command line
    let input = env::args()
        .nth(1)
        .expect("Expected expression argument (e.g. `1 + 7 * (3 - 4) / 5`)");

    //creates a lexer instance from the input
    let lexer = Token::lexer(&input);

    //splits the input into tokens, using the lexer
    let mut tokens = vec![];
    for (token, span) in lexer.spanned() {
        match token {
            Ok(token) => tokens.push(token),
            Err(e) => {
                println!("lexer error at {:?}: {}", span, e);
                return;
            }
        }
    }

    //parses the tokens to construct an AST
    let ast = match parser().parse(&tokens).into_result() {
        Ok(expr) => {
            println!("[AST]\n{:#?}", expr);
            expr
        }
        Err(e) => {
            println!("parse error: {:#?}", e);
            return;
        }
    };

    //evaluates the AST to get the result
    println!("\n[result]\n{}", ast.eval());
}
Source

pub fn span(&self) -> Span

Get the range for the current token in Source.

Examples found in repository?
examples/extras.rs (line 35)
fn newline_callback(lex: &mut Lexer<Token>) -> Skip {
    lex.extras.0 += 1;
    lex.extras.1 = lex.span().end;
    Skip
}

/// Compute the line and column position for the current word.
fn word_callback(lex: &mut Lexer<Token>) -> (usize, usize) {
    let line = lex.extras.0;
    let column = lex.span().start - lex.extras.1;

    (line, column)
}
More examples
Hide additional examples
examples/json.rs (line 100)
fn parse_value(lexer: &mut Lexer<'_, Token>) -> Result<Value> {
    if let Some(token) = lexer.next() {
        match token {
            Ok(Token::Bool(b)) => Ok(Value::Bool(b)),
            Ok(Token::BraceOpen) => parse_object(lexer),
            Ok(Token::BracketOpen) => parse_array(lexer),
            Ok(Token::Null) => Ok(Value::Null),
            Ok(Token::Number(n)) => Ok(Value::Number(n)),
            Ok(Token::String(s)) => Ok(Value::String(s)),
            _ => Err((
                "unexpected token here (context: value)".to_owned(),
                lexer.span(),
            )),
        }
    } else {
        Err(("empty values are not allowed".to_owned(), lexer.span()))
    }
}
/* ANCHOR_END: value */

/* ANCHOR: array */
/// Parse a token stream into an array and return when
/// a valid terminator is found.
///
/// > NOTE: we assume '[' was consumed.
fn parse_array(lexer: &mut Lexer<'_, Token>) -> Result<Value> {
    let mut array = Vec::new();
    let span = lexer.span();
    let mut awaits_comma = false;
    let mut awaits_value = false;

    while let Some(token) = lexer.next() {
        match token {
            Ok(Token::Bool(b)) if !awaits_comma => {
                array.push(Value::Bool(b));
                awaits_value = false;
            }
            Ok(Token::BraceOpen) if !awaits_comma => {
                let object = parse_object(lexer)?;
                array.push(object);
                awaits_value = false;
            }
            Ok(Token::BracketOpen) if !awaits_comma => {
                let sub_array = parse_array(lexer)?;
                array.push(sub_array);
                awaits_value = false;
            }
            Ok(Token::BracketClose) if !awaits_value => return Ok(Value::Array(array)),
            Ok(Token::Comma) if awaits_comma => awaits_value = true,
            Ok(Token::Null) if !awaits_comma => {
                array.push(Value::Null);
                awaits_value = false
            }
            Ok(Token::Number(n)) if !awaits_comma => {
                array.push(Value::Number(n));
                awaits_value = false;
            }
            Ok(Token::String(s)) if !awaits_comma => {
                array.push(Value::String(s));
                awaits_value = false;
            }
            _ => {
                return Err((
                    "unexpected token here (context: array)".to_owned(),
                    lexer.span(),
                ))
            }
        }
        awaits_comma = !awaits_value;
    }
    Err(("unmatched opening bracket defined here".to_owned(), span))
}
/* ANCHOR_END: array */

/* ANCHOR: object */
/// Parse a token stream into an object and return when
/// a valid terminator is found.
///
/// > NOTE: we assume '{' was consumed.
fn parse_object(lexer: &mut Lexer<'_, Token>) -> Result<Value> {
    let mut map = HashMap::new();
    let span = lexer.span();
    let mut awaits_comma = false;
    let mut awaits_key = false;

    while let Some(token) = lexer.next() {
        match token {
            Ok(Token::BraceClose) if !awaits_key => return Ok(Value::Object(map)),
            Ok(Token::Comma) if awaits_comma => awaits_key = true,
            Ok(Token::String(key)) if !awaits_comma => {
                match lexer.next() {
                    Some(Ok(Token::Colon)) => (),
                    _ => {
                        return Err((
                            "unexpected token here, expecting ':'".to_owned(),
                            lexer.span(),
                        ))
                    }
                }
                let value = parse_value(lexer)?;
                map.insert(key, value);
                awaits_key = false;
            }
            _ => {
                return Err((
                    "unexpected token here (context: object)".to_owned(),
                    lexer.span(),
                ))
            }
        }
        awaits_comma = !awaits_key;
    }
    Err(("unmatched opening brace defined here".to_owned(), span))
}
examples/json_borrowed.rs (line 96)
fn parse_value<'source>(lexer: &mut Lexer<'source, Token<'source>>) -> Result<Value<'source>> {
    if let Some(token) = lexer.next() {
        match token {
            Ok(Token::Bool(b)) => Ok(Value::Bool(b)),
            Ok(Token::BraceOpen) => parse_object(lexer),
            Ok(Token::BracketOpen) => parse_array(lexer),
            Ok(Token::Null) => Ok(Value::Null),
            Ok(Token::Number(n)) => Ok(Value::Number(n)),
            Ok(Token::String(s)) => Ok(Value::String(s)),
            _ => Err((
                "unexpected token here (context: value)".to_owned(),
                lexer.span(),
            )),
        }
    } else {
        Err(("empty values are not allowed".to_owned(), lexer.span()))
    }
}
/* ANCHOR_END: value */

/* ANCHOR: array */
/// Parse a token stream into an array and return when
/// a valid terminator is found.
///
/// > NOTE: we assume '[' was consumed.
fn parse_array<'source>(lexer: &mut Lexer<'source, Token<'source>>) -> Result<Value<'source>> {
    let mut array = Vec::new();
    let span = lexer.span();
    let mut awaits_comma = false;
    let mut awaits_value = false;

    while let Some(token) = lexer.next() {
        match token {
            Ok(Token::Bool(b)) if !awaits_comma => {
                array.push(Value::Bool(b));
                awaits_value = false;
            }
            Ok(Token::BraceOpen) if !awaits_comma => {
                let object = parse_object(lexer)?;
                array.push(object);
                awaits_value = false;
            }
            Ok(Token::BracketOpen) if !awaits_comma => {
                let sub_array = parse_array(lexer)?;
                array.push(sub_array);
                awaits_value = false;
            }
            Ok(Token::BracketClose) if !awaits_value => return Ok(Value::Array(array)),
            Ok(Token::Comma) if awaits_comma => awaits_value = true,
            Ok(Token::Null) if !awaits_comma => {
                array.push(Value::Null);
                awaits_value = false
            }
            Ok(Token::Number(n)) if !awaits_comma => {
                array.push(Value::Number(n));
                awaits_value = false;
            }
            Ok(Token::String(s)) if !awaits_comma => {
                array.push(Value::String(s));
                awaits_value = false;
            }
            _ => {
                return Err((
                    "unexpected token here (context: array)".to_owned(),
                    lexer.span(),
                ))
            }
        }
        awaits_comma = !awaits_value;
    }
    Err(("unmatched opening bracket defined here".to_owned(), span))
}
/* ANCHOR_END: array */

/* ANCHOR: object */
/// Parse a token stream into an object and return when
/// a valid terminator is found.
///
/// > NOTE: we assume '{' was consumed.
fn parse_object<'source>(lexer: &mut Lexer<'source, Token<'source>>) -> Result<Value<'source>> {
    let mut map = HashMap::new();
    let span = lexer.span();
    let mut awaits_comma = false;
    let mut awaits_key = false;

    while let Some(token) = lexer.next() {
        match token {
            Ok(Token::BraceClose) if !awaits_key => return Ok(Value::Object(map)),
            Ok(Token::Comma) if awaits_comma => awaits_key = true,
            Ok(Token::String(key)) if !awaits_comma => {
                match lexer.next() {
                    Some(Ok(Token::Colon)) => (),
                    _ => {
                        return Err((
                            "unexpected token here, expecting ':'".to_owned(),
                            lexer.span(),
                        ))
                    }
                }
                let value = parse_value(lexer)?;
                map.insert(key, value);
                awaits_key = false;
            }
            _ => {
                return Err((
                    "unexpected token here (context: object)".to_owned(),
                    lexer.span(),
                ))
            }
        }
        awaits_comma = !awaits_key;
    }
    Err(("unmatched opening brace defined here".to_owned(), span))
}
Source

pub fn slice(&self) -> <Token::Source as Source>::Slice<'source>

Get a string slice of the current token.

Examples found in repository?
examples/custom_error.rs (line 35)
    fn from_lexer(lex: &mut logos::Lexer<'_, Token>) -> Self {
        LexingError::NonAsciiCharacter(lex.slice().chars().next().unwrap())
    }
}

#[derive(Debug, Logos, PartialEq)]
#[logos(error(LexingError, LexingError::from_lexer))]
#[logos(skip r"[ \t]+")]
enum Token {
    #[regex(r"[a-zA-Z]+")]
    Word,
    #[regex(r"[0-9]+", |lex| lex.slice().parse())]
    Integer(u8),
}

fn main() {
    // 256 overflows u8, since u8's max value is 255.
    // 'é' is not a valid ascii letter.
    let mut lex = Token::lexer("Hello 256 Jérome");

    assert_eq!(lex.next(), Some(Ok(Token::Word)));
    assert_eq!(lex.slice(), "Hello");

    assert_eq!(
        lex.next(),
        Some(Err(LexingError::InvalidInteger(
            "overflow error".to_owned()
        )))
    );
    assert_eq!(lex.slice(), "256");

    assert_eq!(lex.next(), Some(Ok(Token::Word)));
    assert_eq!(lex.slice(), "J");

    assert_eq!(lex.next(), Some(Err(LexingError::NonAsciiCharacter('é'))));
    assert_eq!(lex.slice(), "é");

    assert_eq!(lex.next(), Some(Ok(Token::Word)));
    assert_eq!(lex.slice(), "rome");

    assert_eq!(lex.next(), None);
}
More examples
Hide additional examples
examples/extras.rs (line 67)
fn main() {
    let src = fs::read_to_string(env::args().nth(1).expect("Expected file argument"))
        .expect("Failed to read file");

    let mut lex = Token::lexer(src.as_str());

    while let Some(token) = lex.next() {
        if let Ok(Token::Word((line, column))) = token {
            println!("Word '{}' found at ({}, {})", lex.slice(), line, column);
        }
    }
}
examples/string-interpolation.rs (line 52)
fn get_string_content(lex: &mut Lexer<StringContext>) -> String {
    let mut s = String::new();
    while let Some(Ok(token)) = lex.next() {
        match token {
            StringContext::Content => s.push_str(lex.slice()),
            StringContext::DollarSign => s.push('$'),
            StringContext::InterpolationStart(value) => s.push_str(&value),
            StringContext::Quote => break,
        }
    }
    s
}

fn variable_definition(lex: &mut Lexer<VariableDefinitionContext>) -> Option<(String, String)> {
    let id = lex.slice().to_string();
    if let Some(Ok(VariableDefinitionContext::Equals)) = lex.next() {
        if let Some(Ok(VariableDefinitionContext::Quote)) = lex.next() {
            let mut lex2 = lex.clone().morph::<StringContext>();
            let value = get_string_content(&mut lex2);
            *lex = lex2.morph();
            lex.extras.insert(id.clone(), value.clone());
            return Some((id, value));
        }
    }
    None
}
/* ANCHOR_END: variable_definition */

/* ANCHOR: evaluate_interpolation */
fn evaluate_interpolation(lex: &mut Lexer<StringContext>) -> Option<String> {
    let mut lex2 = lex.clone().morph::<StringInterpolationContext>();
    let mut interpolation = String::new();
    while let Some(result) = lex2.next() {
        match result {
            Ok(token) => match token {
                StringInterpolationContext::Id(value) => interpolation.push_str(&value),
                StringInterpolationContext::Quote => {
                    *lex = lex2.morph();
                    interpolation.push_str(&get_string_content(lex));
                    lex2 = lex.clone().morph();
                }
                StringInterpolationContext::InterpolationEnd => break,
            },
            Err(()) => panic!("Interpolation error"),
        }
    }
    *lex = lex2.morph();
    Some(interpolation)
}
/* ANCHOR_END: evaluate_interpolation */

/* ANCHOR: get_variable_value */
fn get_variable_value(lex: &mut Lexer<StringInterpolationContext>) -> Option<String> {
    if let Some(value) = lex.extras.get(lex.slice()) {
        return Some(value.clone());
    }
    None
}
Source

pub fn remainder(&self) -> <Token::Source as Source>::Slice<'source>

Get a slice of remaining source, starting at the end of current token.

Source

pub fn morph<Token2>(self) -> Lexer<'source, Token2>
where Token2: Logos<'source, Source = Token::Source>, Token::Extras: Into<Token2::Extras>,

Turn this lexer into a lexer for a new token type.

The new lexer continues to point at the same span as the current lexer, and the current token becomes the error token of the new token type.

Examples found in repository?
examples/string-interpolation.rs (line 65)
fn variable_definition(lex: &mut Lexer<VariableDefinitionContext>) -> Option<(String, String)> {
    let id = lex.slice().to_string();
    if let Some(Ok(VariableDefinitionContext::Equals)) = lex.next() {
        if let Some(Ok(VariableDefinitionContext::Quote)) = lex.next() {
            let mut lex2 = lex.clone().morph::<StringContext>();
            let value = get_string_content(&mut lex2);
            *lex = lex2.morph();
            lex.extras.insert(id.clone(), value.clone());
            return Some((id, value));
        }
    }
    None
}
/* ANCHOR_END: variable_definition */

/* ANCHOR: evaluate_interpolation */
fn evaluate_interpolation(lex: &mut Lexer<StringContext>) -> Option<String> {
    let mut lex2 = lex.clone().morph::<StringInterpolationContext>();
    let mut interpolation = String::new();
    while let Some(result) = lex2.next() {
        match result {
            Ok(token) => match token {
                StringInterpolationContext::Id(value) => interpolation.push_str(&value),
                StringInterpolationContext::Quote => {
                    *lex = lex2.morph();
                    interpolation.push_str(&get_string_content(lex));
                    lex2 = lex.clone().morph();
                }
                StringInterpolationContext::InterpolationEnd => break,
            },
            Err(()) => panic!("Interpolation error"),
        }
    }
    *lex = lex2.morph();
    Some(interpolation)
}
Source

pub fn bump(&mut self, n: usize)

Bumps the end of currently lexed token by n bytes.

§Panics

Panics if adding n to current offset would place the Lexer beyond the last byte, or in the middle of a UTF-8 code point (does not apply when lexing raw &[u8]).

Trait Implementations§

Source§

impl<'source, Token> Clone for Lexer<'source, Token>
where Token: Logos<'source> + Clone, Token::Extras: Clone,

Source§

fn clone(&self) -> Self

Returns a duplicate of the value. Read more
1.0.0 · Source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
Source§

impl<'source, Token> Debug for Lexer<'source, Token>
where Token: Logos<'source>, Token::Source: Debug, Token::Extras: Debug,

Source§

fn fmt(&self, fmt: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
Source§

impl<'source, Token> Iterator for Lexer<'source, Token>
where Token: Logos<'source>,

Source§

type Item = Result<Token, <Token as Logos<'source>>::Error>

The type of the elements being iterated over.
Source§

fn next(&mut self) -> Option<Result<Token, Token::Error>>

Advances the iterator and returns the next value. Read more
Source§

fn next_chunk<const N: usize>( &mut self, ) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · Source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
1.0.0 · Source§

fn count(self) -> usize
where Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · Source§

fn last(self) -> Option<Self::Item>
where Self: Sized,

Consumes the iterator, returning the last element. Read more
Source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · Source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · Source§

fn step_by(self, step: usize) -> StepBy<Self>
where Self: Sized,

Creates an iterator starting at the same point, but stepping by the given amount at each iteration. Read more
1.0.0 · Source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where Self: Sized, U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · Source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where Self: Sized, U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
Source§

fn intersperse(self, separator: Self::Item) -> Intersperse<Self>
where Self: Sized, Self::Item: Clone,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places a copy of separator between adjacent items of the original iterator. Read more
Source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where Self: Sized, G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator between adjacent items of the original iterator. Read more
1.0.0 · Source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where Self: Sized, F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each element. Read more
1.21.0 · Source§

fn for_each<F>(self, f: F)
where Self: Sized, F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · Source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where Self: Sized, P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element should be yielded. Read more
1.0.0 · Source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where Self: Sized, F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · Source§

fn enumerate(self) -> Enumerate<Self>
where Self: Sized,

Creates an iterator which gives the current iteration count as well as the next value. Read more
1.0.0 · Source§

fn peekable(self) -> Peekable<Self>
where Self: Sized,

Creates an iterator which can use the peek and peek_mut methods to look at the next element of the iterator without consuming it. See their documentation for more information. Read more
1.0.0 · Source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where Self: Sized, P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · Source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where Self: Sized, P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · Source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where Self: Sized, P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · Source§

fn skip(self, n: usize) -> Skip<Self>
where Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · Source§

fn take(self, n: usize) -> Take<Self>
where Self: Sized,

Creates an iterator that yields the first n elements, or fewer if the underlying iterator ends sooner. Read more
1.0.0 · Source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but unlike fold, produces a new iterator. Read more
1.0.0 · Source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where Self: Sized, U: IntoIterator, F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
1.29.0 · Source§

fn flatten(self) -> Flatten<Self>
where Self: Sized, Self::Item: IntoIterator,

Creates an iterator that flattens nested structure. Read more
Source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where Self: Sized, F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over self and returns an iterator over the outputs of f. Like slice::windows(), the windows during mapping overlap as well. Read more
1.0.0 · Source§

fn fuse(self) -> Fuse<Self>
where Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · Source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where Self: Sized, F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · Source§

fn by_ref(&mut self) -> &mut Self
where Self: Sized,

Creates a “by reference” adapter for this instance of Iterator. Read more
1.0.0 · Source§

fn collect<B>(self) -> B
where B: FromIterator<Self::Item>, Self: Sized,

Transforms an iterator into a collection. Read more
Source§

fn try_collect<B>( &mut self, ) -> <<Self::Item as Try>::Residual as Residual<B>>::TryType
where Self: Sized, Self::Item: Try, <Self::Item as Try>::Residual: Residual<B>, B: FromIterator<<Self::Item as Try>::Output>,

🔬This is a nightly-only experimental API. (iterator_try_collect)
Fallibly transforms an iterator into a collection, short circuiting if a failure is encountered. Read more
Source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where E: Extend<Self::Item>, Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · Source§

fn partition<B, F>(self, f: F) -> (B, B)
where Self: Sized, B: Default + Extend<Self::Item>, F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
Source§

fn is_partitioned<P>(self, predicate: P) -> bool
where Self: Sized, P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, such that all those that return true precede all those that return false. Read more
1.27.0 · Source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Output = B>,

An iterator method that applies a function as long as it returns successfully, producing a single, final value. Read more
1.27.0 · Source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where Self: Sized, F: FnMut(Self::Item) -> R, R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the iterator, stopping at the first error and returning that error. Read more
1.0.0 · Source§

fn fold<B, F>(self, init: B, f: F) -> B
where Self: Sized, F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, returning the final result. Read more
1.51.0 · Source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where Self: Sized, F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing operation. Read more
Source§

fn try_reduce<R>( &mut self, f: impl FnMut(Self::Item, Self::Item) -> R, ) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where Self: Sized, R: Try<Output = Self::Item>, <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · Source§

fn all<F>(&mut self, f: F) -> bool
where Self: Sized, F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · Source§

fn any<F>(&mut self, f: F) -> bool
where Self: Sized, F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · Source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where Self: Sized, P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · Source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where Self: Sized, F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns the first non-none result. Read more
Source§

fn try_find<R>( &mut self, f: impl FnMut(&Self::Item) -> R, ) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where Self: Sized, R: Try<Output = bool>, <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns the first true result or the first error. Read more
1.0.0 · Source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where Self: Sized, P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.0.0 · Source§

fn max(self) -> Option<Self::Item>
where Self: Sized, Self::Item: Ord,

Returns the maximum element of an iterator. Read more
1.0.0 · Source§

fn min(self) -> Option<Self::Item>
where Self: Sized, Self::Item: Ord,

Returns the minimum element of an iterator. Read more
1.6.0 · Source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where B: Ord, Self: Sized, F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the specified function. Read more
1.15.0 · Source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the specified comparison function. Read more
1.6.0 · Source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where B: Ord, Self: Sized, F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the specified function. Read more
1.15.0 · Source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the specified comparison function. Read more
1.0.0 · Source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where FromA: Default + Extend<A>, FromB: Default + Extend<B>, Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · Source§

fn copied<'a, T>(self) -> Copied<Self>
where T: Copy + 'a, Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · Source§

fn cloned<'a, T>(self) -> Cloned<Self>
where T: Clone + 'a, Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
1.0.0 · Source§

fn cycle(self) -> Cycle<Self>
where Self: Sized + Clone,

Repeats an iterator endlessly. Read more
Source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · Source§

fn sum<S>(self) -> S
where Self: Sized, S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · Source§

fn product<P>(self) -> P
where Self: Sized, P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements. Read more
1.5.0 · Source§

fn cmp<I>(self, other: I) -> Ordering
where I: IntoIterator<Item = Self::Item>, Self::Item: Ord, Self: Sized,

Lexicographically compares the elements of this Iterator with those of another. Read more
Source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where Self: Sized, I: IntoIterator, F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those of another with respect to the specified comparison function. Read more
1.5.0 · Source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where I: IntoIterator, Self::Item: PartialOrd<<I as IntoIterator>::Item>, Self: Sized,

Lexicographically compares the PartialOrd elements of this Iterator with those of another. The comparison works like short-circuit evaluation, returning a result without comparing the remaining elements. As soon as an order can be determined, the evaluation stops and a result is returned. Read more
Source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where Self: Sized, I: IntoIterator, F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those of another with respect to the specified comparison function. Read more
1.5.0 · Source§

fn eq<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialEq<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are equal to those of another. Read more
Source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where Self: Sized, I: IntoIterator, F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of another with respect to the specified equality function. Read more
1.5.0 · Source§

fn ne<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialEq<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are not equal to those of another. Read more
1.5.0 · Source§

fn lt<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialOrd<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are lexicographically less than those of another. Read more
1.5.0 · Source§

fn le<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialOrd<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are lexicographically less or equal to those of another. Read more
1.5.0 · Source§

fn gt<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialOrd<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are lexicographically greater than those of another. Read more
1.5.0 · Source§

fn ge<I>(self, other: I) -> bool
where I: IntoIterator, Self::Item: PartialOrd<<I as IntoIterator>::Item>, Self: Sized,

Determines if the elements of this Iterator are lexicographically greater than or equal to those of another. Read more
1.82.0 · Source§

fn is_sorted(self) -> bool
where Self: Sized, Self::Item: PartialOrd,

Checks if the elements of this iterator are sorted. Read more
1.82.0 · Source§

fn is_sorted_by<F>(self, compare: F) -> bool
where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> bool,

Checks if the elements of this iterator are sorted using the given comparator function. Read more
1.82.0 · Source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where Self: Sized, F: FnMut(Self::Item) -> K, K: PartialOrd,

Checks if the elements of this iterator are sorted using the given key extraction function. Read more

Auto Trait Implementations§

§

impl<'source, Token> Freeze for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: Freeze,

§

impl<'source, Token> RefUnwindSafe for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: RefUnwindSafe, <Token as Logos<'source>>::Source: RefUnwindSafe,

§

impl<'source, Token> Send for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: Send, <Token as Logos<'source>>::Source: Sync,

§

impl<'source, Token> Sync for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: Sync, <Token as Logos<'source>>::Source: Sync,

§

impl<'source, Token> Unpin for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: Unpin,

§

impl<'source, Token> UnwindSafe for Lexer<'source, Token>
where <Token as Logos<'source>>::Extras: UnwindSafe, <Token as Logos<'source>>::Source: RefUnwindSafe,

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> CloneToUninit for T
where T: Clone,

Source§

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<I> IntoIterator for I
where I: Iterator,

Source§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
Source§

type IntoIter = I

Which kind of iterator are we turning this into?
Source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
Source§

impl<T> ToOwned for T
where T: Clone,

Source§

type Owned = T

The resulting type after obtaining ownership.
Source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
Source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.