pub struct Lexer<'source, Token: Logos<'source>> {
pub extras: Token::Extras,
/* private fields */
}

Expand description
Lexer is the main struct of the crate that allows you to read through a
Source and produce tokens for enums implementing the Logos trait.
Fields§
extras: Token::Extras — Extras associated with the Token.
Implementations§
Source§impl<'source, Token: Logos<'source>> Lexer<'source, Token>
impl<'source, Token: Logos<'source>> Lexer<'source, Token>
pub fn new(source: &'source Token::Source) -> Self
pub fn new(source: &'source Token::Source) -> Self
Create a new Lexer.
Due to type inference, it might be more ergonomic to construct
it by calling Token::lexer on any Token with derived Logos.
pub fn with_extras(
source: &'source Token::Source,
extras: Token::Extras,
) -> Self
pub fn with_extras( source: &'source Token::Source, extras: Token::Extras, ) -> Self
Create a new Lexer with the provided Extras.
Due to type inference, it might be more ergonomic to construct
it by calling Token::lexer_with_extras on any Token with derived Logos.
pub fn spanned(self) -> SpannedIter<'source, Token>
pub fn spanned(self) -> SpannedIter<'source, Token> ⓘ
Wrap the Lexer in an Iterator
that produces tuples of (Token, Span).
§Example
use logos::Logos;
#[derive(Debug, PartialEq, Clone, Default)]
enum LexingError {
NumberParseError,
#[default]
Other
}
impl From<std::num::ParseIntError> for LexingError {
fn from(_: std::num::ParseIntError) -> Self {
LexingError::NumberParseError
}
}
impl From<std::num::ParseFloatError> for LexingError {
fn from(_: std::num::ParseFloatError) -> Self {
LexingError::NumberParseError
}
}
#[derive(Logos, Debug, PartialEq)]
#[logos(error = LexingError)]
enum Example {
#[regex(r"[ \n\t\f]+", logos::skip)]
Ignored,
#[regex("-?[0-9]+", |lex| lex.slice().parse())]
Integer(i64),
#[regex("-?[0-9]+\\.[0-9]+", |lex| lex.slice().parse())]
Float(f64),
}
let tokens: Vec<_> = Example::lexer("42 3.14 -5 f").spanned().collect();
assert_eq!(
tokens,
&[
(Ok(Example::Integer(42)), 0..2),
(Ok(Example::Float(3.14)), 3..7),
(Ok(Example::Integer(-5)), 8..10),
(Err(LexingError::Other), 11..12), // 'f' is not a recognized token
],
);

Examples found in repository:
133fn main() {
134 //reads the input expression from the command line
135 let input = env::args()
136 .nth(1)
137 .expect("Expected expression argument (e.g. `1 + 7 * (3 - 4) / 5`)");
138
139 //creates a lexer instance from the input
140 let lexer = Token::lexer(&input);
141
142 //splits the input into tokens, using the lexer
143 let mut tokens = vec![];
144 for (token, span) in lexer.spanned() {
145 match token {
146 Ok(token) => tokens.push(token),
147 Err(e) => {
148 println!("lexer error at {:?}: {}", span, e);
149 return;
150 }
151 }
152 }
153
154 //parses the tokens to construct an AST
155 let ast = match parser().parse(&tokens).into_result() {
156 Ok(expr) => {
157 println!("[AST]\n{:#?}", expr);
158 expr
159 }
160 Err(e) => {
161 println!("parse error: {:#?}", e);
162 return;
163 }
164 };
165
166 //evaluates the AST to get the result
167 println!("\n[result]\n{}", ast.eval());
168}

pub fn span(&self) -> Span
pub fn span(&self) -> Span
Get the range for the current token in Source.
Examples found in repository?
33fn newline_callback(lex: &mut Lexer<Token>) -> Skip {
34 lex.extras.0 += 1;
35 lex.extras.1 = lex.span().end;
36 Skip
37}
38
39/// Compute the line and column position for the current word.
40fn word_callback(lex: &mut Lexer<Token>) -> (usize, usize) {
41 let line = lex.extras.0;
42 let column = lex.span().start - lex.extras.1;
43
44 (line, column)
45}

More examples
89fn parse_value(lexer: &mut Lexer<'_, Token>) -> Result<Value> {
90 if let Some(token) = lexer.next() {
91 match token {
92 Ok(Token::Bool(b)) => Ok(Value::Bool(b)),
93 Ok(Token::BraceOpen) => parse_object(lexer),
94 Ok(Token::BracketOpen) => parse_array(lexer),
95 Ok(Token::Null) => Ok(Value::Null),
96 Ok(Token::Number(n)) => Ok(Value::Number(n)),
97 Ok(Token::String(s)) => Ok(Value::String(s)),
98 _ => Err((
99 "unexpected token here (context: value)".to_owned(),
100 lexer.span(),
101 )),
102 }
103 } else {
104 Err(("empty values are not allowed".to_owned(), lexer.span()))
105 }
106}
107/* ANCHOR_END: value */
108
109/* ANCHOR: array */
110/// Parse a token stream into an array and return when
111/// a valid terminator is found.
112///
113/// > NOTE: we assume '[' was consumed.
114fn parse_array(lexer: &mut Lexer<'_, Token>) -> Result<Value> {
115 let mut array = Vec::new();
116 let span = lexer.span();
117 let mut awaits_comma = false;
118 let mut awaits_value = false;
119
120 while let Some(token) = lexer.next() {
121 match token {
122 Ok(Token::Bool(b)) if !awaits_comma => {
123 array.push(Value::Bool(b));
124 awaits_value = false;
125 }
126 Ok(Token::BraceOpen) if !awaits_comma => {
127 let object = parse_object(lexer)?;
128 array.push(object);
129 awaits_value = false;
130 }
131 Ok(Token::BracketOpen) if !awaits_comma => {
132 let sub_array = parse_array(lexer)?;
133 array.push(sub_array);
134 awaits_value = false;
135 }
136 Ok(Token::BracketClose) if !awaits_value => return Ok(Value::Array(array)),
137 Ok(Token::Comma) if awaits_comma => awaits_value = true,
138 Ok(Token::Null) if !awaits_comma => {
139 array.push(Value::Null);
140 awaits_value = false
141 }
142 Ok(Token::Number(n)) if !awaits_comma => {
143 array.push(Value::Number(n));
144 awaits_value = false;
145 }
146 Ok(Token::String(s)) if !awaits_comma => {
147 array.push(Value::String(s));
148 awaits_value = false;
149 }
150 _ => {
151 return Err((
152 "unexpected token here (context: array)".to_owned(),
153 lexer.span(),
154 ))
155 }
156 }
157 awaits_comma = !awaits_value;
158 }
159 Err(("unmatched opening bracket defined here".to_owned(), span))
160}
161/* ANCHOR_END: array */
162
163/* ANCHOR: object */
164/// Parse a token stream into an object and return when
165/// a valid terminator is found.
166///
167/// > NOTE: we assume '{' was consumed.
168fn parse_object(lexer: &mut Lexer<'_, Token>) -> Result<Value> {
169 let mut map = HashMap::new();
170 let span = lexer.span();
171 let mut awaits_comma = false;
172 let mut awaits_key = false;
173
174 while let Some(token) = lexer.next() {
175 match token {
176 Ok(Token::BraceClose) if !awaits_key => return Ok(Value::Object(map)),
177 Ok(Token::Comma) if awaits_comma => awaits_key = true,
178 Ok(Token::String(key)) if !awaits_comma => {
179 match lexer.next() {
180 Some(Ok(Token::Colon)) => (),
181 _ => {
182 return Err((
183 "unexpected token here, expecting ':'".to_owned(),
184 lexer.span(),
185 ))
186 }
187 }
188 let value = parse_value(lexer)?;
189 map.insert(key, value);
190 awaits_key = false;
191 }
192 _ => {
193 return Err((
194 "unexpected token here (context: object)".to_owned(),
195 lexer.span(),
196 ))
197 }
198 }
199 awaits_comma = !awaits_key;
200 }
201 Err(("unmatched opening brace defined here".to_owned(), span))
202}

85fn parse_value<'source>(lexer: &mut Lexer<'source, Token<'source>>) -> Result<Value<'source>> {
86 if let Some(token) = lexer.next() {
87 match token {
88 Ok(Token::Bool(b)) => Ok(Value::Bool(b)),
89 Ok(Token::BraceOpen) => parse_object(lexer),
90 Ok(Token::BracketOpen) => parse_array(lexer),
91 Ok(Token::Null) => Ok(Value::Null),
92 Ok(Token::Number(n)) => Ok(Value::Number(n)),
93 Ok(Token::String(s)) => Ok(Value::String(s)),
94 _ => Err((
95 "unexpected token here (context: value)".to_owned(),
96 lexer.span(),
97 )),
98 }
99 } else {
100 Err(("empty values are not allowed".to_owned(), lexer.span()))
101 }
102}
103/* ANCHOR_END: value */
104
105/* ANCHOR: array */
106/// Parse a token stream into an array and return when
107/// a valid terminator is found.
108///
109/// > NOTE: we assume '[' was consumed.
110fn parse_array<'source>(lexer: &mut Lexer<'source, Token<'source>>) -> Result<Value<'source>> {
111 let mut array = Vec::new();
112 let span = lexer.span();
113 let mut awaits_comma = false;
114 let mut awaits_value = false;
115
116 while let Some(token) = lexer.next() {
117 match token {
118 Ok(Token::Bool(b)) if !awaits_comma => {
119 array.push(Value::Bool(b));
120 awaits_value = false;
121 }
122 Ok(Token::BraceOpen) if !awaits_comma => {
123 let object = parse_object(lexer)?;
124 array.push(object);
125 awaits_value = false;
126 }
127 Ok(Token::BracketOpen) if !awaits_comma => {
128 let sub_array = parse_array(lexer)?;
129 array.push(sub_array);
130 awaits_value = false;
131 }
132 Ok(Token::BracketClose) if !awaits_value => return Ok(Value::Array(array)),
133 Ok(Token::Comma) if awaits_comma => awaits_value = true,
134 Ok(Token::Null) if !awaits_comma => {
135 array.push(Value::Null);
136 awaits_value = false
137 }
138 Ok(Token::Number(n)) if !awaits_comma => {
139 array.push(Value::Number(n));
140 awaits_value = false;
141 }
142 Ok(Token::String(s)) if !awaits_comma => {
143 array.push(Value::String(s));
144 awaits_value = false;
145 }
146 _ => {
147 return Err((
148 "unexpected token here (context: array)".to_owned(),
149 lexer.span(),
150 ))
151 }
152 }
153 awaits_comma = !awaits_value;
154 }
155 Err(("unmatched opening bracket defined here".to_owned(), span))
156}
157/* ANCHOR_END: array */
158
159/* ANCHOR: object */
160/// Parse a token stream into an object and return when
161/// a valid terminator is found.
162///
163/// > NOTE: we assume '{' was consumed.
164fn parse_object<'source>(lexer: &mut Lexer<'source, Token<'source>>) -> Result<Value<'source>> {
165 let mut map = HashMap::new();
166 let span = lexer.span();
167 let mut awaits_comma = false;
168 let mut awaits_key = false;
169
170 while let Some(token) = lexer.next() {
171 match token {
172 Ok(Token::BraceClose) if !awaits_key => return Ok(Value::Object(map)),
173 Ok(Token::Comma) if awaits_comma => awaits_key = true,
174 Ok(Token::String(key)) if !awaits_comma => {
175 match lexer.next() {
176 Some(Ok(Token::Colon)) => (),
177 _ => {
178 return Err((
179 "unexpected token here, expecting ':'".to_owned(),
180 lexer.span(),
181 ))
182 }
183 }
184 let value = parse_value(lexer)?;
185 map.insert(key, value);
186 awaits_key = false;
187 }
188 _ => {
189 return Err((
190 "unexpected token here (context: object)".to_owned(),
191 lexer.span(),
192 ))
193 }
194 }
195 awaits_comma = !awaits_key;
196 }
197 Err(("unmatched opening brace defined here".to_owned(), span))
198}

pub fn slice(&self) -> <Token::Source as Source>::Slice<'source>
pub fn slice(&self) -> <Token::Source as Source>::Slice<'source>
Get a string slice of the current token.
Examples found in repository?
34 fn from_lexer(lex: &mut logos::Lexer<'_, Token>) -> Self {
35 LexingError::NonAsciiCharacter(lex.slice().chars().next().unwrap())
36 }
37}
38
39#[derive(Debug, Logos, PartialEq)]
40#[logos(error(LexingError, LexingError::from_lexer))]
41#[logos(skip r"[ \t]+")]
42enum Token {
43 #[regex(r"[a-zA-Z]+")]
44 Word,
45 #[regex(r"[0-9]+", |lex| lex.slice().parse())]
46 Integer(u8),
47}
48
49fn main() {
50 // 256 overflows u8, since u8's max value is 255.
51 // 'é' is not a valid ascii letter.
52 let mut lex = Token::lexer("Hello 256 Jérome");
53
54 assert_eq!(lex.next(), Some(Ok(Token::Word)));
55 assert_eq!(lex.slice(), "Hello");
56
57 assert_eq!(
58 lex.next(),
59 Some(Err(LexingError::InvalidInteger(
60 "overflow error".to_owned()
61 )))
62 );
63 assert_eq!(lex.slice(), "256");
64
65 assert_eq!(lex.next(), Some(Ok(Token::Word)));
66 assert_eq!(lex.slice(), "J");
67
68 assert_eq!(lex.next(), Some(Err(LexingError::NonAsciiCharacter('é'))));
69 assert_eq!(lex.slice(), "é");
70
71 assert_eq!(lex.next(), Some(Ok(Token::Word)));
72 assert_eq!(lex.slice(), "rome");
73
74 assert_eq!(lex.next(), None);
75}

More examples
59fn main() {
60 let src = fs::read_to_string(env::args().nth(1).expect("Expected file argument"))
61 .expect("Failed to read file");
62
63 let mut lex = Token::lexer(src.as_str());
64
65 while let Some(token) = lex.next() {
66 if let Ok(Token::Word((line, column))) = token {
67 println!("Word '{}' found at ({}, {})", lex.slice(), line, column);
68 }
69 }
70}

48fn get_string_content(lex: &mut Lexer<StringContext>) -> String {
49 let mut s = String::new();
50 while let Some(Ok(token)) = lex.next() {
51 match token {
52 StringContext::Content => s.push_str(lex.slice()),
53 StringContext::DollarSign => s.push('$'),
54 StringContext::InterpolationStart(value) => s.push_str(&value),
55 StringContext::Quote => break,
56 }
57 }
58 s
59}
60
61fn variable_definition(lex: &mut Lexer<VariableDefinitionContext>) -> Option<(String, String)> {
62 let id = lex.slice().to_string();
63 if let Some(Ok(VariableDefinitionContext::Equals)) = lex.next() {
64 if let Some(Ok(VariableDefinitionContext::Quote)) = lex.next() {
65 let mut lex2 = lex.clone().morph::<StringContext>();
66 let value = get_string_content(&mut lex2);
67 *lex = lex2.morph();
68 lex.extras.insert(id.clone(), value.clone());
69 return Some((id, value));
70 }
71 }
72 None
73}
74/* ANCHOR_END: variable_definition */
75
76/* ANCHOR: evaluate_interpolation */
77fn evaluate_interpolation(lex: &mut Lexer<StringContext>) -> Option<String> {
78 let mut lex2 = lex.clone().morph::<StringInterpolationContext>();
79 let mut interpolation = String::new();
80 while let Some(result) = lex2.next() {
81 match result {
82 Ok(token) => match token {
83 StringInterpolationContext::Id(value) => interpolation.push_str(&value),
84 StringInterpolationContext::Quote => {
85 *lex = lex2.morph();
86 interpolation.push_str(&get_string_content(lex));
87 lex2 = lex.clone().morph();
88 }
89 StringInterpolationContext::InterpolationEnd => break,
90 },
91 Err(()) => panic!("Interpolation error"),
92 }
93 }
94 *lex = lex2.morph();
95 Some(interpolation)
96}
97/* ANCHOR_END: evaluate_interpolation */
98
99/* ANCHOR: get_variable_value */
100fn get_variable_value(lex: &mut Lexer<StringInterpolationContext>) -> Option<String> {
101 if let Some(value) = lex.extras.get(lex.slice()) {
102 return Some(value.clone());
103 }
104 None
105}

pub fn remainder(&self) -> <Token::Source as Source>::Slice<'source>
pub fn remainder(&self) -> <Token::Source as Source>::Slice<'source>
Get a slice of remaining source, starting at the end of current token.
Sourcepub fn morph<Token2>(self) -> Lexer<'source, Token2> ⓘ
pub fn morph<Token2>(self) -> Lexer<'source, Token2> ⓘ
Turn this lexer into a lexer for a new token type.
The new lexer continues to point at the same span as the current lexer, and the current token becomes the error token of the new token type.
Examples found in repository?
61fn variable_definition(lex: &mut Lexer<VariableDefinitionContext>) -> Option<(String, String)> {
62 let id = lex.slice().to_string();
63 if let Some(Ok(VariableDefinitionContext::Equals)) = lex.next() {
64 if let Some(Ok(VariableDefinitionContext::Quote)) = lex.next() {
65 let mut lex2 = lex.clone().morph::<StringContext>();
66 let value = get_string_content(&mut lex2);
67 *lex = lex2.morph();
68 lex.extras.insert(id.clone(), value.clone());
69 return Some((id, value));
70 }
71 }
72 None
73}
74/* ANCHOR_END: variable_definition */
75
76/* ANCHOR: evaluate_interpolation */
77fn evaluate_interpolation(lex: &mut Lexer<StringContext>) -> Option<String> {
78 let mut lex2 = lex.clone().morph::<StringInterpolationContext>();
79 let mut interpolation = String::new();
80 while let Some(result) = lex2.next() {
81 match result {
82 Ok(token) => match token {
83 StringInterpolationContext::Id(value) => interpolation.push_str(&value),
84 StringInterpolationContext::Quote => {
85 *lex = lex2.morph();
86 interpolation.push_str(&get_string_content(lex));
87 lex2 = lex.clone().morph();
88 }
89 StringInterpolationContext::InterpolationEnd => break,
90 },
91 Err(()) => panic!("Interpolation error"),
92 }
93 }
94 *lex = lex2.morph();
95 Some(interpolation)
96}

Trait Implementations
Source§impl<'source, Token> Iterator for Lexer<'source, Token>where
Token: Logos<'source>,
impl<'source, Token> Iterator for Lexer<'source, Token>where
Token: Logos<'source>,
Source§type Item = Result<Token, <Token as Logos<'source>>::Error>
type Item = Result<Token, <Token as Logos<'source>>::Error>
Source§fn next(&mut self) -> Option<Result<Token, Token::Error>>
fn next(&mut self) -> Option<Result<Token, Token::Error>>
Source§fn next_chunk<const N: usize>(
&mut self,
) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>where
Self: Sized,
fn next_chunk<const N: usize>(
&mut self,
) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>where
Self: Sized,
(unstable: iter_next_chunk) Advances the iterator and returns an array containing the next N values. Read more

1.0.0 · fn size_hint(&self) -> (usize, Option<usize>)
fn size_hint(&self) -> (usize, Option<usize>)
1.0.0 · Source§fn count(self) -> usizewhere
Self: Sized,
fn count(self) -> usizewhere
Self: Sized,
1.0.0 · Source§fn last(self) -> Option<Self::Item>where
Self: Sized,
fn last(self) -> Option<Self::Item>where
Self: Sized,
Source§fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>
fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>
(unstable: iter_advance_by) Advances the iterator by n elements. Read more

1.0.0 · fn nth(&mut self, n: usize) -> Option<Self::Item>
fn nth(&mut self, n: usize) -> Option<Self::Item>
nth element of the iterator. Read more1.28.0 · Source§fn step_by(self, step: usize) -> StepBy<Self>where
Self: Sized,
fn step_by(self, step: usize) -> StepBy<Self>where
Self: Sized,
1.0.0 · Source§fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
1.0.0 · Source§fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>where
Self: Sized,
U: IntoIterator,
fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>where
Self: Sized,
U: IntoIterator,
Source§fn intersperse(self, separator: Self::Item) -> Intersperse<Self>
fn intersperse(self, separator: Self::Item) -> Intersperse<Self>
iter_intersperse)separator between adjacent
items of the original iterator. Read moreSource§fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
iter_intersperse)separator
between adjacent items of the original iterator. Read more1.0.0 · Source§fn map<B, F>(self, f: F) -> Map<Self, F>
fn map<B, F>(self, f: F) -> Map<Self, F>
1.0.0 · Source§fn filter<P>(self, predicate: P) -> Filter<Self, P>
fn filter<P>(self, predicate: P) -> Filter<Self, P>
1.0.0 · Source§fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
1.0.0 · Source§fn enumerate(self) -> Enumerate<Self>where
Self: Sized,
fn enumerate(self) -> Enumerate<Self>where
Self: Sized,
1.0.0 · Source§fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
1.0.0 · Source§fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
1.57.0 · Source§fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
1.0.0 · Source§fn skip(self, n: usize) -> Skip<Self>where
Self: Sized,
fn skip(self, n: usize) -> Skip<Self>where
Self: Sized,
n elements. Read more1.0.0 · Source§fn take(self, n: usize) -> Take<Self>where
Self: Sized,
fn take(self, n: usize) -> Take<Self>where
Self: Sized,
n elements, or fewer
if the underlying iterator ends sooner. Read more1.0.0 · Source§fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
1.29.0 · Source§fn flatten(self) -> Flatten<Self>
fn flatten(self) -> Flatten<Self>
Source§fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
iter_map_windows)f for each contiguous window of size N over
self and returns an iterator over the outputs of f. Like slice::windows(),
the windows during mapping overlap as well. Read more1.0.0 · Source§fn inspect<F>(self, f: F) -> Inspect<Self, F>
fn inspect<F>(self, f: F) -> Inspect<Self, F>
1.0.0 · Source§fn by_ref(&mut self) -> &mut Selfwhere
Self: Sized,
fn by_ref(&mut self) -> &mut Selfwhere
Self: Sized,
Iterator. Read moreSource§fn try_collect<B>(
&mut self,
) -> <<Self::Item as Try>::Residual as Residual<B>>::TryType
fn try_collect<B>( &mut self, ) -> <<Self::Item as Try>::Residual as Residual<B>>::TryType
iterator_try_collect)Source§fn collect_into<E>(self, collection: &mut E) -> &mut E
fn collect_into<E>(self, collection: &mut E) -> &mut E
iter_collect_into)1.0.0 · Source§fn partition<B, F>(self, f: F) -> (B, B)
fn partition<B, F>(self, f: F) -> (B, B)
Source§fn is_partitioned<P>(self, predicate: P) -> bool
fn is_partitioned<P>(self, predicate: P) -> bool
iter_is_partitioned)true precede all those that return false. Read more1.27.0 · Source§fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
1.27.0 · Source§fn try_for_each<F, R>(&mut self, f: F) -> R
fn try_for_each<F, R>(&mut self, f: F) -> R
1.0.0 · Source§fn fold<B, F>(self, init: B, f: F) -> B
fn fold<B, F>(self, init: B, f: F) -> B
1.51.0 · Source§fn reduce<F>(self, f: F) -> Option<Self::Item>
fn reduce<F>(self, f: F) -> Option<Self::Item>
Source§fn try_reduce<R>(
&mut self,
f: impl FnMut(Self::Item, Self::Item) -> R,
) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
fn try_reduce<R>( &mut self, f: impl FnMut(Self::Item, Self::Item) -> R, ) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
iterator_try_reduce)1.0.0 · Source§fn all<F>(&mut self, f: F) -> bool
fn all<F>(&mut self, f: F) -> bool
1.0.0 · Source§fn any<F>(&mut self, f: F) -> bool
fn any<F>(&mut self, f: F) -> bool
1.0.0 · Source§fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
1.30.0 · Source§fn find_map<B, F>(&mut self, f: F) -> Option<B>
fn find_map<B, F>(&mut self, f: F) -> Option<B>
Source§fn try_find<R>(
&mut self,
f: impl FnMut(&Self::Item) -> R,
) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
fn try_find<R>( &mut self, f: impl FnMut(&Self::Item) -> R, ) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
try_find)1.0.0 · Source§fn position<P>(&mut self, predicate: P) -> Option<usize>
fn position<P>(&mut self, predicate: P) -> Option<usize>
1.0.0 · Source§fn max(self) -> Option<Self::Item>
fn max(self) -> Option<Self::Item>
1.0.0 · Source§fn min(self) -> Option<Self::Item>
fn min(self) -> Option<Self::Item>
1.6.0 · Source§fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
1.15.0 · Source§fn max_by<F>(self, compare: F) -> Option<Self::Item>
fn max_by<F>(self, compare: F) -> Option<Self::Item>
1.6.0 · Source§fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
1.15.0 · Source§fn min_by<F>(self, compare: F) -> Option<Self::Item>
fn min_by<F>(self, compare: F) -> Option<Self::Item>
1.0.0 · Source§fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
1.36.0 · Source§fn copied<'a, T>(self) -> Copied<Self>
fn copied<'a, T>(self) -> Copied<Self>
Source§fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>where
Self: Sized,
fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>where
Self: Sized,
iter_array_chunks)N elements of the iterator at a time. Read more1.11.0 · Source§fn product<P>(self) -> P
fn product<P>(self) -> P
Source§fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
iter_order_by)Iterator with those
of another with respect to the specified comparison function. Read more1.5.0 · Source§fn partial_cmp<I>(self, other: I) -> Option<Ordering>
fn partial_cmp<I>(self, other: I) -> Option<Ordering>
PartialOrd elements of
this Iterator with those of another. The comparison works like short-circuit
evaluation, returning a result without comparing the remaining elements.
As soon as an order can be determined, the evaluation stops and a result is returned. Read moreSource§fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>where
Self: Sized,
I: IntoIterator,
F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,
fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>where
Self: Sized,
I: IntoIterator,
F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,
iter_order_by)Iterator with those
of another with respect to the specified comparison function. Read moreSource§fn eq_by<I, F>(self, other: I, eq: F) -> bool
fn eq_by<I, F>(self, other: I, eq: F) -> bool
iter_order_by)1.5.0 · Source§fn lt<I>(self, other: I) -> bool
fn lt<I>(self, other: I) -> bool
Iterator are lexicographically
less than those of another. Read more1.5.0 · Source§fn le<I>(self, other: I) -> bool
fn le<I>(self, other: I) -> bool
Iterator are lexicographically
less or equal to those of another. Read more1.5.0 · Source§fn gt<I>(self, other: I) -> bool
fn gt<I>(self, other: I) -> bool
Iterator are lexicographically
greater than those of another. Read more1.5.0 · Source§fn ge<I>(self, other: I) -> bool
fn ge<I>(self, other: I) -> bool
Iterator are lexicographically
greater than or equal to those of another. Read more