pub struct Tokens<'s> { /* private fields */ }
Description

A `Tokens` instance: a tokenizer over an input string slice.
Implementations
impl<'s> Tokens<'s>
pub fn set_offset(&mut self, offset: Offset) -> bool

Sets the offset if it is valid, updating position and span. Returns `true` if the offset is valid, `false` otherwise.
pub fn is_at_start(&self) -> bool

Returns `true` if the current position is the start of input.
pub fn consume_all(&mut self) -> &str

Consumes the rest of the input.
§Example
use simple_tokenizer::*;
let mut tokens = "tokens".as_tokens();
assert_eq!(tokens.consume_all(), "tokens");
assert!(tokens.remainder().is_empty());
pub fn token(&mut self, token: impl AsRef<str>) -> bool

Consume the next substring equal to `token`, or nothing. Basically a shortcut for `bytes_if(token.len(), |s| s == token).is_some()`.
§Example
use simple_tokenizer::*;
let mut tokens = "tok1 tok2".as_tokens();
assert!(tokens.token("tok1"));
assert_eq!(tokens.remainder(), " tok2");
assert!(!tokens.token(" tok3"));
assert_eq!(tokens.remainder(), " tok2");
pub fn tokens(&mut self, tokens: impl IntoIterator<Item = impl AsRef<str>>) -> Option<&str>

Try to consume a substring equal to one of the provided tokens. Returns the first substring that matches successfully.
§Example
use simple_tokenizer::*;
let mut tokens = "tok1 tok2".as_tokens();
assert_eq!(tokens.tokens(&["tok", "tok1"]), Some("tok"));
assert_eq!(tokens.remainder(), "1 tok2");
assert_eq!(tokens.tokens(&["1 tok3", "2 tok2"]), None);
assert_eq!(tokens.remainder(), "1 tok2");
pub fn char(&mut self) -> Option<char>

Consume the next character.
§Example
use simple_tokenizer::*;
let mut tokens = "tokens".as_tokens();
assert_eq!(tokens.char(), Some('t'));
assert_eq!(tokens.remainder(), "okens");
pub fn char_if(&mut self, f: impl FnOnce(char) -> bool) -> Option<char>

Consume the next character if it matches a predicate.
§Example
use simple_tokenizer::*;
let mut tokens = "tokens".as_tokens();
assert_eq!(tokens.char_if(char::is_alphabetic), Some('t'));
assert_eq!(tokens.remainder(), "okens");
assert_eq!(tokens.char_if(char::is_numeric), None);
assert_eq!(tokens.remainder(), "okens");
pub fn bytes(&mut self, n: usize) -> Option<&str>

Consume the next `n` bytes.
§Example
use simple_tokenizer::*;
let mut tokens = "tokens123".as_tokens();
assert_eq!(tokens.bytes(6), Some("tokens"));
assert_eq!(tokens.remainder(), "123");
assert_eq!(tokens.bytes(5), None);
assert_eq!(tokens.remainder(), "123");
pub fn bytes_if(&mut self, n: usize, f: impl FnOnce(&str) -> bool) -> Option<&str>

Consume the next `n` bytes if they match a predicate.
§Example
use simple_tokenizer::*;
let mut tokens = "1231234".as_tokens();
assert_eq!(tokens.bytes_if(3, |s| s.chars().all(char::is_numeric)), Some("123"));
assert_eq!(tokens.remainder(), "1234");
assert_eq!(tokens.bytes_if(5, |s| s.chars().all(char::is_numeric)), None);
assert_eq!(tokens.remainder(), "1234");
pub fn limit_bytes(&mut self, n: usize) -> bool

Limit the input to the next `n` bytes. Returns `true` if successful (`n` lands on a char boundary).
§Example
use simple_tokenizer::*;
let mut tokens = "123456".as_tokens();
assert!(tokens.limit_bytes(4));
assert_eq!(tokens.remainder(), "1234");
pub fn split_bytes(self, n: usize) -> Option<(Tokens<'s>, Tokens<'s>)>

Attempts to split the `Tokens` into two. Similar to `str::split_at()`.
§Example
use simple_tokenizer::*;
let mut tokens = "1231234".as_tokens();
let (first, second) = tokens.split_bytes(3).unwrap();
assert_eq!(first.remainder(), "123");
assert_eq!(second.remainder(), "1234");
assert_eq!(second.offset(), Offset(3));
pub fn chars(&mut self, n: usize) -> Option<&str>

Consume the next `n` characters. Doesn’t advance if there aren’t enough characters left.
§Example
use simple_tokenizer::*;
let mut tokens = "tokens123".as_tokens();
assert_eq!(tokens.chars(6), Some("tokens"));
assert_eq!(tokens.remainder(), "123");
assert_eq!(tokens.chars(5), None);
assert_eq!(tokens.remainder(), "123");
pub fn chars_if(&mut self, n: usize, f: impl FnOnce(&str) -> bool) -> Option<&str>

Consume the next `n` characters if they match a predicate. Doesn’t advance if there aren’t enough characters left.
§Example
use simple_tokenizer::*;
let mut tokens = "1231234".as_tokens();
assert_eq!(tokens.chars_if(3, |s| s.chars().all(char::is_numeric)), Some("123"));
assert_eq!(tokens.remainder(), "1234");
assert_eq!(tokens.chars_if(5, |s| s.chars().all(char::is_numeric)), None);
assert_eq!(tokens.remainder(), "1234");
pub fn limit_chars(&mut self, n: usize) -> bool

Limits the input to the next `n` characters. Returns `true` if successful (at least `n` characters are left in the input).
§Example
use simple_tokenizer::*;
let mut tokens = "123456".as_tokens();
assert!(tokens.limit_chars(4));
assert_eq!(tokens.remainder(), "1234");
pub fn split_chars(self, n: usize) -> Option<(Tokens<'s>, Tokens<'s>)>

Attempts to split the `Tokens` into two. Similar to `str::split_at()`, but `n` is measured in characters.
§Example
use simple_tokenizer::*;
let mut tokens = "1231234".as_tokens();
let (first, second) = tokens.split_chars(3).unwrap();
assert_eq!(first.remainder(), "123");
assert_eq!(second.remainder(), "1234");
assert_eq!(second.offset(), Offset(3));
pub fn take_while(&mut self, f: impl FnMut(char) -> bool) -> &str

Consume characters while `f` returns `true`. Returns the consumed substring.
§Example
use simple_tokenizer::*;
let mut tokens = "12345word".as_tokens();
assert_eq!(tokens.take_while(char::is_numeric), "12345");
assert_eq!(tokens.remainder(), "word");
pub fn limit_while(&mut self, f: impl FnMut(char) -> bool)

Limit the input to the leading run of characters for which `f` returns `true`.
§Example
use simple_tokenizer::*;
let mut tokens = "line 1\nline 2".as_tokens();
tokens.limit_while(|ch| ch != '\n');
assert_eq!(tokens.remainder(), "line 1");
pub fn split_while(self, f: impl FnMut(char) -> bool) -> (Tokens<'s>, Tokens<'s>)

Attempts to split the `Tokens` into two. Similar to `str::split_at()`. The split point is determined by `f`.
§Example
use simple_tokenizer::*;
let mut tokens = "12345abcdef".as_tokens();
let (first, second) = tokens.split_while(char::is_numeric);
assert_eq!(first.remainder(), "12345");
assert_eq!(second.remainder(), "abcdef");
assert_eq!(second.offset(), Offset(5));