Struct Tokens

pub struct Tokens<'s> { /* private fields */ }

A tokenizer over a borrowed string slice. It tracks how much of the input has been consumed and exposes the current byte offset, position, and span of the last token.

Implementations

impl<'s> Tokens<'s>

pub fn new(input: &'s str) -> Self

Construct a new instance from a string slice.
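Example (a sketch, not from the crate docs; assumes a freshly constructed instance has consumed nothing):
use simple_tokenizer::*;

let tokens = Tokens::new("tokens");

assert_eq!(tokens.remainder(), "tokens");
assert!(tokens.is_at_start());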

pub fn input(&self) -> &str

Returns the original full input.

pub fn remainder(&self) -> &str

Part of the input string that hasn’t been consumed yet.
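Example (a sketch; assumes input() still returns the full string after part of it has been consumed, while remainder() shrinks):
use simple_tokenizer::*;

let mut tokens = "tok1 tok2".as_tokens();

assert!(tokens.token("tok1"));
assert_eq!(tokens.input(), "tok1 tok2");
assert_eq!(tokens.remainder(), " tok2");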

pub fn span(&self) -> Span

Byte span of the last token.

pub fn position(&self) -> Position

Current position (just after the last token).

pub fn offset(&self) -> Offset

Current byte offset in the source.

pub fn set_offset(&mut self, offset: Offset) -> bool

Sets the offset if it is valid, updating position and span. Returns true if the offset is valid, false otherwise.
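Example (a sketch; assumes offsets count bytes from the start of the input, as in the split_bytes example further down, and that rewinding to a valid offset restores the remainder):
use simple_tokenizer::*;

let mut tokens = "tok1 tok2".as_tokens();

assert!(tokens.token("tok1"));
assert_eq!(tokens.offset(), Offset(4));

// Rewinding to a valid offset succeeds (assumed behaviour).
assert!(tokens.set_offset(Offset(0)));
assert_eq!(tokens.remainder(), "tok1 tok2");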

pub fn is_at_start(&self) -> bool

Returns true if the current position is the start of input.

pub fn is_at_end(&self) -> bool

Returns true if the input has been exhausted.
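Example (a sketch covering is_at_start and is_at_end; exhausts the input with consume_all as in its example below):
use simple_tokenizer::*;

let mut tokens = "tokens".as_tokens();

assert!(tokens.is_at_start());
assert!(!tokens.is_at_end());

tokens.consume_all();

assert!(tokens.is_at_end());
assert!(!tokens.is_at_start());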

pub fn peek(&self) -> Option<char>

Peeks at the next character of the input.
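Example (a sketch; peeking is assumed not to advance the position):
use simple_tokenizer::*;

let tokens = "tokens".as_tokens();

assert_eq!(tokens.peek(), Some('t'));
assert_eq!(tokens.remainder(), "tokens");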

pub fn consume_all(&mut self) -> &str

Consumes the rest of input.

Example
use simple_tokenizer::*;

let mut tokens = "tokens".as_tokens();

assert_eq!(tokens.consume_all(), "tokens");
assert!(tokens.remainder().is_empty());

pub fn token(&mut self, token: impl AsRef<str>) -> bool

Consume the next substring if it equals token; otherwise consume nothing. Basically a shortcut for bytes_if(token.len(), |s| s == token).is_some().

Example
use simple_tokenizer::*;

let mut tokens = "tok1 tok2".as_tokens();

assert!(tokens.token("tok1"));
assert_eq!(tokens.remainder(), " tok2");

assert!(!tokens.token(" tok3"));
assert_eq!(tokens.remainder(), " tok2");

pub fn tokens(&mut self, tokens: impl IntoIterator<Item = impl AsRef<str>>) -> Option<&str>

Try to consume a substring equal to one of the provided tokens, testing them in order. Returns the first one that matches, or None if none match.

Example
use simple_tokenizer::*;

let mut tokens = "tok1 tok2".as_tokens();

assert_eq!(tokens.tokens(&["tok", "tok1"]), Some("tok"));
assert_eq!(tokens.remainder(), "1 tok2");

assert_eq!(tokens.tokens(&["1 tok3", "2 tok2"]), None);
assert_eq!(tokens.remainder(), "1 tok2");

pub fn char(&mut self) -> Option<char>

Consume the next character.

Example
use simple_tokenizer::*;

let mut tokens = "tokens".as_tokens();

assert_eq!(tokens.char(), Some('t'));
assert_eq!(tokens.remainder(), "okens");

pub fn char_if(&mut self, f: impl FnOnce(char) -> bool) -> Option<char>

Consume the next character if it matches a predicate.

Example
use simple_tokenizer::*;

let mut tokens = "tokens".as_tokens();

assert_eq!(tokens.char_if(char::is_alphabetic), Some('t'));
assert_eq!(tokens.remainder(), "okens");

assert_eq!(tokens.char_if(char::is_numeric), None);
assert_eq!(tokens.remainder(), "okens");

pub fn bytes(&mut self, n: usize) -> Option<&str>

Consume the next n bytes. Doesn't advance if there aren't enough bytes left.

Example
use simple_tokenizer::*;

let mut tokens = "tokens123".as_tokens();

assert_eq!(tokens.bytes(6), Some("tokens"));
assert_eq!(tokens.remainder(), "123");

assert_eq!(tokens.bytes(5), None);
assert_eq!(tokens.remainder(), "123");

pub fn bytes_if(&mut self, n: usize, f: impl FnOnce(&str) -> bool) -> Option<&str>

Consume the next n bytes if they match a predicate.

Example
use simple_tokenizer::*;

let mut tokens = "1231234".as_tokens();

assert_eq!(tokens.bytes_if(3, |s| s.chars().all(char::is_numeric)), Some("123"));
assert_eq!(tokens.remainder(), "1234");

assert_eq!(tokens.bytes_if(5, |s| s.chars().all(char::is_numeric)), None);
assert_eq!(tokens.remainder(), "1234");

pub fn limit_bytes(&mut self, n: usize) -> bool

Limit the input to the next n bytes. Returns true if successful (n lands on a char boundary).

Example
use simple_tokenizer::*;

let mut tokens = "123456".as_tokens();

assert!(tokens.limit_bytes(4));
assert_eq!(tokens.remainder(), "1234");

pub fn split_bytes(self, n: usize) -> Option<(Tokens<'s>, Tokens<'s>)>

Attempts to split the Tokens into two. Similar to str::split_at().

Example
use simple_tokenizer::*;

let tokens = "1231234".as_tokens();

let (first, second) = tokens.split_bytes(3).unwrap();

assert_eq!(first.remainder(), "123");
assert_eq!(second.remainder(), "1234");
assert_eq!(second.offset(), Offset(3));

pub fn chars(&mut self, n: usize) -> Option<&str>

Consume the next n characters. Doesn’t advance if there aren’t enough characters left.

Example
use simple_tokenizer::*;

let mut tokens = "tokens123".as_tokens();

assert_eq!(tokens.chars(6), Some("tokens"));
assert_eq!(tokens.remainder(), "123");

assert_eq!(tokens.chars(5), None);
assert_eq!(tokens.remainder(), "123");

pub fn chars_if(&mut self, n: usize, f: impl FnOnce(&str) -> bool) -> Option<&str>

Consume the next n characters if they match a predicate. Doesn’t advance if there aren’t enough characters left.

Example
use simple_tokenizer::*;

let mut tokens = "1231234".as_tokens();

assert_eq!(tokens.chars_if(3, |s| s.chars().all(char::is_numeric)), Some("123"));
assert_eq!(tokens.remainder(), "1234");

assert_eq!(tokens.chars_if(5, |s| s.chars().all(char::is_numeric)), None);
assert_eq!(tokens.remainder(), "1234");

pub fn limit_chars(&mut self, n: usize) -> bool

Limits the input to the next n characters. Returns true if successful (>=n characters left in the input).

Example
use simple_tokenizer::*;

let mut tokens = "123456".as_tokens();

assert!(tokens.limit_chars(4));
assert_eq!(tokens.remainder(), "1234");

pub fn split_chars(self, n: usize) -> Option<(Tokens<'s>, Tokens<'s>)>

Attempts to split the Tokens into two. Similar to str::split_at(), but n is in characters.

Example
use simple_tokenizer::*;

let tokens = "1231234".as_tokens();

let (first, second) = tokens.split_chars(3).unwrap();

assert_eq!(first.remainder(), "123");
assert_eq!(second.remainder(), "1234");
assert_eq!(second.offset(), Offset(3));

pub fn take_while(&mut self, f: impl FnMut(char) -> bool) -> &str

Consume characters while f returns true. Returns the consumed substring.

Example
use simple_tokenizer::*;

let mut tokens = "12345word".as_tokens();

assert_eq!(tokens.take_while(char::is_numeric), "12345");
assert_eq!(tokens.remainder(), "word");

pub fn limit_while(&mut self, f: impl FnMut(char) -> bool)

Limit the input to the leading run of characters for which f returns true.

Example
use simple_tokenizer::*;

let mut tokens = "line 1\nline 2".as_tokens();

tokens.limit_while(|ch| ch != '\n');
assert_eq!(tokens.remainder(), "line 1");

pub fn split_while(self, f: impl FnMut(char) -> bool) -> (Tokens<'s>, Tokens<'s>)

Splits the Tokens into two. Similar to str::split_at(); the split point is the end of the leading run of characters for which f returns true.

Example
use simple_tokenizer::*;

let tokens = "12345abcdef".as_tokens();

let (first, second) = tokens.split_while(char::is_numeric);

assert_eq!(first.remainder(), "12345");
assert_eq!(second.remainder(), "abcdef");
assert_eq!(second.offset(), Offset(5));

Trait Implementations

impl<'s> AsTokens for Tokens<'s>

fn as_tokens(&self) -> Tokens<'_>

Convenient conversion to a Tokens instance.

impl<'s> Clone for Tokens<'s>

fn clone(&self) -> Tokens<'s>

Returns a duplicate of the value.

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source.

impl<'s> Debug for Tokens<'s>

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter.

impl<'s> PartialEq for Tokens<'s>

fn eq(&self, other: &Tokens<'s>) -> bool

Tests for self and other values to be equal, and is used by ==.

fn ne(&self, other: &Rhs) -> bool

Tests for !=. The default implementation is almost always sufficient, and should not be overridden without very good reason.

impl<'s> Eq for Tokens<'s>

impl<'s> StructuralPartialEq for Tokens<'s>

Auto Trait Implementations

impl<'s> Freeze for Tokens<'s>

impl<'s> RefUnwindSafe for Tokens<'s>

impl<'s> Send for Tokens<'s>

impl<'s> Sync for Tokens<'s>

impl<'s> Unpin for Tokens<'s>

impl<'s> UnwindSafe for Tokens<'s>

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> CloneToUninit for T
where T: Clone,

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest.

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.