sentience_tokenize/iter.rs

use crate::error::LexError;
use crate::{Lexer, Token};

/// An iterator over the tokens of a source string, created by
/// [`tokenize_iter`]. Yields `Result<Token, LexError>` and fuses (keeps
/// returning `None`) after the first error or the end of input.
#[must_use]
#[derive(Debug)]
pub struct Tokens<'a> {
    inner: TokensInner<'a>,
}

#[derive(Debug)]
enum TokensInner<'a> {
    /// Still lexing the remaining input.
    Lex(Lexer<'a>),
    /// Finished: an error was yielded or the input was exhausted.
    Done,
}
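
/// Tokenizes `src` lazily, yielding `Result<Token, LexError>` items.
///
/// A sketch of typical usage. The crate path `sentience_tokenize` is assumed
/// from the directory name, and the example is marked `ignore` because the
/// shapes of `Token` and `LexError` (and their `Debug` impls) are not
/// defined in this file:
///
/// ```ignore
/// for tok in sentience_tokenize::tokenize_iter("source text") {
///     match tok {
///         Ok(token) => println!("{token:?}"),
///         // After the first `Err`, the iterator fuses and yields `None`.
///         Err(err) => eprintln!("lex error: {err:?}"),
///     }
/// }
/// ```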
pub fn tokenize_iter(src: &str) -> Tokens<'_> {
    Tokens {
        inner: TokensInner::Lex(Lexer::new(src)),
    }
}

impl<'a> Iterator for Tokens<'a> {
    type Item = Result<Token, LexError>;

    fn next(&mut self) -> Option<Self::Item> {
        match &mut self.inner {
            TokensInner::Lex(lex) => match lex.next_token() {
                Some(Ok(tok)) => Some(Ok(tok)),
                Some(Err(e)) => {
                    // Fuse on error: drop the lexer so no tokens are
                    // produced past the first failure.
                    self.inner = TokensInner::Done;
                    Some(Err(e))
                }
                None => {
                    self.inner = TokensInner::Done;
                    None
                }
            },
            TokensInner::Done => None,
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
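        // The remaining token count is unknown until the input is fully
        // lexed, so report the loosest possible bounds.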
        (0, None)
    }
}
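
// A minimal sanity-check sketch of the fusing behavior; it assumes only that
// the lexer terminates on empty input.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn iterator_is_fused_after_exhaustion() {
        let mut toks = tokenize_iter("");
        // Drain whatever the lexer produces for an empty source.
        while toks.next().is_some() {}
        // Once exhausted, the iterator must keep returning `None`.
        assert!(toks.next().is_none());
        assert!(toks.next().is_none());
    }
}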