// sentience_tokenize/iter.rs
use crate::error::LexError;
use crate::{Lexer, Token};

/// Iterator over the tokens of a source string, created by [`tokenize_iter`].
///
/// Yields each token as `Ok(Token)` when lexing succeeded, or yields the
/// lexing failure once as `Err(LexError)` and then nothing further.
pub struct Tokens {
    // Either the buffered tokens or the one-shot error; see `TokensInner`.
    inner: TokensInner,
}
7
/// Internal state of [`Tokens`]: the whole input is lexed eagerly, so the
/// iterator either walks a finished token buffer or holds a pending error.
enum TokensInner {
    // Owning iterator over the fully-lexed token vector.
    Items(std::vec::IntoIter<Token>),
    // `Some(e)` until the error has been yielded; `None` afterwards.
    Error(Option<LexError>),
}
12
13pub fn tokenize_iter(src: &str) -> Tokens {
14 match Lexer::new(src).tokenize() {
15 Ok(v) => Tokens {
16 inner: TokensInner::Items(v.into_iter()),
17 },
18 Err(e) => Tokens {
19 inner: TokensInner::Error(Some(e)),
20 },
21 }
22}
23
24impl Iterator for Tokens {
25 type Item = Result<Token, LexError>;
26
27 fn next(&mut self) -> Option<Self::Item> {
28 match &mut self.inner {
29 TokensInner::Items(iter) => iter.next().map(Ok),
30 TokensInner::Error(e) => e.take().map(Err),
31 }
32 }
33
34 fn size_hint(&self) -> (usize, Option<usize>) {
35 match &self.inner {
36 TokensInner::Items(iter) => iter.size_hint(),
37 TokensInner::Error(Some(_)) => (1, Some(1)),
38 TokensInner::Error(None) => (0, Some(0)),
39 }
40 }
41}