java_lang/tokenizer/stream.rs

use super::{one_token, Token};
use nom::{
    error::{Error, ErrorKind},
    multi::many0,
    Compare, CompareResult, IResult, Input, Needed, Parser,
};
use std::{borrow::Cow, iter::Enumerate, vec::IntoIter};

/// A stream of lexed `Token`s that nom parsers can consume as their input type.
#[derive(Clone, Debug, Default)]
pub struct TokenStream {
    data: Cow<'static, Vec<Token>>,
}

impl TokenStream {
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }

    /// Tokenizes `input`, succeeding only if nothing but trailing whitespace
    /// is left over after the last token.
    #[inline]
    pub fn from_str(input: &str) -> IResult<&str, Self> {
        let (remaining, out) = many0(one_token).parse(input)?;
        if remaining.trim_end().is_empty() {
            return Ok((
                remaining,
                Self {
                    data: Cow::Owned(out),
                },
            ));
        }
        Err(nom::Err::Failure(Error::new(remaining, ErrorKind::Fail)))
    }

    #[inline]
    pub fn from_vec(data: Vec<Token>) -> Self {
        Self {
            data: Cow::Owned(data),
        }
    }

    /// Copies the slice into a new owned stream.
    #[inline]
    fn from_slice(data: &[Token]) -> Self {
        Self {
            data: Cow::Owned(data.to_owned()),
        }
    }
}

impl Input for TokenStream {
    type Item = Token;
    type Iter = IntoIter<Token>;
    type IterIndices = Enumerate<Self::Iter>;

    #[inline]
    fn input_len(&self) -> usize {
        self.data.len()
    }

    #[inline]
    fn take(&self, index: usize) -> Self {
        Self::from_slice(&self.data[..index])
    }

    #[inline]
    fn take_from(&self, index: usize) -> Self {
        Self::from_slice(&self.data[index..])
    }

    #[inline]
    fn take_split(&self, index: usize) -> (Self, Self) {
        // nom expects the pair in (suffix, prefix) order: the remaining input
        // first, the consumed prefix second.
        let (prefix, suffix) = self.data.split_at(index);
        (Self::from_slice(suffix), Self::from_slice(prefix))
    }

    #[inline]
    fn position<P>(&self, predicate: P) -> Option<usize>
    where
        P: Fn(Self::Item) -> bool,
    {
        self.data.iter().position(|token| predicate(token.clone()))
    }

    #[inline]
    fn iter_elements(&self) -> Self::Iter {
        // Tokens are cloned out of the backing buffer so the iterator owns its
        // elements, as required by `Self::Iter = vec::IntoIter<Token>`.
        self.data.to_vec().into_iter()
    }

    #[inline]
    fn iter_indices(&self) -> Self::IterIndices {
        self.data.to_vec().into_iter().enumerate()
    }

    #[inline]
    fn slice_index(&self, count: usize) -> Result<usize, Needed> {
        // Every token occupies exactly one position, so the index of the
        // `count`-th element is `count` itself whenever it is in range.
        if count <= self.data.len() {
            Ok(count)
        } else {
            Err(Needed::Unknown)
        }
    }
}

impl Compare<TokenStream> for TokenStream {
    #[inline]
    fn compare(&self, t: TokenStream) -> CompareResult {
        // Find the first position where the two streams disagree.
        let pos = self
            .data
            .iter()
            .zip(t.data.iter())
            .position(|(a, b)| a != b);

        match pos {
            Some(_) => CompareResult::Error,
            None => {
                if self.input_len() >= t.input_len() {
                    CompareResult::Ok
                } else {
                    CompareResult::Incomplete
                }
            }
        }
    }

    #[inline]
    fn compare_no_case(&self, t: TokenStream) -> CompareResult {
        // Case-insensitive comparison is not meaningful at the token level,
        // so simply delegate to the exact comparison.
        self.compare(t)
    }
}

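// Illustrative sketch, not part of the original module: one way a nom parser
// could consume a single expected token by leaning only on the `Input`
// implementation above. The name `expect_token` and the choice of
// `ErrorKind::Tag` are assumptions made for this example; `Token: PartialEq`
// is assumed, which the `Compare` impl above already relies on.
#[allow(dead_code)]
fn expect_token(expected: Token) -> impl Fn(TokenStream) -> IResult<TokenStream, Token> {
    move |input: TokenStream| match input.iter_elements().next() {
        // Matching token: emit it and advance the stream by one element.
        Some(token) if token == expected => Ok((input.take_from(1), token)),
        // Anything else (including an empty stream) is a recoverable error.
        _ => Err(nom::Err::Error(Error::new(input, ErrorKind::Tag))),
    }
}
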
/// Builds a `TokenStream` from a comma-separated list of `Token` variant names.
#[macro_export]
macro_rules! ts {
    ($($token:ident),*) => {
        $crate::TokenStream::from_vec(vec![$($crate::Token::$token),*])
    };
}
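
// A minimal usage sketch, assuming `one_token` fails cleanly on empty input
// (so `many0` stops and `from_str` succeeds). The commented `ts!` line uses
// hypothetical `Token` variant names purely for illustration; the real
// variants live in the parent module.
#[cfg(test)]
mod tests {
    use super::*;
    use nom::Input;

    #[test]
    fn empty_input_yields_empty_stream() {
        // An empty string should tokenize to an empty stream with nothing left over.
        let (rest, stream) = TokenStream::from_str("").expect("empty input should tokenize");
        assert!(rest.is_empty());
        assert!(stream.is_empty());
        assert_eq!(stream.input_len(), 0);

        // Hypothetical variant names; adjust to the real `Token` enum:
        // let stream = ts![LParen, RParen];
    }
}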