do_not_use_antlr_rust/
common_token_stream.rs1use std::borrow::Borrow;
3
4use crate::int_stream::{IntStream, IterWrapper, EOF};
5use crate::token::{Token, TOKEN_DEFAULT_CHANNEL, TOKEN_INVALID_TYPE};
6use crate::token_factory::TokenFactory;
7use crate::token_source::TokenSource;
8use crate::token_stream::{TokenStream, UnbufferedTokenStream};
9use better_any::{Tid, TidAble};
10
/// Token stream that buffers every token produced by the underlying
/// `TokenSource` and, when navigated via `consume`/`lt`/`la`, skips tokens
/// that are not on the configured `channel` (e.g. hidden whitespace or
/// comment tokens).
#[derive(Debug)]
pub struct CommonTokenStream<'input, T: TokenSource<'input>> {
    // Buffered stream holding *all* tokens regardless of channel;
    // `base.p` is the index of the current token.
    base: UnbufferedTokenStream<'input, T>,
    // Only tokens whose channel equals this value are visible to callers.
    channel: isize,
}
17
// Wire up `better_any`'s type-id machinery (`Tid`/`TidAble`) for this
// generic type so it can participate in lifetime-aware downcasting.
better_any::tid! { impl<'input,T> TidAble<'input> for CommonTokenStream<'input, T> where T: TokenSource<'input>}
19
20impl<'input, T: TokenSource<'input>> IntStream for CommonTokenStream<'input, T> {
21 #[inline]
22 fn consume(&mut self) {
23 self.base.consume();
24 let next = self.next_token_on_channel(self.base.p, self.channel, 1);
27 self.base.seek(next);
28 }
30
31 #[inline]
32 fn la(&mut self, i: isize) -> isize {
33 self.lt(i)
34 .map(|t| t.borrow().get_token_type())
35 .unwrap_or(TOKEN_INVALID_TYPE)
36 }
37
38 #[inline(always)]
39 fn mark(&mut self) -> isize { 0 }
40
41 #[inline(always)]
42 fn release(&mut self, _marker: isize) {}
43
44 #[inline(always)]
45 fn index(&self) -> isize { self.base.index() }
46
47 #[inline(always)]
48 fn seek(&mut self, index: isize) { self.base.seek(index); }
49
50 #[inline(always)]
51 fn size(&self) -> isize { self.base.size() }
52
53 fn get_source_name(&self) -> String { self.base.get_source_name() }
54}
55
56impl<'input, T: TokenSource<'input>> TokenStream<'input> for CommonTokenStream<'input, T> {
57 type TF = T::TF;
58
59 #[inline(always)]
60 fn lt(&mut self, k: isize) -> Option<&<Self::TF as TokenFactory<'input>>::Tok> {
61 if k == 1 {
62 return self.base.tokens.get(self.base.p as usize);
63 }
64 if k == 0 {
65 panic!();
66 }
67 if k < 0 {
68 return self.lb(-k);
69 }
70 self.lt_inner(k)
71 }
72
73 #[inline]
74 fn get(&self, index: isize) -> &<Self::TF as TokenFactory<'input>>::Tok { self.base.get(index) }
75
76 fn get_token_source(&self) -> &dyn TokenSource<'input, TF = Self::TF> {
77 self.base.get_token_source()
78 }
79
80 fn get_text_from_interval(&self, start: isize, stop: isize) -> String {
81 self.base.get_text_from_interval(start, stop)
82 }
83}
84
85impl<'input, T: TokenSource<'input>> CommonTokenStream<'input, T> {
86 pub fn new(lexer: T) -> CommonTokenStream<'input, T> {
88 Self::with_channel(lexer, TOKEN_DEFAULT_CHANNEL)
89 }
90
91 pub fn with_channel(lexer: T, channel: isize) -> CommonTokenStream<'input, T> {
93 let mut r = CommonTokenStream {
94 base: UnbufferedTokenStream::new_buffered(lexer),
95 channel,
96 };
97 r.sync(0);
98 r
99 }
100
101 fn lt_inner(&mut self, k: isize) -> Option<&<T::TF as TokenFactory<'input>>::Tok> {
102 let mut i = self.base.p;
103 let mut n = 1; while n < k {
106 if self.sync(i + 1) {
108 i = self.next_token_on_channel(i + 1, self.channel, 1);
109 }
110 n += 1;
111 }
112 return self.base.tokens.get(i as usize);
114 }
115
116 pub fn reset(&mut self) {
118 self.base.p = 0;
119 self.base.current_token_index = 0;
120 }
121
122 pub fn iter(&mut self) -> IterWrapper<'_, Self> { IterWrapper(self) }
124
125 fn sync(&mut self, i: isize) -> bool {
126 let need = i - self.size() + 1;
127 if need > 0 {
128 let fetched = self.base.fill(need);
129 return fetched >= need;
130 }
131
132 true
133 }
134 fn next_token_on_channel(&mut self, mut i: isize, channel: isize, direction: isize) -> isize {
149 self.sync(i);
150 if i >= self.size() {
151 return self.size() - 1;
152 }
153
154 let mut token = self.base.tokens[i as usize].borrow();
155 while token.get_channel() != channel {
156 if token.get_token_type() == EOF || i < 0 {
157 return i;
158 }
159
160 i += direction;
161 self.sync(i);
162 token = self.base.tokens[i as usize].borrow();
163 }
164
165 return i;
166 }
167 fn lb(
191 &mut self,
192 k: isize,
193 ) -> Option<&<<Self as TokenStream<'input>>::TF as TokenFactory<'input>>::Tok> {
194 if k == 0 || (self.base.p - k) < 0 {
195 return None;
196 }
197
198 let mut i = self.base.p;
199 let mut n = 1;
200 while n <= k && i > 0 {
202 i = self.next_token_on_channel(i - 1, self.channel, -1);
204 n += 1;
205 }
206 if i < 0 {
207 return None;
208 }
209
210 return self.base.tokens.get(i as usize);
211 }
212
213 }