do_not_use_antlr_rust/
common_token_stream.rs

//! Channel-based `TokenStream`
use std::borrow::Borrow;

use crate::int_stream::{IntStream, IterWrapper, EOF};
use crate::token::{Token, TOKEN_DEFAULT_CHANNEL, TOKEN_INVALID_TYPE};
use crate::token_factory::TokenFactory;
use crate::token_source::TokenSource;
use crate::token_stream::{TokenStream, UnbufferedTokenStream};
use better_any::{Tid, TidAble};

/// Default token stream that skips tokens that do not correspond to the current channel.
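///
/// A minimal usage sketch; `MyLexer`/`MyParser` and the input are hypothetical
/// generated types shown only for illustration, hence the `ignore` example:
/// ```ignore
/// let lexer = MyLexer::new(InputStream::new("some input"));
/// let tokens = CommonTokenStream::new(lexer);
/// let mut parser = MyParser::new(tokens);
/// ```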
#[derive(Debug)]
pub struct CommonTokenStream<'input, T: TokenSource<'input>> {
    base: UnbufferedTokenStream<'input, T>,
    channel: isize,
}

better_any::tid! { impl<'input,T> TidAble<'input> for CommonTokenStream<'input, T> where T: TokenSource<'input>}

impl<'input, T: TokenSource<'input>> IntStream for CommonTokenStream<'input, T> {
    #[inline]
    fn consume(&mut self) {
        self.base.consume();
        // after consuming, skip ahead to the next token on this stream's channel
        let next = self.next_token_on_channel(self.base.p, self.channel, 1);
        self.base.seek(next);
    }

    #[inline]
    fn la(&mut self, i: isize) -> isize {
        self.lt(i)
            .map(|t| t.borrow().get_token_type())
            .unwrap_or(TOKEN_INVALID_TYPE)
    }

    #[inline(always)]
    fn mark(&mut self) -> isize { 0 }

    #[inline(always)]
    fn release(&mut self, _marker: isize) {}

    #[inline(always)]
    fn index(&self) -> isize { self.base.index() }

    #[inline(always)]
    fn seek(&mut self, index: isize) { self.base.seek(index); }

    #[inline(always)]
    fn size(&self) -> isize { self.base.size() }

    fn get_source_name(&self) -> String { self.base.get_source_name() }
}

impl<'input, T: TokenSource<'input>> TokenStream<'input> for CommonTokenStream<'input, T> {
    type TF = T::TF;

    #[inline(always)]
    fn lt(&mut self, k: isize) -> Option<&<Self::TF as TokenFactory<'input>>::Tok> {
        if k == 1 {
            // fast path: LT(1) is the token at the current position
            return self.base.tokens.get(self.base.p as usize);
        }
        if k == 0 {
            // LT(0) is undefined
            panic!();
        }
        if k < 0 {
            // negative k looks backwards from the current position
            return self.lb(-k);
        }
        self.lt_inner(k)
    }

    #[inline]
    fn get(&self, index: isize) -> &<Self::TF as TokenFactory<'input>>::Tok { self.base.get(index) }

    fn get_token_source(&self) -> &dyn TokenSource<'input, TF = Self::TF> {
        self.base.get_token_source()
    }

    fn get_text_from_interval(&self, start: isize, stop: isize) -> String {
        self.base.get_text_from_interval(start, stop)
    }
}

impl<'input, T: TokenSource<'input>> CommonTokenStream<'input, T> {
    /// Creates a `CommonTokenStream` that produces tokens from `TOKEN_DEFAULT_CHANNEL`
    pub fn new(lexer: T) -> CommonTokenStream<'input, T> {
        Self::with_channel(lexer, TOKEN_DEFAULT_CHANNEL)
    }

    /// Creates a `CommonTokenStream` that produces tokens from `channel`
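    ///
    /// For example, to observe only tokens the lexer emitted on some other
    /// channel (the channel number `1` below is illustrative and grammar-specific):
    /// ```ignore
    /// let mut hidden = CommonTokenStream::with_channel(lexer, 1);
    /// ```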
    pub fn with_channel(lexer: T, channel: isize) -> CommonTokenStream<'input, T> {
        let mut r = CommonTokenStream {
            base: UnbufferedTokenStream::new_buffered(lexer),
            channel,
        };
        r.sync(0);
        r
    }

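    // Lookahead used by `lt` for k > 1: starting from the current position, walk
    // forward over k - 1 further on-channel tokens and return the one we land on.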
    fn lt_inner(&mut self, k: isize) -> Option<&<T::TF as TokenFactory<'input>>::Tok> {
        let mut i = self.base.p;
        let mut n = 1; // we know tokens[p] is a good one
        // find k good tokens
        while n < k {
            // skip off-channel tokens, but make sure to not look past EOF
            if self.sync(i + 1) {
                i = self.next_token_on_channel(i + 1, self.channel, 1);
            }
            n += 1;
        }
        return self.base.tokens.get(i as usize);
    }

    /// Restarts this token stream from the first token
    pub fn reset(&mut self) {
        self.base.p = 0;
        self.base.current_token_index = 0;
    }

    /// Creates an iterator over this token stream
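    ///
    /// The iterator yields token types while consuming the stream, so off-channel
    /// tokens are skipped along the way. A rough sketch (the printed values
    /// depend on the grammar):
    /// ```ignore
    /// let mut tokens = CommonTokenStream::new(lexer);
    /// for token_type in tokens.iter() {
    ///     println!("{}", token_type);
    /// }
    /// ```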
    pub fn iter(&mut self) -> IterWrapper<'_, Self> { IterWrapper(self) }

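    // Ensures that the token at index `i` has been fetched from the underlying
    // source. Returns `false` if the source ran out of tokens before reaching `i`.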
    fn sync(&mut self, i: isize) -> bool {
        let need = i - self.size() + 1;
        if need > 0 {
            let fetched = self.base.fill(need);
            return fetched >= need;
        }

        true
    }
    //
    //    fn fetch(&self, n: isize) -> int { unimplemented!() }
    //
    //    fn get_tokens(&self, start: isize, stop: isize, types: &IntervalSet) -> Vec<Token> { unimplemented!() }
    //
    //    fn lazy_init(&self) { unimplemented!() }
    //
    //    fn setup(&self) { unimplemented!() }
    //
    //    fn get_token_source(&self) -> TokenSource { unimplemented!() }
    //
    //    fn set_token_source(&self, tokenSource: TokenSource) { unimplemented!() }

    //todo make this const generic over direction
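    // Scans from index `i` in `direction` (1 = forward, -1 = backward) and returns
    // the index of the first token found on `channel` (or of the EOF token).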
    fn next_token_on_channel(&mut self, mut i: isize, channel: isize, direction: isize) -> isize {
        self.sync(i);
        if i >= self.size() {
            return self.size() - 1;
        }

        let mut token = self.base.tokens[i as usize].borrow();
        while token.get_channel() != channel {
            if token.get_token_type() == EOF || i < 0 {
                return i;
            }

            i += direction;
            self.sync(i);
            token = self.base.tokens[i as usize].borrow();
        }

        return i;
    }
    //
    //    fn previous_token_on_channel(&self, i: isize, channel: isize) -> int { unimplemented!() }
    //
    //    fn get_hidden_tokens_to_right(&self, tokenIndex: isize, channel: isize) -> Vec<Token> { unimplemented!() }
    //
    //    fn get_hidden_tokens_to_left(&self, tokenIndex: isize, channel: isize) -> Vec<Token> { unimplemented!() }
    //
    //    fn filter_for_channel(&self, left: isize, right: isize, channel: isize) -> Vec<Token> { unimplemented!() }
    //
    //    fn get_source_name(&self) -> String { unimplemented!() }
    //
    //    fn get_all_text(&self) -> String { unimplemented!() }
    //
    //    fn get_text_from_tokens(&self, start: Token, end: Token) -> String { unimplemented!() }
    //
    //    fn get_text_from_rule_context(&self, interval: RuleContext) -> String { unimplemented!() }
    //
    //    fn get_text_from_interval(&self, interval: &Interval) -> String { unimplemented!() }
    //
    //    fn fill(&self) { unimplemented!() }
    //
    //    fn adjust_seek_index(&self, i: isize) -> int { unimplemented!() }

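    // Look-back: returns the k-th token on this stream's channel before the
    // current position, or `None` if there are not enough tokens behind it.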
    fn lb(
        &mut self,
        k: isize,
    ) -> Option<&<<Self as TokenStream<'input>>::TF as TokenFactory<'input>>::Tok> {
        if k == 0 || (self.base.p - k) < 0 {
            return None;
        }

        let mut i = self.base.p;
        let mut n = 1;
        // find k good tokens looking backwards
        while n <= k && i > 0 {
            // skip off-channel tokens
            i = self.next_token_on_channel(i - 1, self.channel, -1);
            n += 1;
        }
        if i < 0 {
            return None;
        }

        return self.base.tokens.get(i as usize);
    }

    //    fn get_number_of_on_channel_tokens(&self) -> int { unimplemented!() }
}