conch-parser 0.1.1

A library for parsing programs written in the shell programming language.
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
//! A module for easily iterating over a `Token` stream.

use parse::SourcePos;
use token::Token;
use token::Token::*;
use std::iter as std_iter;
use std::mem;

/// Indicates an error such that EOF was encountered while some unmatched
/// tokens were still pending. The error stores the unmatched token
/// and the position where it appears in the source.
///
/// The first field is the token that was left unmatched; the second is the
/// position in the source where that token appears.
#[derive(Debug)]
pub struct UnmatchedError(pub Token, pub SourcePos);

/// An internal variant that indicates if a token should be yielded
/// or the current position updated to some value.
///
/// `Pos` markers allow position tracking to survive situations where tokens
/// are removed from the stream (e.g. unescaping), since the marker records
/// where the stream should resume in the original source.
#[derive(Debug)]
enum TokenOrPos {
    /// A consumed token which should be yielded.
    Tok(Token),
    /// The current position should be updated to the contained value.
    Pos(SourcePos),
}

impl TokenOrPos {
    /// Returns `true` if `self` carries a `Token` (rather than a
    /// position update).
    #[inline]
    fn is_tok(&self) -> bool {
        matches!(*self, TokenOrPos::Tok(_))
    }
}

/// An iterator that can track its internal position in the stream.
pub trait PositionIterator: Iterator {
    /// Get the current position of the iterator, i.e. where in the source
    /// the next item to be yielded appears.
    fn pos(&self) -> SourcePos;
}

impl<'a, T: PositionIterator> PositionIterator for &'a mut T {
    fn pos(&self) -> SourcePos {
        (**self).pos()
    }
}

/// An iterator that supports peeking a single element in the stream.
///
/// Identical to `std::iter::Peekable` but in a trait form, so that adaptors
/// and trait objects can require peekability without a concrete wrapper type.
pub trait PeekableIterator: Iterator {
    /// Peek at the next item, identical to `std::iter::Peekable::peek`.
    fn peek(&mut self) -> Option<&Self::Item>;
}

impl<'a, T: PeekableIterator> PeekableIterator for &'a mut T {
    fn peek(&mut self) -> Option<&Self::Item> {
        (**self).peek()
    }
}

impl<I: Iterator> PeekableIterator for std_iter::Peekable<I> {
    fn peek(&mut self) -> Option<&Self::Item> {
        // Fully-qualified call to make clear this invokes `Peekable`'s
        // inherent `peek`, not this trait method.
        std_iter::Peekable::peek(self)
    }
}

/// A marker trait that unifies `PeekableIterator` and `PositionIterator`.
pub trait PeekablePositionIterator: PeekableIterator + PositionIterator {}
// Blanket impl: anything satisfying both super-traits gets this automatically.
impl<T: PeekableIterator + PositionIterator> PeekablePositionIterator for T {}

/// A convenience trait for converting `Token` iterators into other sub-iterators.
///
/// All methods return adaptors borrowing `self`, so consuming the adaptor
/// advances the underlying stream.
pub trait TokenIterator: Sized + PeekablePositionIterator<Item = Token> {
    /// Returns an iterator that yields at least one token, but continues to yield
    /// tokens until all matching cases of single/double quotes, backticks,
    /// ${ }, $( ), or ( ) are found.
    fn balanced(&mut self) -> Balanced<&mut Self> {
        Balanced::new(self, None)
    }

    /// Returns an iterator that yields tokens up to when a (closing) single quote
    /// is reached (assuming that the caller has reached the opening quote and
    /// wishes to continue up to but not including the closing quote).
    fn single_quoted(&mut self, pos: SourcePos) -> Balanced<&mut Self> {
        Balanced::new(self, Some((SingleQuote, pos)))
    }

    /// Returns an iterator that yields tokens up to when a (closing) double quote
    /// is reached (assuming that the caller has reached the opening quote and
    /// wishes to continue up to but not including the closing quote).
    fn double_quoted(&mut self, pos: SourcePos) -> Balanced<&mut Self> {
        Balanced::new(self, Some((DoubleQuote, pos)))
    }

    /// Returns an iterator that yields tokens up to when a (closing) backtick
    /// is reached (assuming that the caller has reached the opening backtick and
    /// wishes to continue up to but not including the closing backtick).
    fn backticked(&mut self, pos: SourcePos) -> Balanced<&mut Self> {
        Balanced::new(self, Some((Backtick, pos)))
    }

    /// Returns an iterator that yields tokens up to when a (closing) backtick
    /// is reached (assuming that the caller has reached the opening backtick and
    /// wishes to continue up to but not including the closing backtick).
    /// Any backslashes followed by \, $, or ` are removed from the stream.
    fn backticked_remove_backslashes(&mut self, pos: SourcePos)
        -> BacktickBackslashRemover<&mut Self>
    {
        BacktickBackslashRemover::new(self.backticked(pos))
    }
}

/// Convenience trait for `Token` iterators which could be "rewound" so that
/// they can yield tokens that were already pulled out of their stream.
///
/// Used by `Multipeek` to hand peeked items back when it is dropped.
trait RewindableTokenIterator {
    /// Rewind the iterator with the provided tokens. Vector should contain
    /// the tokens in the order they should be yielded.
    fn rewind(&mut self, tokens: Vec<TokenOrPos>);

    /// Grab the next token (or internal position) that should be buffered
    /// by the caller.
    fn next_token_or_pos(&mut self) -> Option<TokenOrPos>;
}

/// A Token iterator that keeps track of how many lines have been read.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[derive(Debug)]
pub struct TokenIter<I> {
    /// The underlying token iterator being wrapped. Iterator is fused to avoid
    /// inconsistent behavior when doing multiple peek ahead operations.
    iter: std_iter::Fuse<I>,
    /// Any tokens that were previously yielded but are to be consumed later,
    /// stored as a stack. Interspersed between the tokens are any changes to the
    /// current position that should be applied. This is useful for situations
    /// where the parser may have removed certain tokens (e.g. \ when unescaping),
    /// but we still want to keep track of token positions in the actual source.
    prev_buffered: Vec<TokenOrPos>,
    /// The current position in the source that we have consumed up to
    pos: SourcePos,
}

impl<I: Iterator<Item = Token>> PositionIterator for TokenIter<I> {
    /// Reports the position of the next token this iterator will yield.
    fn pos(&self) -> SourcePos {
        self.pos
    }
}

impl<I: Iterator<Item = Token>> PeekableIterator for TokenIter<I> {
    /// Peeks at the next token without consuming it.
    ///
    /// Works by performing a one-token multipeek (which leaves the token on
    /// top of `prev_buffered` when the peek wrapper is dropped) and then
    /// borrowing that buffered token.
    fn peek(&mut self) -> Option<&Self::Item> {
        {
            // Peek the next token, then drop the wrapper to get the token pushed
            // back on our buffer. Not the clearest solution, but gets around
            // the borrow checker.
            let mut peeked = self.multipeek();
            if peeked.peek_next().is_none() {
                // Didn't buffer anything, nothing to peek
                return None;
            }
        }

        // After a successful peek the top of the buffer must be a `Tok`;
        // `Multipeek::drop` rewinds exactly what was peeked.
        if let Some(&TokenOrPos::Tok(ref t)) = self.prev_buffered.last() {
            Some(t)
        } else {
            unreachable!("unexpected state: peeking next token failed. This is a bug!")
        }
    }
}

impl<I: Iterator<Item = Token>> Iterator for TokenIter<I> {
    type Item = Token;

    /// Yields the next token, preferring previously buffered tokens before
    /// advancing the underlying iterator, while keeping the reported source
    /// position in sync with the *next* token to be yielded.
    fn next(&mut self) -> Option<Token> {
        // `updated_buffered_pos` runs after every buffer mutation, so the
        // top of the stack can never be a `Pos` entry here; the original
        // `loop` around this match could never iterate more than once.
        let ret = match self.next_token_or_pos() {
            Some(TokenOrPos::Tok(next)) => {
                self.pos.advance(&next);
                Some(next)
            },

            Some(TokenOrPos::Pos(_)) => panic!("unexpected state. This is a bug!"),
            None => None,
        };

        // Make sure we update our position according to any trailing `Pos` points.
        // The parser expects that polling our current position will give it the
        // position of the next token we will yield. If we perform this check right
        // before yielding the next token, the parser will believe that token appears
        // much earlier in the source than it actually does.
        self.updated_buffered_pos();
        ret
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Only `Tok` entries in the buffer are ever yielded; `Pos` markers are
        // consumed silently, so counting them (as `prev_buffered.len()` would)
        // could overstate the lower bound. The buffered tokens are yielded in
        // addition to the inner iterator's items, so both bounds include them.
        let buffered = self.prev_buffered.iter().filter(|t| t.is_tok()).count();
        let (lo, hi) = self.iter.size_hint();
        (lo + buffered, hi.map(|hi| hi + buffered))
    }
}

impl<I: Iterator<Item = Token>> RewindableTokenIterator for TokenIter<I> {
    /// Pushes the given tokens (and position markers) back so they are
    /// yielded again before anything else. No `token_start` is supplied
    /// since the rewound items were already accounted for positionally.
    fn rewind(&mut self, tokens: Vec<TokenOrPos>) {
        self.buffer_tokens_and_positions_to_yield_first(tokens, None);
    }

    /// Pops from the pushback stack first, falling back to the underlying
    /// (fused) iterator once the stack is empty.
    fn next_token_or_pos(&mut self) -> Option<TokenOrPos> {
        match self.prev_buffered.pop() {
            Some(buffered) => Some(buffered),
            None => self.iter.next().map(TokenOrPos::Tok),
        }
    }
}

// `TokenIter` is a `PeekablePositionIterator`, so it gets all of the
// `TokenIterator` convenience adaptors (balanced, quoted, backticked) for free.
impl<I: Iterator<Item = Token>> TokenIterator for TokenIter<I> {}

impl<I: Iterator<Item = Token>> TokenIter<I> {
    /// Creates a new TokenIter from another Token iterator.
    ///
    /// The source iterator is fused so repeated peeking past the end
    /// behaves consistently.
    pub fn new(iter: I) -> TokenIter<I> {
        TokenIter {
            iter: iter.fuse(),
            prev_buffered: Vec::new(),
            pos: SourcePos::new(),
        }
    }

    /// Creates a new TokenIter from another Token iterator and an initial position.
    pub fn with_position(iter: I, pos: SourcePos) -> TokenIter<I> {
        let mut iter = TokenIter::new(iter);
        iter.pos = pos;
        iter
    }

    /// Return a wrapper which allows for arbitrary look ahead. Dropping the
    /// wrapper will restore the internal stream back to what it was.
    pub fn multipeek(&mut self) -> Multipeek {
        Multipeek::new(self)
    }

    /// Update the current position based on any buffered state.
    ///
    /// This allows us to always correctly report the position of the next token
    /// we are about to yield.
    fn updated_buffered_pos(&mut self) {
        // Drain any `Pos` markers sitting on top of the stack; the last one
        // popped wins as the current position.
        while let Some(&TokenOrPos::Pos(pos)) = self.prev_buffered.last() {
            self.prev_buffered.pop();
            self.pos = pos;
        }
    }

    /// Accepts a vector of tokens (and positions) to be yielded completely before the
    /// inner iterator is advanced further. The optional `token_start` (if provided)
    /// indicates what the iterator's position should have been if we were to naturally
    /// yield the provided buffer.
    fn buffer_tokens_and_positions_to_yield_first(
        &mut self,
        mut tokens: Vec<TokenOrPos>,
        token_start: Option<SourcePos>
    ) {
        // +1 leaves room for the position marker possibly pushed below.
        self.prev_buffered.reserve(tokens.len() + 1);

        // Push the current position further up the stack since we want to
        // restore it before yielding any previously-peeked tokens.
        if token_start.is_some() {
            self.prev_buffered.push(TokenOrPos::Pos(self.pos));
        }

        // Buffer the newly provided tokens in reverse since we are using a stack
        tokens.reverse();
        self.prev_buffered.extend(tokens);

        // Set our position to what it should be as we yield the buffered tokens
        if let Some(p) = token_start {
            self.pos = p;
        }

        self.updated_buffered_pos();
    }

    /// Accepts a vector of tokens to be yielded completely before the inner
    /// iterator is advanced further. The provided `buf_start` indicates
    /// what the iterator's position should have been if we were to naturally
    /// yield the provided buffer.
    pub fn buffer_tokens_to_yield_first(&mut self, buf: Vec<Token>, buf_start: SourcePos) {
        let tokens = buf.into_iter().map(TokenOrPos::Tok).collect();
        self.buffer_tokens_and_positions_to_yield_first(tokens, Some(buf_start));
    }

    /// Collects all tokens yielded by `TokenIter::backticked_remove_backslashes`
    /// and creates a `TokenIter` which will yield the collected tokens, and maintain
    /// the correct position of where each token appears in the original source,
    /// regardless of how many backslashes may have been removed since then.
    pub fn token_iter_from_backticked_with_removed_backslashes(&mut self, pos: SourcePos)
        -> Result<TokenIter<std_iter::Empty<Token>>, UnmatchedError>
    {
        BacktickBackslashRemover::create_token_iter(self.backticked(pos))
    }
}

/// A wrapper for peeking arbitrary amounts into a `Token` stream.
/// Inspired by the `Multipeek` implementation in the `itertools` crate.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct Multipeek<'a> {
    /// The underlying token iterator. This is pretty much just a `TokenIter`,
    /// but we use a trait object to avoid having a generic signature and
    /// make this wrapper more flexible.
    iter: &'a mut RewindableTokenIterator,
    /// A buffer of values taken from the underlying iterator, in the order
    /// they were pulled. Handed back to `iter` when this wrapper is dropped.
    buf: Vec<TokenOrPos>,
}

impl<'a> Drop for Multipeek<'a> {
    /// Hands every peeked value back to the underlying iterator so the
    /// stream appears untouched by the peeking.
    fn drop(&mut self) {
        let buffered = mem::replace(&mut self.buf, Vec::new());
        self.iter.rewind(buffered);
    }
}

impl<'a> Multipeek<'a> {
    /// Wrap an iterator for arbitrary look-ahead.
    fn new(iter: &'a mut RewindableTokenIterator) -> Self {
        Multipeek {
            iter: iter,
            buf: Vec::new(),
        }
    }

    /// Public method for lazily peeking the next (unpeeked) value.
    /// Implemented as its own API instead of as an `Iterator` to avoid
    /// confusion with advancing the regular iterator.
    pub fn peek_next(&mut self) -> Option<&Token> {
        // Keep pulling until an actual token is buffered. Position markers
        // are buffered as well so they can be restored faithfully on drop.
        loop {
            let next = match self.iter.next_token_or_pos() {
                Some(next) => next,
                None => return None,
            };

            let found_tok = next.is_tok();
            self.buf.push(next);
            if found_tok {
                break;
            }
        }

        match self.buf.last() {
            Some(&TokenOrPos::Tok(ref t)) => Some(t),
            _ => None,
        }
    }
}

/// A wrapper which allows treating `TokenIter<I>` and `TokenIter<Empty<_>>` as
/// the same thing, even though they are technically different types.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[derive(Debug)]
pub enum TokenIterWrapper<I> {
    /// A `TokenIter` which holds an arbitrary `Iterator` over `Token`s.
    Regular(TokenIter<I>),
    /// A `TokenIter` which has all `Token`s buffered in memory, and thus
    /// has no underlying iterator.
    Buffered(TokenIter<std_iter::Empty<Token>>),
}

impl<I: Iterator<Item = Token>> PositionIterator for TokenIterWrapper<I> {
    fn pos(&self) -> SourcePos {
        match *self {
            TokenIterWrapper::Regular(ref inner) => inner.pos(),
            TokenIterWrapper::Buffered(ref inner) => inner.pos(),
        }
    }
}

impl<I: Iterator<Item = Token>> PeekableIterator for TokenIterWrapper<I> {
    fn peek(&mut self) -> Option<&Self::Item> {
        match *self {
            TokenIterWrapper::Regular(ref mut inner) => inner.peek(),
            TokenIterWrapper::Buffered(ref mut inner) => inner.peek(),
        }
    }
}

impl<I: Iterator<Item = Token>> Iterator for TokenIterWrapper<I> {
    type Item = Token;

    fn next(&mut self) -> Option<Self::Item> {
        match *self {
            TokenIterWrapper::Regular(ref mut inner) => inner.next(),
            TokenIterWrapper::Buffered(ref mut inner) => inner.next(),
        }
    }
}

// Either variant is a `PeekablePositionIterator` over `Token`s, so the
// wrapper inherits every `TokenIterator` adaptor regardless of which it holds.
impl<I: Iterator<Item = Token>> TokenIterator for TokenIterWrapper<I> {}

impl<I: Iterator<Item = Token>> TokenIterWrapper<I> {
    /// Return a wrapper which allows for arbitrary look ahead. Dropping the
    /// wrapper will restore the internal stream back to what it was.
    pub fn multipeek(&mut self) -> Multipeek {
        match *self {
            TokenIterWrapper::Regular(ref mut inner) => inner.multipeek(),
            TokenIterWrapper::Buffered(ref mut inner) => inner.multipeek(),
        }
    }

    /// Delegates to `TokenIter::buffer_tokens_to_yield_first`.
    pub fn buffer_tokens_to_yield_first(&mut self, buf: Vec<Token>, buf_start: SourcePos) {
        match *self {
            TokenIterWrapper::Regular(ref mut inner) => inner.buffer_tokens_to_yield_first(buf, buf_start),
            TokenIterWrapper::Buffered(ref mut inner) => inner.buffer_tokens_to_yield_first(buf, buf_start),
        }
    }

    /// Delegates to `TokenIter::token_iter_from_backticked_with_removed_backslashes`.
    pub fn token_iter_from_backticked_with_removed_backslashes(&mut self, pos: SourcePos)
        -> Result<TokenIter<std_iter::Empty<Token>>, UnmatchedError>
    {
        match *self {
            TokenIterWrapper::Regular(ref mut inner) =>
                inner.token_iter_from_backticked_with_removed_backslashes(pos),
            TokenIterWrapper::Buffered(ref mut inner) =>
                inner.token_iter_from_backticked_with_removed_backslashes(pos),
        }
    }
}

/// An iterator that yields at least one token, but continues to yield
/// tokens until all matching cases of single/double quotes, backticks,
/// ${ }, $( ), or ( ) are found.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[derive(Debug)]
pub struct Balanced<I> {
    /// The underlying token iterator.
    iter: I,
    /// Any token we had to peek after a backslash but haven't yielded yet,
    /// as well as the position after it.
    escaped: Option<(Token, SourcePos)>,
    /// A stack of pending unmatched tokens we still must encounter,
    /// each paired with the position of its opening token.
    stack: Vec<(Token, SourcePos)>,
    /// Indicates if we should yield the final, outermost delimiter.
    skip_last_delimeter: bool,
    /// Makes the iterator *fused* by yielding None forever after we are done.
    done: bool,
    /// The current position of the iterator.
    pos: SourcePos,
}

impl<I: PositionIterator> Balanced<I> {
    /// Constructs a new balanced iterator.
    ///
    /// If no delimiter is given, a single token will be yielded, unless the
    /// first found token is an opening one (e.g. "), making the iterator yield
    /// tokens until its matching delimiter is found (the matching delimiter *will*
    /// be consumed).
    ///
    /// If a delimiter (and its position) is specified, tokens are yielded *up to*
    /// the delimiter, but the delimiter will be silently consumed.
    pub fn new(iter: I, delim: Option<(Token, SourcePos)>) -> Self {
        // Capture these before `iter` is moved into the struct below.
        let start_pos = iter.pos();
        let skip_last = delim.is_some();

        let stack = match delim {
            Some(d) => vec!(d),
            None => Vec::new(),
        };

        Balanced {
            escaped: None,
            skip_last_delimeter: skip_last,
            stack: stack,
            done: false,
            pos: start_pos,
            iter: iter,
        }
    }
}

impl<I: PeekablePositionIterator<Item = Token>> PositionIterator for Balanced<I> {
    /// Reports this iterator's current position within the source.
    fn pos(&self) -> SourcePos {
        self.pos
    }
}

impl<I: PeekablePositionIterator<Item = Token>> Iterator for Balanced<I> {
    type Item = Result<Token, UnmatchedError>;

    fn next(&mut self) -> Option<Self::Item> {
        // A token escaped by a backslash was buffered on the previous call;
        // yield it now and adopt the position recorded just after it.
        if let Some((tok, pos)) = self.escaped.take() {
            self.pos = pos;
            return Some(Ok(tok));
        } else if self.done {
            // Fused: keep yielding `None` once finished.
            return None;
        }

        // If the next token closes the most recent opening token, pop the
        // pending entry and yield (or silently skip) the closing delimiter.
        if self.stack.last().map(|t| &t.0) == self.iter.peek() {
            let ret = self.iter.next().map(Ok);
            self.stack.pop();
            let stack_empty = self.stack.is_empty();
            self.done |= stack_empty;
            self.pos = self.iter.pos();
            if self.skip_last_delimeter && stack_empty {
                return None;
            } else {
                return ret;
            };
        }

        // Tokens between single quotes have no special meaning
        // so we should make sure we don't treat anything specially.
        if let Some(&(SingleQuote, pos)) = self.stack.last() {
            let ret = match self.iter.next() {
                // Closing SingleQuote should have been captured above
                Some(t) => Some(Ok(t)),
                // Make sure we indicate errors on missing closing quotes
                None => Some(Err(UnmatchedError(SingleQuote, pos))),
            };

            self.pos = self.iter.pos();
            return ret;
        }

        let cur_pos = self.iter.pos();
        let ret = match self.iter.next() {
            Some(Backslash) => {
                // Make sure that we indicate our position as before the escaped token,
                // and NOT as the underlying iterator's position, which will indicate the
                // position AFTER the escaped token (which we are buffering ourselves)
                self.pos = self.iter.pos();

                debug_assert_eq!(self.escaped, None);
                self.escaped = self.iter.next().map(|t| (t, self.iter.pos()));
                // Make sure we stop yielding tokens after the stored escaped token
                // otherwise we risk consuming one token too many!
                self.done |= self.stack.is_empty();
                return Some(Ok(Backslash));
            },

            Some(Backtick) => {
                self.stack.push((Backtick, cur_pos));
                Some(Ok(Backtick))
            },

            Some(SingleQuote) => {
                // Single quotes inside a double-quoted region are literal,
                // so only track them when not inside double quotes.
                if self.stack.last().map(|t| &t.0) != Some(&DoubleQuote) {
                    self.stack.push((SingleQuote, cur_pos));
                }
                Some(Ok(SingleQuote))
            },

            Some(DoubleQuote) => {
                self.stack.push((DoubleQuote, cur_pos));
                Some(Ok(DoubleQuote))
            },

            Some(ParenOpen) => {
                self.stack.push((ParenClose, cur_pos));
                Some(Ok(ParenOpen))
            },

            Some(Dollar) => {
                let cur_pos = self.iter.pos(); // Want the pos of curly or paren, not $ here
                match self.iter.peek() {
                    Some(&CurlyOpen) => self.stack.push((CurlyClose, cur_pos)),
                    Some(&ParenOpen) => {}, // Already handled by paren case above

                    // We have nothing further to match
                    _ => { self.done |= self.stack.is_empty(); },
                }
                Some(Ok(Dollar))
            },

            Some(t) => {
                // If we aren't looking for any more delimeters we should only
                // consume a single token (since its balanced by nature)
                self.done |= self.stack.is_empty();
                Some(Ok(t))
            },

            None => match self.stack.pop() {
                // Its okay to hit EOF if everything is balanced so far
                None => { self.done = true; None },
                // But its not okay otherwise. Report the *opening* token the
                // caller was waiting on, rather than the expected closing one.
                Some((ParenClose, pos)) => Some(Err(UnmatchedError(ParenOpen, pos))),
                Some((CurlyClose, pos)) => Some(Err(UnmatchedError(CurlyOpen, pos))),
                Some((delim, pos))      => Some(Err(UnmatchedError(delim, pos))),
            },
        };

        self.pos = self.iter.pos();
        ret
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Our best guess is as good as the internal token iterator's...
        // NOTE(review): the inner lower bound may overstate ours, since this
        // iterator can stop before draining the inner one (e.g. when the
        // closing delimiter is skipped) — consider `(0, hi)`; verify callers.
        self.iter.size_hint()
    }

}

/// A `Balanced` backtick `Token` iterator which removes all backslashes
/// from the stream that are followed by \, $, or `.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[derive(Debug)]
pub struct BacktickBackslashRemover<I> {
    /// The underlying token iterator.
    iter: Balanced<I>,
    /// A token (or error) pulled while handling a backslash escape but not
    /// yet yielded; it is returned before anything else on the next call.
    peeked: Option<Result<Token, UnmatchedError>>,
    /// Makes the iterator *fused* by yielding None forever after we are done.
    done: bool,
}

impl<I> BacktickBackslashRemover<I> {
    /// Constructs a new balanced backtick iterator which removes all backslashes
    /// from the stream that are followed by \, $, or `.
    pub fn new(iter: Balanced<I>) -> Self {
        // Start with nothing stashed and the stream still live.
        BacktickBackslashRemover {
            peeked: None,
            done: false,
            iter: iter,
        }
    }
}

impl<I: PeekablePositionIterator<Item = Token>> BacktickBackslashRemover<I> {
    /// Collects all tokens yielded by `TokenIter::backticked_remove_backslashes`
    /// and creates a `TokenIter` which will yield the collected tokens, and maintain
    /// the correct position of where each token appears in the original source,
    /// regardless of how many backslashes may have been removed since then.
    ///
    /// Returns an `UnmatchedError` if the underlying `Balanced` iterator
    /// reports unbalanced delimiters before the closing backtick is found.
    fn create_token_iter(mut iter: Balanced<I>)
        -> Result<TokenIter<std_iter::Empty<Token>>, UnmatchedError>
    {
        // Each chunk is a run of tokens with a contiguous position range;
        // removing a backslash breaks contiguity, so it starts a new chunk
        // whose starting position is recorded alongside it.
        let mut all_chunks = Vec::new();
        let mut chunk_start = iter.pos();
        let mut chunk = Vec::new();

        loop {
            match iter.next() {
                Some(Ok(Backslash)) => {
                    let next_pos = iter.pos();
                    match iter.next() {
                        // \, $, and ` lose their leading backslash: drop it
                        // and begin a fresh chunk at the escaped token.
                        Some(Ok(tok@Dollar))    |
                        Some(Ok(tok@Backtick))  |
                        Some(Ok(tok@Backslash)) => {
                            all_chunks.push((chunk, chunk_start));
                            chunk_start = next_pos;
                            chunk = vec!(tok);
                        },

                        // Any other escape is kept verbatim.
                        Some(tok) => {
                            chunk.push(Backslash);
                            chunk.push(try!(tok));
                        },

                        None => chunk.push(Backslash),
                    }
                },

                Some(tok) => chunk.push(try!(tok)),
                None => break,
            }
        }

        if !chunk.is_empty() {
            all_chunks.push((chunk, chunk_start));
        }

        // Buffer the chunks in reverse so that earlier chunks end up on top
        // of the stack and are yielded first, each restoring its own
        // starting position.
        let mut tok_iter = TokenIter::with_position(std_iter::empty(), iter.pos());
        while let Some((chunk, chunk_end)) = all_chunks.pop() {
            tok_iter.buffer_tokens_to_yield_first(chunk, chunk_end);
        }
        Ok(tok_iter)
    }
}

// Presumably `if_nightly!` compiles its contents only on nightly toolchains
// (TODO confirm against the macro's definition). The impl itself is sound:
// `next` keeps returning `None` once the `done` flag is set.
if_nightly! {
    impl<I> ::std::iter::FusedIterator for BacktickBackslashRemover<I>
        where I: PeekablePositionIterator<Item = Token>
    {}
}

impl<I: PeekablePositionIterator<Item = Token>> Iterator for BacktickBackslashRemover<I> {
    type Item = Result<Token, UnmatchedError>;

    fn next(&mut self) -> Option<Self::Item> {
        // A non-special escaped token was stashed on a previous call;
        // yield it before pulling anything new from the underlying stream.
        if self.peeked.is_some() {
            return self.peeked.take();
        } else if self.done {
            // Fused: keep yielding `None` once finished.
            return None;
        }

        match self.iter.next() {
            Some(Ok(Backslash)) => {
                match self.iter.next() {
                    // Backslashes before \, $, or ` are removed: yield only
                    // the escaped token itself.
                    ret@Some(Ok(Dollar))    |
                    ret@Some(Ok(Backtick))  |
                    ret@Some(Ok(Backslash)) => ret,

                    // Any other escape keeps its backslash; stash the token
                    // (or error) so it is yielded on the next call.
                    Some(t) => {
                        debug_assert!(self.peeked.is_none());
                        self.peeked = Some(t);
                        Some(Ok(Backslash))
                    },

                    // A trailing backslash at EOF is yielded as-is.
                    None => {
                        self.done = true;
                        Some(Ok(Backslash))
                    },
                }
            },

            Some(t) => Some(t),
            None => {
                self.done = true;
                None
            },
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // The number of tokens we actually yield will never be
        // more than those of the underlying iterator, and will
        // probably be less, but this is a good enough estimate.
        // NOTE(review): the lower bound may overcount for the same reason,
        // since backslash removal yields fewer items — verify callers.
        self.iter.size_hint()
    }
}

#[cfg(test)]
mod tests {
    use parse::SourcePos;
    use super::{PositionIterator, TokenIter, TokenOrPos};
    use token::Token;

    /// Peeking every token via `Multipeek` must not consume the stream:
    /// after the peek wrapper is dropped, the iterator still yields all tokens.
    #[test]
    fn test_multipeek() {
        let tokens = vec!(Token::ParenOpen, Token::Semi, Token::Dollar, Token::ParenClose);

        let mut tok_iter = TokenIter::new(tokens.clone().into_iter());
        {
            let mut multipeek = tok_iter.multipeek();
            let mut expected_peeked = tokens.iter();
            while let Some(t) = multipeek.peek_next() {
                assert_eq!(expected_peeked.next(), Some(t));
            }

            // Exhausted the expected stream
            assert_eq!(expected_peeked.next(), None);
        }

        // Original iterator still yields the expected values
        assert_eq!(tokens, tok_iter.collect::<Vec<_>>());
    }

    /// Buffering tokens with leading `Pos` markers must apply those markers
    /// immediately, so `pos()` reports the position of the next real token.
    #[test]
    fn test_buffering_tokens_should_immediately_update_position() {
        // Small helper for constructing positions tersely.
        fn src(byte: usize, line: usize, col: usize) -> SourcePos {
            SourcePos {
                byte: byte,
                line: line,
                col: col,
            }
        }

        let mut tok_iter = TokenIter::new(::std::iter::empty());

        let pos = src(4, 4, 4);

        tok_iter.buffer_tokens_and_positions_to_yield_first(
            vec!(
                TokenOrPos::Pos(src(2, 2, 2)),
                TokenOrPos::Pos(src(3, 3, 3)),
                TokenOrPos::Pos(pos),
                TokenOrPos::Tok(Token::Newline),
            ),
            Some(src(1, 1, 1)),
        );

        assert_eq!(tok_iter.pos(), pos);
    }
}