flexi_parse/
lib.rs

1//! flexi-parse is a crate for parsing arbitrary syntax into a syntax tree. It
2//! is intended to be more flexible than a parser generator or parser
3//! combinator, while still being simple to use.
4
5#![cfg_attr(all(doc, not(doctest)), feature(doc_auto_cfg))]
6
7use std::borrow::Cow;
8use std::collections::HashSet;
9use std::fmt;
10use std::fs;
11use std::io;
12use std::iter::Extend;
13use std::path::Path;
14use std::ptr;
15use std::result;
16use std::sync::atomic::AtomicUsize;
17use std::sync::atomic::Ordering;
18use std::sync::Arc;
19use std::sync::Mutex;
20use std::sync::OnceLock;
21
22pub mod error;
23use error::Error;
24use error::ErrorKind;
25
26pub mod group;
27
28mod lookahead;
29pub use lookahead::Lookahead;
30pub use lookahead::Peek;
31
32pub mod punctuated;
33
34mod scanner;
35
36mod to_string;
37
38pub mod to_tokens;
39
40pub mod token;
41use to_tokens::ToTokens;
42use token::Ident;
43use token::LitStrDoubleQuote;
44use token::LitStrSingleQuote;
45use token::SingleCharPunct;
46use token::Token;
47use token::WhiteSpace;
48
49#[cfg(feature = "proc-macro2")]
50mod proc_macro;
51#[cfg(feature = "proc-macro2")]
52pub use proc_macro::ToTokensWrapper;
53
54fn default_source_file<'a>() -> &'a Arc<SourceFile> {
55    static DEFAULT_SOURCE_FILE: OnceLock<Arc<SourceFile>> = OnceLock::new();
56    DEFAULT_SOURCE_FILE.get_or_init(|| {
57        Arc::new(SourceFile {
58            name: String::new(),
59            path: None,
60            contents: String::new(),
61        })
62    })
63}
64
/// A struct representing a file of source code.
///
/// This type is the input to [`parse_source`].
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SourceFile {
    // Display name of the file (its filename when read from disk).
    name: String,
    // Full path to the file, if it was read from disk via `SourceFile::read`.
    path: Option<String>,
    // The complete text of the file.
    contents: String,
}
74
75impl SourceFile {
76    /// Reads the file at the given path into a `SourceFile`.
77    ///
78    /// ## Errors
79    /// This function returns an error if the given path is not readable.
80    pub fn read(path: &Path) -> io::Result<SourceFile> {
81        let name = path
82            .file_name()
83            .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, "invalid filename"))?
84            .to_string_lossy()
85            .into_owned();
86        let contents = fs::read_to_string(path)?;
87        Ok(SourceFile {
88            name,
89            path: Some(path.to_string_lossy().into_owned()),
90            contents,
91        })
92    }
93
94    /// Creates a new `SourceFile` with the given name and contents.
95    pub const fn new(name: String, contents: String) -> SourceFile {
96        SourceFile {
97            name,
98            path: None,
99            contents,
100        }
101    }
102
103    fn id(&self) -> &String {
104        self.path.as_ref().unwrap_or(&self.name)
105    }
106}
107
108impl fmt::Debug for SourceFile {
109    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
110        f.debug_struct("SourceFile")
111            .field("name", &self.name)
112            .field("path", &self.path)
113            .finish_non_exhaustive()
114    }
115}
116
/// A region of source code.
///
/// Note that unlike [`proc_macro::Span`], this struct contains a reference to
/// the file containing it.
///
/// [`proc_macro::Span`]: https://doc.rust-lang.org/stable/proc_macro/struct.Span.html
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Span {
    // Byte offset of the region's start within `source.contents`.
    start: usize,
    // Byte offset of the region's end; equal to `start` for empty spans.
    end: usize,
    // The file this region belongs to.
    source: Arc<SourceFile>,
}
129
130impl Span {
131    const fn new(start: usize, end: usize, source: Arc<SourceFile>) -> Span {
132        Span { start, end, source }
133    }
134
135    /// Create a new [`Span`] covering no tokens.
136    ///
137    /// This span has a special 'sentinel' source file which is automatically
138    /// overridden when joining with [`Span::across`].
139    pub fn empty() -> Span {
140        Span {
141            start: 0,
142            end: 0,
143            source: Arc::clone(default_source_file()),
144        }
145    }
146
147    /// Create a new [`Span`] from the start of `start` to the end of `end`.
148    ///
149    /// ## Panics
150    /// This function will panic if `start` and `end` come from different source
151    /// files and neither of them .
152    pub fn across(start: &Span, end: &Span) -> Span {
153        if &start.source == default_source_file() {
154            Span {
155                start: start.start,
156                end: end.end,
157                source: Arc::clone(&end.source),
158            }
159        } else {
160            assert_eq!(
161                start.source, end.source,
162                "both inputs to `across` must come from the same source file"
163            );
164            Span {
165                start: start.start,
166                end: end.end,
167                source: Arc::clone(&start.source),
168            }
169        }
170    }
171
172    #[doc(hidden)]
173    pub const fn source(&self) -> &Arc<SourceFile> {
174        &self.source
175    }
176
177    /// Returns the start line and start column.
178    fn start_location(&self) -> (usize, usize) {
179        let mut newlines = 0;
180        let mut last_newline = 0;
181        for (i, char) in self.source.contents[..self.start].chars().enumerate() {
182            if char == '\n' {
183                newlines += 1;
184                last_newline = i;
185            }
186        }
187
188        (newlines + 1, self.start - last_newline + 1)
189    }
190
191    /// Returns true if `self` covers no tokens.
192    pub const fn is_empty(&self) -> bool {
193        self.start == self.end
194    }
195}
196
/// Parsing interface for types with a default parsing method.
pub trait Parse: Sized {
    /// Parses the input into this type.
    ///
    /// ## Errors
    /// This function returns an error if `input` doesn't contain a valid
    /// instance of `Self`.
    fn parse(input: ParseStream) -> Result<Self>;
}
206
207impl<T: Parse> Parse for Box<T> {
208    fn parse(input: ParseStream) -> Result<Self> {
209        Ok(Box::new(T::parse(input)?))
210    }
211}
212
/// A parser that can parse a stream of tokens into a syntax tree node.
pub trait Parser: Sized {
    /// The return type of this parser.
    type Output;

    /// Parses a [`TokenStream`] into the relevant syntax tree node.
    ///
    /// ## Errors
    /// This function returns an error if `tokens` doesn't contain a valid
    /// instance of [`Self::Output`].
    fn parse(self, tokens: TokenStream) -> Result<Self::Output>;
}
225
226impl<F: FnOnce(ParseStream) -> Result<T>, T> Parser for F {
227    type Output = T;
228
229    fn parse(self, tokens: TokenStream) -> Result<Self::Output> {
230        let cursor = Cursor {
231            stream: Cow::Borrowed(tokens.tokens.as_slice()),
232            offset: AtomicUsize::new(0),
233            len: tokens.tokens.len(),
234        };
235        self(&ParseBuffer::new(
236            cursor,
237            Arc::clone(tokens.source.as_ref().unwrap()),
238        ))
239    }
240}
241
242/// Parses the given tokens into the syntax tree node `T`.
243///
244/// This function ignores all whitespace.
245///
246/// ## Errors
247/// Forwards any error from `T::parse`.
248pub fn parse<T: Parse>(mut tokens: TokenStream) -> Result<T> {
249    tokens.remove_whitespace();
250    Parser::parse(T::parse, tokens)
251}
252
253/// Scans and parses the given source file into the syntax tree node `T`.
254///
255/// This function ignores all whitespace.
256///
257/// ## Errors
258/// Forwards any errors from `T::parse`.
259pub fn parse_source<T: Parse>(source: Arc<SourceFile>) -> Result<T> {
260    let (tokens, error) = scanner::scan(source, 0, None);
261    match parse(tokens) {
262        Ok(value) => error.map_or(Ok(value), Err),
263        Err(err) => {
264            if let Some(error) = error {
265                Err(error.with(err))
266            } else {
267                Err(err)
268            }
269        }
270    }
271}
272
273/// Scans and parses the given string into the syntax tree node `T`.
274///
275/// This function ignores all whitespace.
276///
277/// ## Errors
278/// Forwards any errors from `T::parse`.
279pub fn parse_string<T: Parse>(source: String) -> Result<T> {
280    let source = Arc::new(SourceFile {
281        name: "str".to_string(),
282        path: None,
283        contents: source,
284    });
285    parse_source(source)
286}
287
288/// Attempts to repeatedly parse `input` into the given syntax tree node,
289/// using `T`'s default parsing implementation, and continuing until `input` is
290/// exhausted.
291///
292/// Note that this function doesn't perform any error recovery.
293///
294/// ## Errors
295/// Forwards any errors from `T::parse`.
296pub fn parse_repeated<T: Parse>(input: ParseStream) -> Result<Vec<T>> {
297    let mut items = vec![];
298
299    while !input.is_empty() {
300        items.push(input.parse()?);
301    }
302
303    Ok(items)
304}
305
/// Gets the `Ok` value, panicking with a formatted error message if the value
/// is `Err`.
///
/// ## Panics
/// Panics if the contained value is `Err`.
#[cfg(feature = "ariadne")]
pub fn pretty_unwrap<T>(result: Result<T>) -> T {
    match result {
        Ok(value) => value,
        Err(err) => {
            // Render every report into one buffer so the panic message carries
            // the full diagnostic output.
            let mut buf = Vec::new();
            for report in err.to_reports() {
                report.write(&mut buf).unwrap();
            }
            match String::from_utf8(buf) {
                Ok(s) => panic!("{s}"),
                // Rendering produced non-UTF-8 output; fall back to printing
                // the reports directly to stderr.
                Err(_) => {
                    err.eprint().unwrap();
                    panic!("failed due to above errors");
                }
            }
        }
    }
}
327
/// A sequence of tokens.
///
/// This is the return type of
/// [`Group::token_stream`][group::Group::into_token_stream], and can be created
/// from a [`proc_macro::TokenStream`][proc-macro] or
/// [`proc_macro2::TokenStream`][proc-macro2].
///
/// [proc-macro]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
/// [proc-macro2]: https://docs.rs/proc-macro2/latest/proc_macro2/struct.TokenStream.html
#[derive(Debug, Clone, PartialEq, PartialOrd, Hash)]
pub struct TokenStream {
    // The scanned entries, in source order.
    tokens: Vec<Entry>,
    // The file the tokens came from; `None` until a token is pushed
    // (see `TokenStream::push`).
    source: Option<Arc<SourceFile>>,
}
342
343impl TokenStream {
344    fn new(tokens: Vec<Entry>, source: Option<Arc<SourceFile>>) -> TokenStream {
345        TokenStream { tokens, source }
346    }
347
348    fn filter<F: FnMut(&TokenStream) -> Vec<usize>>(&mut self, mut function: F) {
349        let mut indices = function(self);
350        indices.sort_unstable();
351        indices.reverse();
352        for index in indices {
353            self.tokens.remove(index);
354        }
355    }
356
357    /// Removes all whitespace that doesn't come at the start of a line.
358    ///
359    /// Note that the `parse*` functions remove all whitespace.
360    pub fn prepare_whitespace(&mut self) {
361        self.filter(|tokens| {
362            let mut indices = vec![];
363            let mut post_newline = true;
364            for (index, entry) in tokens.tokens.iter().enumerate() {
365                if let Entry::WhiteSpace(whitespace) = entry {
366                    if matches!(whitespace, WhiteSpace::NewLine(_)) {
367                        post_newline = true;
368                    } else if !post_newline {
369                        indices.push(index);
370                    }
371                } else {
372                    post_newline = false;
373                }
374            }
375            indices
376        });
377    }
378
379    /// Removes all non-newline whitespace from `self`.
380    ///
381    /// Note that the `parse*` functions will remove all whitespace.
382    pub fn remove_blank_space(&mut self) {
383        self.filter(|tokens| {
384            let mut indices = vec![];
385            for (index, entry) in tokens.tokens.iter().enumerate() {
386                if let Entry::WhiteSpace(whitespace) = entry {
387                    if !matches!(whitespace, WhiteSpace::NewLine(_)) {
388                        indices.push(index);
389                    }
390                }
391            }
392            indices
393        });
394    }
395
396    /// Removes all whitespace tokens from this stream.
397    ///
398    /// This method is automatically called by the `parse*` functions.
399    pub fn remove_whitespace(&mut self) {
400        self.filter(|tokens| {
401            let mut indices = vec![];
402            for (index, entry) in tokens.tokens.iter().enumerate() {
403                if let Entry::WhiteSpace(_) = entry {
404                    indices.push(index);
405                }
406            }
407            indices
408        });
409    }
410
411    /// Returns true if there are no tokens in `self`.
412    pub fn is_empty(&self) -> bool {
413        self.tokens.len() == 1
414    }
415
416    fn push(&mut self, entry: Entry) {
417        if self.source.is_none() {
418            self.source = Some(Arc::clone(&entry.span().source));
419        }
420        self.tokens.push(entry);
421    }
422
423    /// Add another [`TokenStream`] to the end of `self`.
424    pub fn append(&mut self, other: &mut TokenStream) {
425        self.tokens.append(&mut other.tokens);
426    }
427}
428
429impl TryFrom<Arc<SourceFile>> for TokenStream {
430    type Error = Error;
431
432    fn try_from(value: Arc<SourceFile>) -> Result<Self> {
433        let (tokens, error) = scanner::scan(value, 0, None);
434        error.map_or(Ok(tokens), Err)
435    }
436}
437
438impl<A: ToTokens> Extend<A> for TokenStream {
439    fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
440        for item in iter {
441            self.tokens.append(&mut item.into_token_stream().tokens);
442        }
443    }
444}
445
446/// Creates a new error in the given source file, at the given location, and
447/// with the given message and code.
448///
449/// `location` will accept any type that is `Token`, `Delimiter`, or a `Span`.
450pub fn new_error<L: Into<Span>>(message: String, location: L, code: u16) -> Error {
451    let span = location.into();
452    Error::new(
453        Arc::clone(&span.source),
454        ErrorKind::Custom {
455            message,
456            span,
457            code,
458        },
459    )
460}
461
/// A cursor position within a token stream.
pub struct ParseBuffer<'a> {
    // Token storage plus the current position; advanced through interior
    // mutability so parse methods can take `&self`.
    cursor: Cursor<'a>,
    // The file the tokens were scanned from.
    source: Arc<SourceFile>,
    // Errors accumulated via `add_error`, retrieved with `get_error`.
    error: Mutex<Error>,
}
468
469impl<'a> ParseBuffer<'a> {
470    fn new(cursor: Cursor<'a>, source: Arc<SourceFile>) -> ParseBuffer<'a> {
471        ParseBuffer {
472            cursor,
473            source,
474            error: Mutex::new(Error::empty()),
475        }
476    }
477
478    /// Attempts to parse `self` into the given syntax tree node, using `T`'s
479    /// default parsing implementation.
480    ///
481    /// ## Errors
482    /// Returns an error if `T`'s `Parse` implementation fails.
483    pub fn parse<T: Parse>(&self) -> Result<T> {
484        T::parse(self)
485    }
486
487    /// Returns true if this stream has been exhausted.
488    pub fn is_empty(&self) -> bool {
489        self.cursor.eof()
490    }
491
492    /// Creates a new error at the given location with the given message and
493    /// code.
494    pub fn new_error<T: Into<Span>>(&self, message: String, location: T, code: u16) -> Error {
495        Error::new(
496            Arc::clone(&self.source),
497            ErrorKind::Custom {
498                message,
499                span: location.into(),
500                code,
501            },
502        )
503    }
504
505    /// Adds a new error to this buffer's storage.
506    #[allow(clippy::missing_panics_doc)] // Will not panic.
507    pub fn add_error(&self, error: Error) {
508        self.error.lock().unwrap().add(error);
509    }
510
511    /// Returns an error consisting of all errors from
512    /// [`ParseBuffer::add_error`], if it has been called.
513    #[allow(clippy::missing_panics_doc)] // Will not panic.
514    pub fn get_error(&self) -> Option<Error> {
515        let error = self.error.lock().unwrap();
516        if error.is_empty() {
517            None
518        } else {
519            Some(error.to_owned())
520        }
521    }
522
523    /// Repeatedly skips tokens until `function` returns true or `self` is
524    /// empty.
525    pub fn synchronise<F: FnMut(ParseStream<'_>) -> bool>(&self, mut function: F) {
526        while !self.is_empty() && !function(self) {
527            let _ = self.next();
528        }
529        let _ = self.next();
530    }
531
532    fn try_parse<T: Parse>(&self) -> Result<T> {
533        let offset = self.cursor.offset.load(Ordering::SeqCst);
534        T::parse(self).map_err(move |err| {
535            self.cursor.offset.store(offset, Ordering::SeqCst);
536            err
537        })
538    }
539
540    /// Parses `T1` and `T2`, with no whitespace allowed between them.
541    ///
542    /// ## Errors
543    /// Returns an error if `self` does not start with the required tokens.
544    pub fn parse_joint<T1: Token, T2: Token>(&self) -> Result<(T1, T2)> {
545        if self.current()?.span().end < self.next()?.span().start {
546            return Err(Error::new(
547                Arc::clone(&self.source),
548                ErrorKind::UnexpectedToken {
549                    expected: HashSet::from_iter([T1::display() + &T2::display()]),
550                    span: self.current()?.span().to_owned(),
551                },
552            ));
553        }
554        let t1 = self.parse()?;
555        let t2 = self.parse()?;
556        Ok((t1, t2))
557    }
558
559    /// Attempts to parse `self` into `Vec<T>`, with no separating punctuation,
560    /// fully consuming `self`.
561    ///
562    /// To parse separated instances of `T`, see
563    /// [Punctuated][punctuated::Punctuated].
564    ///
565    /// ## Errors
566    /// Returns an error if `self` is not a valid sequence of `T`.
567    pub fn parse_repeated<T: Parse>(&self) -> Result<Vec<T>> {
568        let mut items = vec![];
569
570        while !self.is_empty() {
571            items.push(self.parse()?);
572        }
573
574        Ok(items)
575    }
576
577    /// Returns true if the next token is an instance of `T`.
578    #[allow(clippy::needless_pass_by_value)]
579    pub fn peek<T: Peek>(&self, token: T) -> bool {
580        let _ = token;
581        self.parse_undo::<T::Token>().is_ok()
582    }
583
584    /// Returns true if the next token is an instance of `T`.
585    ///
586    /// Note that for the purposes of this function, multi-character punctuation
587    /// like `+=` is considered to be two tokens, and float literals are
588    /// considered to be three tokens (start, `.`, end).
589    pub fn peek2<T: Peek>(&self, token: T) -> bool {
590        let buffer = self.fork();
591        let _ = buffer.next();
592        buffer.peek::<T>(token)
593    }
594
595    fn parse_undo<T: Parse>(&self) -> Result<T> {
596        let offset = self.cursor.offset.load(Ordering::SeqCst);
597        let val = T::parse(self);
598        self.cursor.offset.store(offset, Ordering::SeqCst);
599        val
600    }
601
602    fn report_error_tokens(&self) -> Result<()> {
603        if self.cursor.eof() {
604            return Ok(());
605        }
606        let mut error = false;
607        while let Entry::Error(_) = self.cursor.current() {
608            error = true;
609            self.cursor.next();
610        }
611        if error {
612            Err(Error::new(Arc::clone(&self.source), ErrorKind::Silent))
613        } else {
614            Ok(())
615        }
616    }
617
618    fn next(&'a self) -> Result<&'a Entry> {
619        self.report_error_tokens()?;
620        self.next_raw().map_or_else(
621            || {
622                Err(Error::new(
623                    Arc::clone(&self.source),
624                    ErrorKind::EndOfFile(self.source.contents.len()),
625                ))
626            },
627            Ok,
628        )
629    }
630
631    fn next_raw(&'a self) -> Option<&'a Entry> {
632        self.cursor.next()
633    }
634
635    fn current(&'a self) -> Result<&'a Entry> {
636        self.report_error_tokens()?;
637        if self.cursor.eof() {
638            Err(Error::new(
639                Arc::clone(&self.source),
640                ErrorKind::EndOfFile(self.source.contents.len()),
641            ))
642        } else {
643            Ok(self.cursor.current())
644        }
645    }
646
647    /// Gets the span of the current token.
648    ///
649    /// ## Errors
650    /// Returns an error if `self` is empty.
651    pub fn current_span(&self) -> Result<Span> {
652        Ok(self.current()?.span().to_owned())
653    }
654
655    fn get_relative(&'a self, offset: isize) -> Result<&'a Entry> {
656        self.cursor.get_relative(offset).ok_or(Error::new(
657            Arc::clone(&self.source),
658            ErrorKind::EndOfFile(self.source.contents.len()),
659        ))
660    }
661
662    /// Creates a new `ParseBuffer` at the same position as `self`.
663    ///
664    /// Changes to `self` will not affect the fork, and vice versa.
665    #[must_use]
666    pub fn fork(&self) -> ParseBuffer<'a> {
667        ParseBuffer::new(self.cursor.clone(), Arc::clone(&self.source))
668    }
669
670    /// Commits a forked buffer into `self`, updating `self` to reflect `fork`.
671    ///
672    /// ## Panics
673    /// This function will panic if `fork` wasn't forked from `self` or if
674    /// `self` is further ahead than `fork`.
675    pub fn commit(&self, fork: &Self) {
676        if !ptr::eq(self.cursor.stream.as_ptr(), fork.cursor.stream.as_ptr()) {
677            panic!("cannot commit ParseBuffer that wasn't forked from this buffer");
678        } else if fork.cursor.offset.load(Ordering::SeqCst)
679            < self.cursor.offset.load(Ordering::SeqCst)
680        {
681            panic!("cannot commit original ParseBuffer into fork");
682        }
683        self.cursor
684            .offset
685            .store(fork.cursor.offset.load(Ordering::SeqCst), Ordering::SeqCst);
686    }
687
688    /// Creates an error with the message `Unexpected token` and the given
689    /// expected tokens.
690    ///
691    /// Use of this function is generally discouraged in favour of
692    /// [`Lookahead::error`].
693    pub fn unexpected_token(&self, expected: HashSet<String>) -> Error {
694        let current = match self.current() {
695            Ok(current) => current,
696            Err(err) => return err,
697        };
698        Error::new(
699            Arc::clone(&self.source),
700            ErrorKind::UnexpectedToken {
701                expected,
702                span: current.span().clone(),
703            },
704        )
705    }
706
707    /// Creates a helper struct for peeking at the next token.
708    pub fn lookahead(&self) -> Lookahead<'a> {
709        Lookahead::new(self.fork())
710    }
711
712    /// Skips over all whitespace tokens before the next non-whitespace token.
713    ///
714    /// This method will not skip newlines.
715    pub fn skip_whitespace(&self) {
716        while let Entry::WhiteSpace(whitespace) = self.cursor.current() {
717            if matches!(whitespace, WhiteSpace::NewLine(_)) {
718                break;
719            }
720            if self.next_raw().is_none() {
721                break;
722            }
723        }
724    }
725
726    /// Creates a new empty Span with this stream's source file.
727    pub fn empty_span(&self) -> Span {
728        Span {
729            start: 0,
730            end: 0,
731            source: Arc::clone(&self.source),
732        }
733    }
734}
735
736impl fmt::Debug for ParseBuffer<'_> {
737    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
738        f.debug_struct("ParseBuffer")
739            .field(
740                "tokens",
741                &&self.cursor.stream[self.cursor.offset.load(Ordering::SeqCst)..],
742            )
743            .field("source", &self.source)
744            .field("error", &self.error)
745            .finish()
746    }
747}
748
749impl<'a> From<TokenStream> for ParseBuffer<'a> {
750    fn from(value: TokenStream) -> Self {
751        let len = value.tokens.len();
752        let cursor = Cursor {
753            stream: Cow::Owned(value.tokens),
754            offset: AtomicUsize::new(0),
755            len,
756        };
757        ParseBuffer::new(
758            cursor,
759            value
760                .source
761                .unwrap_or_else(|| Arc::clone(default_source_file())),
762        )
763    }
764}
765
766impl<'a> From<&'a TokenStream> for ParseBuffer<'a> {
767    fn from(value: &'a TokenStream) -> Self {
768        let cursor = Cursor {
769            stream: Cow::Borrowed(&value.tokens),
770            offset: AtomicUsize::new(0),
771            len: value.tokens.len(),
772        };
773        let source = Arc::clone(value.source.as_ref().unwrap_or_else(default_source_file));
774        ParseBuffer::new(cursor, source)
775    }
776}
777
/// Returns true if [`ParseBuffer::peek`] would return true for any types
/// passed.
///
/// Accepts a `ParseStream` followed by one or more types.
// Expands to `$input.peek(T1) || $input.peek(T2) || … || false`, so peeking
// short-circuits at the first match. Note that `$input` is expanded once per
// type.
#[macro_export]
macro_rules! peek_any {
    ( $input:expr, $( $ty:tt ),+ $(,)? ) => {
        $( $input.peek($ty) || )+ false
    };
}
788
/// Returns true if [`ParseBuffer::peek2`] would return true for any types
/// passed.
///
/// Accepts a `ParseStream` followed by one or more types.
// Expands to `$input.peek2(T1) || $input.peek2(T2) || … || false`, so peeking
// short-circuits at the first match. Note that `$input` is expanded once per
// type.
#[macro_export]
macro_rules! peek2_any {
    ( $input:expr, $( $ty:tt ),+ $(,)? ) => {
        $( $input.peek2($ty) || )+ false
    };
}
799
/// The input type for all parsing functions.
///
/// This is a borrowed [`ParseBuffer`]; the buffer's cursor advances through
/// interior mutability, which is why parse functions can take a shared
/// reference.
pub type ParseStream<'a> = &'a ParseBuffer<'a>;
802
// A position within a slice of scanned entries.
#[derive(Debug)]
struct Cursor<'a> {
    // The tokens being traversed; borrowed when parsing a caller's stream,
    // owned when converted from a `TokenStream` by value.
    stream: Cow<'a, [Entry]>,
    // Index of the current token; atomic so methods can advance it from
    // `&self`.
    offset: AtomicUsize,
    // Cached `stream` length; `offset == len` means end of stream.
    len: usize,
}
809
810impl<'a> Cursor<'a> {
811    fn bump(&self) -> Option<usize> {
812        let offset = self.offset.load(Ordering::SeqCst);
813        if self.eof() {
814            None
815        } else {
816            Some(offset + 1)
817        }
818    }
819
820    fn current(&'a self) -> &'a Entry {
821        &self.stream[self.offset.load(Ordering::SeqCst)]
822    }
823
824    pub fn eof(&self) -> bool {
825        self.offset.load(Ordering::SeqCst) == self.len
826    }
827
828    fn next(&'a self) -> Option<&'a Entry> {
829        self.bump().map(|next| {
830            let token = self.current();
831            self.offset.store(next, Ordering::SeqCst);
832            token
833        })
834    }
835
836    fn get_relative(&'a self, offset: isize) -> Option<&'a Entry> {
837        let current_offset = self.offset.load(Ordering::SeqCst);
838        let index = if offset < 0 {
839            current_offset - offset.unsigned_abs()
840        } else {
841            current_offset + offset.unsigned_abs()
842        };
843        self.stream.get(index)
844    }
845}
846
847impl<'a> Clone for Cursor<'a> {
848    fn clone(&self) -> Self {
849        Cursor {
850            stream: self.stream.clone(),
851            offset: AtomicUsize::new(self.offset.load(Ordering::SeqCst)),
852            len: self.len,
853        }
854    }
855}
856
// A single scanned token in a [`TokenStream`].
#[derive(Debug, Clone, PartialEq, PartialOrd, Hash)]
enum Entry {
    // A region that failed to scan; skipped and reported by
    // `ParseBuffer::report_error_tokens`.
    Error(Span),
    Ident(Ident),
    Punct(SingleCharPunct),
    WhiteSpace(WhiteSpace),
    #[cfg(feature = "scan-strings")]
    LitStrDoubleQuote(LitStrDoubleQuote),
    #[cfg(feature = "scan-strings")]
    LitStrSingleQuote(LitStrSingleQuote),
}
868
impl Entry {
    /// Returns the source region this entry covers, regardless of variant.
    fn span(&self) -> &Span {
        match self {
            Entry::Error(span) => span,
            Entry::Ident(ident) => &ident.span,
            Entry::Punct(punct) => &punct.span,
            Entry::WhiteSpace(whitespace) => whitespace.span(),
            #[cfg(feature = "scan-strings")]
            Entry::LitStrDoubleQuote(str) => str.span(),
            #[cfg(feature = "scan-strings")]
            Entry::LitStrSingleQuote(str) => str.span(),
        }
    }

    /// Overwrites this entry's span, regardless of variant.
    #[cfg(feature = "proc-macro2")]
    fn set_span(&mut self, span: Span) {
        match self {
            Entry::Error(current_span) => *current_span = span,
            Entry::Ident(ident) => ident.span = span,
            Entry::Punct(punct) => punct.span = span,
            Entry::WhiteSpace(whitespace) => whitespace.set_span(span),
            #[cfg(feature = "scan-strings")]
            Entry::LitStrDoubleQuote(str) => str.set_span(span),
            #[cfg(feature = "scan-strings")]
            Entry::LitStrSingleQuote(str) => str.set_span(span),
        }
    }
}
897
898impl From<Ident> for Entry {
899    fn from(value: Ident) -> Self {
900        Self::Ident(value)
901    }
902}
903
904impl From<SingleCharPunct> for Entry {
905    fn from(value: SingleCharPunct) -> Self {
906        Self::Punct(value)
907    }
908}
909
/// The return type of a parsing function.
///
/// This is [`std::result::Result`] with the error type fixed to this crate's
/// [`Error`].
pub type Result<T> = result::Result<T, Error>;
912
#[doc(hidden)]
pub mod private {
    /// Prevents downstream crates from implementing this crate's sealed
    /// traits: only this crate can implement `Sealed`.
    pub trait Sealed {}

    /// An uninhabited type-level marker; no value of it can ever exist.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub enum Marker {}
}
920
921#[cfg(test)]
922mod tests;