Trait ParseStreamer

pub trait ParseStreamer {
    type Token: Token + Spanned;

    // Required methods
    fn is_empty(&mut self) -> bool;
    fn peek(&mut self) -> Option<&Self::Token>;
    fn prev(&mut self) -> Option<&Self::Token>;
    fn check_kind(&mut self, kind: <Self::Token as Token>::Kind) -> bool;
    fn check(&mut self, compare: Self::Token) -> bool;
    fn consume(
        &mut self,
        compare: Self::Token,
    ) -> Result<Self::Token, SpannedError>;
    fn consume_kind(
        &mut self,
        kind: <Self::Token as Token>::Kind,
    ) -> Result<Self::Token, SpannedError>;
    fn consume_as(
        &mut self,
        kind: <Self::Token as Token>::Kind,
        convert: fn(Substr, Span) -> Self::Token,
    ) -> Result<Self::Token, SpannedError>;
    fn upgrade_last(
        &mut self,
        kind: <Self::Token as Token>::Kind,
        convert: fn(Substr, Span) -> Self::Token,
    ) -> Result<Self::Token, SpannedError>;
    fn upgrade(
        &mut self,
        token: Self::Token,
        convert: fn(Substr, Span) -> Self::Token,
    ) -> Result<Self::Token, SpannedError>;
    fn discard(&mut self);

    // Provided method
    fn parse<P>(&mut self) -> Result<P, SpannedError>
       where P: Parse<Stream = Self> { ... }
}

A user-friendly interface for implementing a hand-written LL(1) or recursive descent parser with backtracking.

It serves as the input to the Parse trait’s parse method, allowing a full syntax-tree parser to be broken up into discrete Parse implementations, one for each node in the tree.
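
For example, a single node’s parser might look roughly like the sketch below. This is a minimal sketch: it assumes the Parse trait exposes a fn parse(input: &mut Self::Stream) -> Result<Self, SpannedError> constructor (as the examples further down suggest), reuses the derived Token / TokenStream setup from those examples, and the Assignment node itself is hypothetical.

// ...
#[derive(Token, Debug)]
enum Token {
    #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"]
    Ident(Substr, Span),

    #[pattern = "="]
    Eq(Substr, Span),
}

// One discrete node of the syntax tree...
struct Assignment {
    lhs: Token,
    rhs: Token,
}

// ...with its own small Parse implementation driven by the streamer.
impl Parse for Assignment {
    type Stream = ParseStream<Token, TokenStream<Token>>;

    fn parse(input: &mut Self::Stream) -> Result<Self, SpannedError> {
        let lhs = input.consume_kind(TokenKind::Ident)?;
        input.consume_kind(TokenKind::Eq)?;
        let rhs = input.consume_kind(TokenKind::Ident)?;

        Ok(Self { lhs, rhs })
    }
}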

Required Associated Types§

type Token: Token + Spanned

Required Methods§

fn is_empty(&mut self) -> bool

Returns true when there are no more tokens in the stream.
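
A minimal sketch in the same style as the examples below, assuming the same derived Token setup:

// ...
#[derive(Token, Debug)]
enum Token {
    #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"]
    Ident(Substr, Span),
}

let input = "foo";
let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);
assert!(!parser.is_empty());

// Consuming the only token leaves the stream empty.
let _ = parser.consume_kind(TokenKind::Ident)?;
assert!(parser.is_empty());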

fn peek(&mut self) -> Option<&Self::Token>

Returns a Some reference to the next Token in the stream without advancing the iterator, or None if the stream is empty.

// ...
#[derive(Token, Debug)]
enum Token {
    #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"]
    Ident(Substr, Span),
}

let input = "foo";
let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);
assert!(parser.peek().is_some());

match parser.peek() {
    Some(peeked) => {
        assert!(matches!(peeked.as_matchable(), (TokenKind::Ident, "foo", _)));
    },
    None => unreachable!(),
}

let _ = parser.consume_kind(TokenKind::Ident)?;
assert!(parser.peek().is_none());

fn prev(&mut self) -> Option<&Self::Token>

Returns a Some reference to the last token consumed by the iterator. Returns None if the source string contains no tokens, or if no tokens have been consumed yet.

Underlying data access is O(1) in the crate-provided implementation.

// ...
#[derive(Token, Debug, PartialEq)]
enum Token {
    #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"]
    Ident(Substr, Span),
}

let input = "foo";
let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);
assert!(parser.prev().is_none());

let foo = parser.consume_kind(TokenKind::Ident)?;
assert!(parser.prev().is_some());
assert_eq!(parser.prev().unwrap(), &foo);

fn check_kind(&mut self, kind: <Self::Token as Token>::Kind) -> bool

Indicates whether the next Token in the stream matches the given Kind, without advancing the iterator.

// ...
#[derive(Token, Debug, PartialEq)]
enum Token {
    #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"]
    Ident(Substr, Span),
    #[pattern = r"\S+"]
    Unrecognized(Substr, Span),
}

let input = "foo";
let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);
assert_eq!(
    parser.check_kind(TokenKind::Unrecognized),
    false,
);
assert_eq!(
    parser.check_kind(TokenKind::Ident),
    true,
);

fn check(&mut self, compare: Self::Token) -> bool

Indicates whether the next Token in the stream matches the given argument by comparing their Kinds and lexemes. Does not advance the iterator.

// ...
#[derive(Token, Debug, PartialEq)]
enum Token {
    #[pattern = "[-+*/=<>]"]
    Operator(Substr, Span),
}

let input = "=";
let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);
assert_eq!(
    parser.check(operator![>]),
    false,
);
assert_eq!(
    parser.check(operator![=]),
    true,
);

fn consume(&mut self, compare: Self::Token) -> Result<Self::Token, SpannedError>

Advances the iterator, returning Ok with the next Token if it matches the given argument by comparing their Kinds and lexemes. Otherwise returns a contextual Err(SpannedError).

// ...
#[derive(Token, Debug, PartialEq)]
enum Token {
    #[pattern = "[-+*/=<>]"]
    Operator(Substr, Span),
}

let input = "=<";
let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);

let eq = parser.consume(operator![=]);
assert!(eq.is_ok());

let gt = parser.consume(operator![>]);
assert!(gt.is_err());

let error = gt.unwrap_err();
assert_eq!(format!("{error}"), r#"
ERROR: Expected `>`
  |
1 | =<
  |  ^
"#);

fn consume_kind(
    &mut self,
    kind: <Self::Token as Token>::Kind,
) -> Result<Self::Token, SpannedError>

Advances the iterator, returning Ok with the next Token if it matches the given Kind. Otherwise returns a contextual Err(SpannedError).

// ...
#[derive(Token, Debug, PartialEq)]
enum Token {
    #[pattern = "[-+*/<>]"]
    Operator(Substr, Span),

    #[pattern = "[0-9]+"]
    Number(Substr, Span),
}

let input = "2++";
let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);

let lhs = parser.consume_kind(TokenKind::Number);
assert!(lhs.is_ok());

let op = parser.consume_kind(TokenKind::Operator);
assert!(op.is_ok());

let rhs = parser.consume_kind(TokenKind::Number);
assert!(rhs.is_err());

let error = rhs.unwrap_err();
assert_eq!(format!("{error}"), r#"
ERROR: Expected Number, found Operator
  |
1 | 2++
  |   ^
"#);

fn consume_as(
    &mut self,
    kind: <Self::Token as Token>::Kind,
    convert: fn(Substr, Span) -> Self::Token,
) -> Result<Self::Token, SpannedError>

Advances the iterator, consuming the next Token if it matches the given Kind and converting it into a different variant with the provided convert function. Otherwise returns a contextual Err(SpannedError).

§Example

This is primarily useful for “upgrading” less specific token variants into more specific subsets of those variants at parse-time.

// ...
#[derive(Token, Debug, PartialEq)]
enum Token {
    #[subset_of(Ident)]
    #[pattern = "func|struct"]
    Storage(Substr, Span),

    #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"]
    Ident(Substr, Span),

    #[pattern = r"[(){}\[\]]"]
    Brace(Substr, Span),

    StructName(Substr, Span),
    FuncName(Substr, Span),
}

let input = r#"
    struct Foo {}
    func bar() {}
"#;

let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);

while let Some(token) = parser.next() {
    use TokenKind::*;

    match token.as_matchable() {
        (Storage, "struct", _) => {
            let ident = parser.consume_as(Ident, Token::struct_name)?;
            assert_eq!(ident.kind(), StructName);
            parser.consume(brace!("{"))?;
            parser.consume(brace!("}"))?;
        }
        (Storage, "func", _) => {
            let ident = parser.consume_as(Ident, Token::func_name)?;
            assert_eq!(ident.kind(), FuncName);
            parser.consume(brace!("("))?;
            parser.consume(brace!(")"))?;
            parser.consume(brace!("{"))?;
            parser.consume(brace!("}"))?;
        }
        (_, _, span) => {
            return Err(SpannedError {
                message: "Expected `struct` or `func`".into(),
                source: parser.source(),
                span: Some(span),
            });
        }
    }
}

fn upgrade_last(
    &mut self,
    kind: <Self::Token as Token>::Kind,
    convert: fn(Substr, Span) -> Self::Token,
) -> Result<Self::Token, SpannedError>

Similar to consume_as, but retroactively upgrades the last token consumed by the parser.

§Example
// ...
#[derive(Token, Debug, PartialEq)]
enum Token {
    #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"]
    Ident(Substr, Span),

    #[pattern = "[()]"]
    Brace(Substr, Span),

    #[pattern = "="]
    Eq(Substr, Span),

    FuncName(Substr, Span),
}

let input = "foo = bar()";

let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);

let lhs = parser.consume_kind(TokenKind::Ident)?;
let eq = parser.consume(eq![=])?;
let mut rhs = parser.consume_kind(TokenKind::Ident)?;

match parser.peek() {
    Some(peeked) if matches!(peeked.as_matchable(), (TokenKind::Brace, "(", _)) => {
        rhs = parser.upgrade_last(TokenKind::Ident, Token::func_name)?;
    }
    _ => {}
}

assert_eq!(rhs.kind(), TokenKind::FuncName);

fn upgrade(
    &mut self,
    token: Self::Token,
    convert: fn(Substr, Span) -> Self::Token,
) -> Result<Self::Token, SpannedError>

Similar to upgrade_last, but retroactively upgrades an arbitrary token the parser has previously consumed.

§Warning

The crate-provided implementation of this operation runs in O(n) time, where n is the number of tokens consumed so far. Prefer upgrade_last where possible.

§Panics

The crate-provided implementation will panic if the provided token cannot be found in the parser’s buffer of previously consumed tokens.

§Example
// ...
#[derive(Token, Debug, PartialEq)]
enum Token {
    #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"]
    Ident(Substr, Span),

    #[pattern = "[()]"]
    Brace(Substr, Span),

    #[pattern = "="]
    Eq(Substr, Span),

    FuncName(Substr, Span),
}

let input = "foo = bar()";

let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);

let lhs = parser.consume_kind(TokenKind::Ident)?;
let eq = parser.consume(eq![=])?;
let mut rhs = parser.consume_kind(TokenKind::Ident)?;

match parser.peek() {
    Some(peeked) if matches!(peeked.as_matchable(), (TokenKind::Brace, "(", _)) => {
        rhs = parser.upgrade(rhs, Token::func_name)?;
    }
    _ => {}
}

assert_eq!(rhs.kind(), TokenKind::FuncName);

fn discard(&mut self)

Advances the iterator, ignoring the next Token.
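
A minimal sketch, assuming the same derived Token setup used in the examples above:

// ...
#[derive(Token, Debug)]
enum Token {
    #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"]
    Ident(Substr, Span),

    #[pattern = "[0-9]+"]
    Number(Substr, Span),
}

let input = "foo 42";
let mut parser = ParseStream::<Token, TokenStream<Token>>::from(input);

// Skip the identifier without inspecting or keeping it.
parser.discard();

let _ = parser.consume_kind(TokenKind::Number)?;
assert!(parser.is_empty());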

Provided Methods§

fn parse<P>(&mut self) -> Result<P, SpannedError>
where P: Parse<Stream = Self>,

Provides a more convenient API for parsing other implementers of the Parse trait.

// ...
#[derive(Token, Debug)]
enum Token {
    #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"]
    Ident(Substr, Span),
}

#[derive(Debug)]
struct IdentExpr(Token);

impl Parse for IdentExpr {
    // ...
}

let input = "foo bar baz";
let mut parser = ParseStream::from(input);

let foo = IdentExpr::parse(&mut parser)?;
let bar: IdentExpr = parser.parse()?;
let baz = parser.parse::<IdentExpr>()?;

Dyn Compatibility§

This trait is not dyn compatible.

In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.

Implementors§

impl ParseStreamer for wgsl_parser::ParseStream

impl<T, L> ParseStreamer for gramatika::parse::ParseStream<T, L>
where T: Token + Spanned + Debug, L: Lexer<Output = T>,


type Token = T