// cynic_parser/schema_coordinates/lexer.rs
use std::fmt;
2
3use logos::{Logos, SpannedIter};
4
5use crate::{
6 Span,
7 lexer::{LexicalError, Spanned, TokenExtras},
8 parser::AdditionalErrors,
9};
10
/// A lexer over schema-coordinate input, adapting a `logos` token stream
/// to the `(start, token, end)` triples the lalrpop-generated parser expects.
pub struct Lexer<'input> {
    /// Underlying logos iterator yielding `(Result<Token, _>, byte range)` pairs.
    token_stream: SpannedIter<'input, Token<'input>>,
    /// The original source text, kept so invalid tokens can be quoted
    /// verbatim in error messages.
    input: &'input str,
}
16
17impl<'input> Lexer<'input> {
18 pub fn new(input: &'input str) -> Self {
19 Self {
20 token_stream: Token::lexer(input).spanned(),
21 input,
22 }
23 }
24}
25
26impl<'input> Iterator for Lexer<'input> {
27 type Item = Spanned<Token<'input>, usize, AdditionalErrors>;
28
29 fn next(&mut self) -> Option<Self::Item> {
30 match self.token_stream.next() {
31 None => None,
32 Some((Ok(token), span)) => Some(Ok((span.start, token, span.end))),
33 Some((Err(_), span)) => {
34 Some(Err(AdditionalErrors::Lexical(LexicalError::InvalidToken(
35 self.input[span.start..span.end].to_string(),
36 Span::new(span.start, span.end),
37 ))))
38 }
39 }
40 }
41}
42
/// Tokens recognised inside a schema coordinate.
///
/// Whitespace, commas, BOM characters and `#`-to-end-of-line comments are
/// skipped by the lexer (see the `skip` regex below).
#[derive(Logos, Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[logos(extras = TokenExtras, skip r"[ \t\r\n\f,\ufeff]+|#[^\n\r]*")]
pub enum Token<'a> {
    /// `@`
    #[token("@")]
    At,

    /// `)`
    #[token(")")]
    CloseParen,

    /// `:`
    #[token(":")]
    Colon,

    /// A GraphQL-style name: a letter or underscore followed by
    /// alphanumerics/underscores. Borrows its text from the input.
    #[regex("[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice())]
    Identifier(&'a str),

    /// `(`
    #[token("(")]
    OpenParen,

    /// `.`
    #[token(".")]
    Dot,
}
66
67impl fmt::Display for Token<'_> {
68 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
69 f.write_str(match self {
70 Token::At => "at ('@')",
71 Token::CloseParen => "closing paren (')')",
72 Token::Colon => "colon (':')",
73 Token::Identifier(_) => "identifier",
74 Token::OpenParen => "opening paren ('(')",
75 Token::Dot => "dot ('.')",
76 })
77 }
78}
79
80impl From<lalrpop_util::ParseError<usize, Token<'_>, AdditionalErrors>> for crate::Error {
81 fn from(value: lalrpop_util::ParseError<usize, Token<'_>, AdditionalErrors>) -> Self {
82 use crate::Error;
83 use lalrpop_util::ParseError;
84
85 match value {
86 ParseError::InvalidToken { location } => Error::InvalidToken { location },
87 ParseError::UnrecognizedEof { location, expected } => {
88 Error::UnrecognizedEof { location, expected }
89 }
90 ParseError::UnrecognizedToken {
91 token: (lspan, token, rspan),
92 expected,
93 } => Error::UnrecognizedToken {
94 token: (lspan, token.to_string(), rspan),
95 expected,
96 },
97 ParseError::ExtraToken {
98 token: (lspan, token, rspan),
99 } => Error::ExtraToken {
100 token: (lspan, token.to_string(), rspan),
101 },
102 ParseError::User {
103 error: AdditionalErrors::Lexical(error),
104 } => Error::Lexical(error),
105 ParseError::User {
106 error: AdditionalErrors::MalformedString(error),
107 } => Error::MalformedStringLiteral(error),
108 ParseError::User {
109 error: AdditionalErrors::MalformedDirectiveLocation(lhs, location, rhs),
110 } => Error::MalformedDirectiveLocation(lhs, location, rhs),
111 ParseError::User {
112 error: AdditionalErrors::VariableInConstPosition(lhs, name, rhs),
113 } => Error::MalformedDirectiveLocation(lhs, name, rhs),
114 }
115 }
116}