// apollo_parser/lexer/token.rs

1use crate::TokenKind;
2use std::fmt;
3
/// A token generated by the lexer.
///
/// A `Token` borrows its text (`data`) from the source string being lexed,
/// so it cannot outlive that source (`'a`).
#[derive(Clone, PartialEq, Eq)]
pub struct Token<'a> {
    // Syntactic category of this token (punctuation, name, int, ...).
    pub(crate) kind: TokenKind,
    // The exact slice of source text this token was lexed from.
    pub(crate) data: &'a str,
    // Byte offset of this token within the source text.
    pub(crate) index: usize,
}
11
12impl<'a> Token<'a> {
13    /// Returns the kind of token.
14    pub fn kind(&self) -> TokenKind {
15        self.kind
16    }
17
18    /// Returns the source text for this token.
19    pub fn data(&self) -> &'a str {
20        self.data
21    }
22
23    /// Returns the byte offset of this token in the source text.
24    pub fn index(&self) -> usize {
25        self.index
26    }
27}
28
29impl fmt::Debug for Token<'_> {
30    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
31        let start = self.index;
32        let end = self.index + self.data.len();
33
34        match &self.kind {
35            TokenKind::Whitespace => {
36                write!(f, "WHITESPACE@{}:{} {:?}", start, end, self.data)
37            }
38            TokenKind::Comment => {
39                write!(f, "COMMENT@{}:{} {:?}", start, end, self.data)
40            }
41            TokenKind::Bang => {
42                write!(f, "BANG@{}:{} {:?}", start, end, self.data)
43            }
44            TokenKind::Dollar => {
45                write!(f, "DOLLAR@{}:{} {:?}", start, end, self.data)
46            }
47            TokenKind::Amp => {
48                write!(f, "AMP@{}:{} {:?}", start, end, self.data)
49            }
50            TokenKind::Spread => {
51                write!(f, "SPREAD@{}:{} {:?}", start, end, self.data)
52            }
53            TokenKind::Colon => {
54                write!(f, "COLON@{}:{} {:?}", start, end, self.data)
55            }
56            TokenKind::Comma => {
57                write!(f, "COMMA@{}:{} {:?}", start, end, self.data)
58            }
59            TokenKind::Eq => {
60                write!(f, "EQ@{}:{} {:?}", start, end, self.data)
61            }
62            TokenKind::At => {
63                write!(f, "AT@{}:{} {:?}", start, end, self.data)
64            }
65            TokenKind::LParen => {
66                write!(f, "L_PAREN@{}:{} {:?}", start, end, self.data)
67            }
68            TokenKind::RParen => {
69                write!(f, "R_PAREN@{}:{} {:?}", start, end, self.data)
70            }
71            TokenKind::LBracket => {
72                write!(f, "L_BRACKET@{}:{} {:?}", start, end, self.data)
73            }
74            TokenKind::RBracket => {
75                write!(f, "R_BRACKET@{}:{} {:?}", start, end, self.data)
76            }
77            TokenKind::LCurly => {
78                write!(f, "L_CURLY@{}:{} {:?}", start, end, self.data)
79            }
80            TokenKind::RCurly => {
81                write!(f, "R_CURLY@{}:{} {:?}", start, end, self.data)
82            }
83            TokenKind::Pipe => {
84                write!(f, "PIPE@{}:{} {:?}", start, end, self.data)
85            }
86            TokenKind::Eof => {
87                write!(f, "EOF@{start}:{start}")
88            }
89
90            // composite nodes
91            TokenKind::Name => {
92                write!(f, "NAME@{}:{} {:?}", start, end, self.data)
93            }
94            TokenKind::StringValue => {
95                write!(f, "STRING_VALUE@{}:{} {:?}", start, end, self.data)
96            }
97            TokenKind::Int => {
98                write!(f, "INT@{}:{} {:?}", start, end, self.data)
99            }
100            TokenKind::Float => {
101                write!(f, "FLOAT@{}:{} {:?}", start, end, self.data)
102            }
103        }
104    }
105}