wgsl_parser/token.rs

//! Defines the lexical [`Token`]s of the WGSL grammar, and an auto-generated
//! [`Lexer`].
//!
//! [`Token`]: enum@Token
//! [`Lexer`]: struct@Lexer
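//!
//! # Example
//!
//! A minimal usage sketch (not compiled as a doctest): it assumes [`Lexer`]
//! exposes `gramatika`'s `Lexer::{new, scan}` API -- adjust if the generated
//! interface differs.
//!
//! ```ignore
//! use gramatika::Lexer as _;
//!
//! let mut lexer = Lexer::new("var x = 1;".into());
//! for token in lexer.scan() {
//! 	println!("{token:?}");
//! }
//! ```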

use core::fmt;
use std::hash::Hash;

use gramatika::{Span, Spanned, Substr, Token as _, TokenStream};

#[derive(DebugLispToken, Token)]
pub enum Token {
	#[pattern = r"[\[\](){}]"]
	Brace(Substr, Span),

	#[pattern = "//"]
	#[pattern = r"/\*"]
	CommentStart(Substr, Span),

	#[pattern = r"\*/"]
	CommentEnd(Substr, Span),

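	// NOTE: An assumption about `subset_of`, not confirmed here: these
	// lexemes also match the broader `Ident` pattern below, and the attribute
	// presumably lets the generated lexer disambiguate them.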
	#[subset_of(Ident)]
	#[pattern = r"(bool|[fiu](8|16|32|64))"] // Primitives
	#[pattern = r"(mat[2-4]x[2-4][fh]?|vec[2-4][iufh]?)"] // Math
	#[pattern = r"((binding_)?array|atomic|ptr)"]
	#[pattern = r"(sampler(_comparison)?)"]
	#[pattern = r"(texture_(depth_)?multisampled_2d)"]
	#[pattern = r"(texture_external)"]
	#[pattern = r"(texture_depth_(2d|cube)(_array)?)"]
	#[pattern = r"(texture_(1d|2d(_array)?|3d|cube(_array)?))"]
	#[pattern = r"(texture_storage_(1d|2d(_array)?|3d))"]
	Type(Substr, Span),

	#[pattern = r"#(define_import_path|import)"]
	// #[pattern = r"#(if(n?def)?|else|endif)"] // TODO
	Directive(Substr, Span),

	#[pattern = r"#if.*"]
	#[pattern = r"#else.*"]
	#[pattern = r"#endif.*"]
	Pragma(Substr, Span),

	#[pattern = r"#\{.+?\}"]
	#[pattern = r"#[a-zA-Z_][a-zA-Z0-9_]*"]
	PreprocessorInsertion(Substr, Span),
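	// Illustrative matches for the `#`-prefixed kinds above: `#import` and
	// `#define_import_path` lex as `Directive`; `#if FOO`, `#else`, and
	// `#endif` (each consuming to end of line) as `Pragma`; `#{binding}` and
	// `#SOME_DEF` as `PreprocessorInsertion`.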

	#[subset_of(Ident)]
	#[pattern = r"(const|fn|let|struct|alias|var|override|export)"]
	#[pattern = r"(function|private|read(_write)?|storage|uniform|workgroup|write)"]
	#[pattern = r"(break|case|continu(e|ing)|default|else|fallthrough|for|if|loop|return|switch|from)"]
	#[pattern = r"(true|false)"]
	#[pattern = r"(bitcast|discard|enable|import)"]
	Keyword(Substr, Span),

	#[pattern = "[a-zA-Z_][0-9a-zA-Z_]*"]
	Ident(Substr, Span),

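	// Worked examples for the literal patterns below: `0f`, `1.5`, `.25e-2h`,
	// and `3e+4` lex as decimal `FloatLiteral`s; `0x1.8p2`, `0xA.p-1`, and
	// `0x1p4f` as hex `FloatLiteral`s; `0`, `42i`, and `0x7Fu` as
	// `IntLiteral`s.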
	// Decimal
	#[pattern = r"0[fh]"]
	#[pattern = r"[1-9][0-9]*[fh]"]
	#[pattern = r"[0-9]*\.[0-9]+([eE][-+]?[0-9]+)?[fh]?"]
	#[pattern = r"[0-9]+\.[0-9]*([eE][-+]?[0-9]+)?[fh]?"]
	#[pattern = r"[0-9]+[eE][+-]?[0-9]+[fh]?"]
	// Hex
	#[pattern = r"0[xX][0-9a-fA-F]*\.[0-9a-fA-F]+([pP][-+]?[0-9]+[fh]?)?"]
	#[pattern = r"0[xX][0-9a-fA-F]+\.[0-9a-fA-F]*([pP][-+]?[0-9]+[fh]?)?"]
	#[pattern = r"0[xX][0-9a-fA-F]+[pP][-+]?[0-9]+[fh]?"]
	FloatLiteral(Substr, Span),

	// Hex
	#[pattern = "0[xX][0-9a-fA-F]+[iu]?"]
	// Decimal
	#[pattern = "0[iu]?"]
	#[pattern = "[1-9][0-9]*[iu]?"]
	IntLiteral(Substr, Span),

	#[pattern = r#""[-_ ./a-zA-Z0-9]+""#]
	Path(Substr, Span),

	#[pattern = "->"]
	#[pattern = r"[-+*/%&|^]="]
	#[pattern = r"<<=|>>="]
	#[pattern = r"&&?|\|\|?|--?|\+\+?|>>|<<"]
	#[pattern = "[=!<>]=?"]
	#[pattern = "[%*/~^]"]
	Operator(Substr, Span),

	#[pattern = r"::?|[,.;@]"]
	Punct(Substr, Span),

	#[pattern = r"\S+"]
	Unrecognized(Substr, Span),

	// Tokens without patterns -- never produced by the lexer directly;
	// lexed tokens need to be upgraded to these kinds after the fact.
	Attribute(Substr, Span),
	Function(Substr, Span),
	Param(Substr, Span),
	Struct(Substr, Span),
	Field(Substr, Span),
	// TODO: Rename this to `Namespace`
	Module(Substr, Span),
	Plain(Substr, Span),
}

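/// The auto-generated lexer described in the module docs -- a [`TokenStream`]
/// over [`Token`].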
pub type Lexer = TokenStream<Token>;

impl fmt::Debug for Token {
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		<Self as gramatika::DebugLisp>::fmt(self, f, 0)
	}
}

impl fmt::Display for Token {
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		write!(f, "{}", self.lexeme())
	}
}

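// `Hash` and `PartialEq` are implemented by hand over the same three
// components (kind, lexeme, span), which keeps `Hash` consistent with `Eq`.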
impl Hash for Token {
	fn hash<H>(&self, state: &mut H)
	where H: std::hash::Hasher {
		self.kind().hash(state);
		self.lexeme().hash(state);
		self.span().hash(state);
	}
}

impl PartialEq for Token {
	fn eq(&self, other: &Self) -> bool {
		self.kind() == other.kind()
			&& self.lexeme() == other.lexeme()
			&& self.span() == other.span()
	}
}

impl Eq for Token {}