use core::fmt;
use std::hash::Hash;
use gramatika::{Span, Spanned, Substr, Token as _, TokenStream};
/// Lexical token for a WGSL-like shader language with preprocessor
/// extensions (`#import`, `#if`/`#else`/`#endif`, `#{…}` insertions).
///
/// The `#[pattern]` attributes are consumed by `gramatika`'s `Token`
/// derive, which generates the matcher; multiple patterns on one variant
/// are alternatives, and `#[subset_of(Ident)]` marks keyword-like
/// variants that must win over the more general `Ident` pattern.
/// NOTE(review): variant/pattern order affects match precedence in
/// gramatika-generated lexers — do not reorder casually.
#[derive(DebugLispToken, Token)]
pub enum Token {
// NOTE(review): despite the name, this matches every bracket kind —
// `[` `]` `(` `)` `{` `}` — not only curly braces.
#[pattern = r"[\[\](){}]"]
Brace(Substr, Span),
// Opening delimiter of a line (`//`) or block (`/*`) comment; the
// comment body is presumably consumed elsewhere — confirm in the lexer.
#[pattern = "//"]
#[pattern = r"/\*"]
CommentStart(Substr, Span),
// Closing delimiter of a block comment.
#[pattern = r"\*/"]
CommentEnd(Substr, Span),
// Built-in type names (scalars, vectors, matrices, samplers, textures).
#[subset_of(Ident)]
#[pattern = r"(bool|[fiu](8|16|32|64))"] #[pattern = r"(mat[2-4]x[2-4][fh]?|vec[2-4][iufh]?)"] #[pattern = r"((binding_)?array|atomic|ptr)"]
#[pattern = r"(sampler(_comparison)?)"]
#[pattern = r"(texture_(depth_)?multisampled_2d)"]
#[pattern = r"(texture_external)"]
#[pattern = r"(texture_depth_(2d|cube)(_array)?)"]
#[pattern = r"(texture_(1d|2d(_array)?|3d|cube(_array)?))"]
#[pattern = r"(texture_storage_(1d|2d(_array)?|3d))"]
Type(Substr, Span),
// Preprocessor module directives (`#import`, `#define_import_path`).
#[pattern = r"#(define_import_path|import)"]
Directive(Substr, Span),
// Conditional-compilation lines; `.*` swallows the rest of the line.
#[pattern = r"#if.*"]
#[pattern = r"#else.*"]
#[pattern = r"#endif.*"]
Pragma(Substr, Span),
// Preprocessor substitution: braced form `#{…}` or bare `#name`.
#[pattern = r"#\{.+?\}"]
#[pattern = r"#[a-zA-Z_][a-zA-Z0-9_]*"]
PreprocessorInsertion(Substr, Span),
// Reserved words (declarations, address spaces, control flow, literals
// `true`/`false`, and misc keywords).
#[subset_of(Ident)]
#[pattern = r"(const|fn|let|struct|alias|var|override|export)"]
#[pattern = r"(function|private|read(_write)?|storage|uniform|workgroup|write)"]
#[pattern = r"(break|case|continu(e|ing)|default|else|fallthrough|for|if|loop|return|switch|from)"]
#[pattern = r"(true|false)"]
#[pattern = r"(bitcast|discard|enable|import)"]
Keyword(Substr, Span),
// General identifier; keyword/type variants above take precedence via
// `#[subset_of(Ident)]`.
#[pattern = "[a-zA-Z_][0-9a-zA-Z_]*"]
Ident(Substr, Span),
// Float literals: decimal with suffix, fractional forms, exponent
// forms, and hex-float forms. NOTE(review): integer-looking floats
// (`0f`, `42f`) deliberately reject leading zeros — matches WGSL.
#[pattern = r"0[fh]"]
#[pattern = r"[1-9][0-9]*[fh]"]
#[pattern = r"[0-9]*\.[0-9]+([eE][-+]?[0-9]+)?[fh]?"]
#[pattern = r"[0-9]+\.[0-9]*([eE][-+]?[0-9]+)?[fh]?"]
#[pattern = r"[0-9]+[eE][+-]?[0-9]+[fh]?"]
#[pattern = r"0[xX][0-9a-fA-F]*\.[0-9a-fA-F]+([pP][-+]?[0-9]+[fh]?)?"]
#[pattern = r"0[xX][0-9a-fA-F]+\.[0-9a-fA-F]*([pP][-+]?[0-9]+[fh]?)?"]
#[pattern = r"0[xX][0-9a-fA-F]+[pP][-+]?[0-9]+[fh]?"]
FloatLiteral(Substr, Span),
// Integer literals: hex, zero, or decimal without leading zeros, each
// with an optional `i`/`u` suffix.
#[pattern = "0[xX][0-9a-fA-F]+[iu]?"]
#[pattern = "0[iu]?"]
#[pattern = "[1-9][0-9]*[iu]?"]
IntLiteral(Substr, Span),
// Quoted import path, e.g. `"bevy_pbr::mesh_bindings"`-style strings
// restricted to a safe character set.
#[pattern = r#""[-_ ./a-zA-Z0-9]+""#]
Path(Substr, Span),
// Operators, longest-match first: `->`, compound assignments,
// shift-assigns, logical/arithmetic pairs, comparisons, then the
// remaining single-character operators.
#[pattern = "->"]
#[pattern = r"[-+*/%&|^]="]
#[pattern = r"<<=|>>="]
#[pattern = r"&&?|\|\|?|--?|\+\+?|>>|<<"]
#[pattern = "[=!<>]=?"]
#[pattern = "[%*/~^]"]
Operator(Substr, Span),
// Punctuation: `::`, `:`, and the separators `,` `.` `;` `@`.
#[pattern = r"::?|[,.;@]"]
Punct(Substr, Span),
// Catch-all for any non-whitespace run no earlier pattern matched;
// keeps the lexer total instead of erroring.
#[pattern = r"\S+"]
Unrecognized(Substr, Span),
// The variants below have no patterns: the lexer never produces them
// directly. Presumably a later semantic pass re-tags tokens with these
// roles (e.g. for highlighting) — confirm against the parser.
Attribute(Substr, Span),
Function(Substr, Span),
Param(Substr, Span),
Struct(Substr, Span),
Field(Substr, Span),
Module(Substr, Span),
Plain(Substr, Span),
}
pub type Lexer = TokenStream<Token>;
impl fmt::Debug for Token {
/// Formats the token as a Lisp-style s-expression by delegating to the
/// `gramatika::DebugLisp` implementation (indent depth starts at 0).
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
gramatika::DebugLisp::fmt(self, f, 0)
}
}
impl fmt::Display for Token {
/// Displays the token as its source lexeme, verbatim.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let lexeme = self.lexeme();
fmt::Display::fmt(&lexeme, f)
}
}
impl Hash for Token {
/// Feeds kind, lexeme, and span into the hasher — exactly the fields
/// `PartialEq` compares, which keeps `Hash` consistent with `Eq`.
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
Hash::hash(&self.kind(), state);
Hash::hash(&self.lexeme(), state);
Hash::hash(&self.span(), state);
}
}
impl PartialEq for Token {
/// Two tokens are equal when their kind, lexeme, and span all agree.
fn eq(&self, other: &Self) -> bool {
(self.kind(), self.lexeme(), self.span())
== (other.kind(), other.lexeme(), other.span())
}
}
/// `eq` compares a fixed set of fields reflexively, so equality is a
/// total equivalence relation and `Eq` holds.
impl Eq for Token {}