ast_description_lang/tokens.rs

use crate::{Config, Specs};
use anyhow::{anyhow, Result};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use std::str::FromStr;

impl Specs<'_> {
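    /// Assembles the generated token module: the `Token` lexer enum, its
    /// `Display` impl, and a `parse` submodule with one chumsky parser per
    /// token. `config.error` and `config.tokens_mod` arrive as strings and are
    /// parsed into token streams here so they can be spliced into the
    /// generated parsers.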
    pub fn generate_tokens_mod(&self, config: &Config<'_>) -> Result<TokenStream> {
        let error = TokenStream::from_str(config.error).map_err(|err| anyhow!("{err}"))?;
        let tokens_mod = TokenStream::from_str(config.tokens_mod).map_err(|err| anyhow!("{err}"))?;

        let enum_ = self.generate_tokens_enum();
        let fmt = self.generate_tokens_fmt();
        let parse = self.generate_tokens_parse(error, tokens_mod);
        Ok(quote! {
            #![allow(dead_code)]
            #enum_
            #fmt
            #parse
        })
    }

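    /// Builds the `Token` enum consumed by the logos lexer. Each dynamic token
    /// becomes a payload-carrying variant with a `#[regex(...)]` attribute that
    /// routes through its `make_*` constructor; each static token becomes a
    /// unit variant with a `#[token(...)]` attribute. Whitespace is skipped and
    /// anything unmatched lexes as `Token::Error`.
    ///
    /// A rough sketch of the generated shape, assuming a hypothetical spec with
    /// a dynamic `ident` token and a static `semi` token (the names and regex
    /// are illustrative, not part of this crate):
    ///
    /// ```ignore
    /// #[derive(Clone, Debug, logos::Logos, Eq, PartialEq, Hash)]
    /// pub enum Token {
    ///     #[regex(r"\s+", logos::skip)]
    ///     #[error]
    ///     Error,
    ///
    ///     #[regex("[a-zA-Z_]+", super::make_ident)]
    ///     Ident(super::Ident),
    ///     #[token(";")]
    ///     Semi,
    /// }
    /// ```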
    fn generate_tokens_enum(&self) -> TokenStream {
        let dynamic_tokens = self.dynamic_tokens.iter().map(|token| {
            let name = token.0.as_type();
            let pattern = token.1;
            let constructor = format_ident!("make_{}", token.0 .0);
            quote! {
                #[regex(#pattern, super::#constructor)]
                #name(super::#name)
            }
        });

        let static_tokens = self.static_tokens.iter().map(|(name, keyword)| {
            let name = name.as_type();
            quote! {
                #[token(#keyword)]
                #name
            }
        });

        quote! {
            #[derive(Clone, Debug, logos::Logos, Eq, PartialEq, Hash)]
            pub enum Token {
                #[regex(r"\s+", logos::skip)]
                #[error]
                Error,

                #(#dynamic_tokens),*,
                #(#static_tokens),*,
            }
        }
    }

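    /// Implements `std::fmt::Display` for `Token`: dynamic tokens delegate to
    /// their payload's `Display` impl, static tokens print their keyword or
    /// symbol verbatim, and the error variant prints as `INVALID_TOKEN`.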
    fn generate_tokens_fmt(&self) -> TokenStream {
        let dynamic_tokens = self.dynamic_tokens.iter().map(|(token, _)| {
            let token = token.as_type();
            quote! { Token::#token(value) => std::fmt::Display::fmt(value, f) }
        });

        let static_tokens = self.static_tokens.iter().map(|(token, symbol)| {
            let token = token.as_type();
            quote! { Token::#token => write!(f, "{}", #symbol) }
        });

        quote! {
            impl std::fmt::Display for Token {
                fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                    #[allow(clippy::write_literal)]
                    match self {
                        Token::Error => write!(f, "INVALID_TOKEN"),
                        #(#dynamic_tokens),*,
                        #(#static_tokens),*,
                    }
                }
            }
        }
    }

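    /// Emits the `parse` submodule containing one chumsky parser function per
    /// token. Static tokens get a parser that matches the exact token and
    /// discards it; dynamic tokens get a `select!`-based parser that yields the
    /// variant's payload. `error` becomes the parsers' `Error` type and
    /// `tokens_mod` is the configured path to the module defining `Token`.
    ///
    /// A rough sketch of the output for the hypothetical `ident`/`semi` tokens
    /// above, assuming the config names `crate::tokens` as the token module and
    /// `Simple<Token>` as the error type (both illustrative):
    ///
    /// ```ignore
    /// pub mod parse {
    ///     use chumsky::prelude::*;
    ///     use crate::tokens::{self, Token};
    ///
    ///     type Error = Simple<Token>;
    ///
    ///     pub fn semi() -> impl Parser<Token, (), Error = Error> + Clone {
    ///         just(Token::Semi).ignored()
    ///     }
    ///
    ///     pub fn ident() -> impl Parser<Token, super::super::Ident, Error = Error> {
    ///         select!{ Token::Ident(value) => value }
    ///     }
    /// }
    /// ```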
    fn generate_tokens_parse(&self, error: TokenStream, tokens_mod: TokenStream) -> TokenStream {
        let static_tokens = self.static_tokens.iter().map(|(ident, _)| {
            let ty = ident.as_type();
            quote! {
                pub fn #ident() -> impl Parser<Token, (), Error = Error> + Clone {
                    just(Token::#ty).ignored()
                }
            }
        });

        let dynamic_tokens = self.dynamic_tokens.iter().map(|(ident, _)| {
            let ty = ident.as_type();
            quote! {
                pub fn #ident() -> impl Parser<Token, super::super::#ty, Error = Error> {
                    select!{ Token::#ty(value) => value }
                }
            }
        });

        quote! {
            pub mod parse {
                use chumsky::prelude::*;
                #[allow(unused_imports)]
                use #tokens_mod::{self, Token};

                type Error = #error;

                #(#static_tokens)*
                #(#dynamic_tokens)*
            }
        }
    }
}