1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
use crate::{Config, Specs};
use anyhow::{anyhow, Result};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use std::str::FromStr;
impl Specs<'_> {
    /// Assembles the full generated token module: the logos-derived `Token`
    /// enum, its `Display` impl, and a nested `parse` module of chumsky
    /// parser helpers.
    ///
    /// # Errors
    /// Returns an error if `config.error` or `config.tokens_mod` is not a
    /// lexable Rust token stream.
    pub fn generate_tokens_mod(&self, config: &Config<'_>) -> Result<TokenStream> {
        // Lex the user-supplied type paths up front so a bad config fails
        // here rather than producing uncompilable generated code.
        let error = TokenStream::from_str(config.error).map_err(|err| anyhow!("{err}"))?;
        let tokens_mod = TokenStream::from_str(config.tokens_mod).map_err(|err| anyhow!("{err}"))?;

        let enum_decl = self.generate_tokens_enum();
        let display_impl = self.generate_tokens_fmt();
        let parse_mod = self.generate_tokens_parse(error, tokens_mod);

        Ok(quote! {
            #![allow(dead_code)]
            #enum_decl
            #display_impl
            #parse_mod
        })
    }

    /// Emits the `Token` enum with logos lexer attributes: one regex-driven
    /// variant per dynamic token (carrying its parsed value) and one
    /// keyword/symbol variant per static token.
    fn generate_tokens_enum(&self) -> TokenStream {
        let dynamic = self.dynamic_tokens.iter().map(|(ident, pattern)| {
            let variant = ident.as_type();
            // Each dynamic variant is built by a user-provided callback
            // named `make_<token>` in the parent module.
            let constructor = format_ident!("make_{}", ident.0);
            quote! {
                #[regex(#pattern, super::#constructor)]
                #variant(super::#variant)
            }
        });
        let fixed = self.static_tokens.iter().map(|(ident, keyword)| {
            let variant = ident.as_type();
            quote! {
                #[token(#keyword)]
                #variant
            }
        });
        quote! {
            #[derive(Clone, Debug, logos::Logos, Eq, PartialEq, Hash)]
            pub enum Token {
                #[regex(r"\s+", logos::skip)]
                #[error]
                Error,
                #(#dynamic),*,
                #(#fixed),*,
            }
        }
    }

    /// Emits `impl Display for Token`: dynamic variants delegate to their
    /// inner value's `Display`, static variants print their fixed symbol,
    /// and the error variant prints a sentinel string.
    fn generate_tokens_fmt(&self) -> TokenStream {
        let dynamic_arms: Vec<TokenStream> = self
            .dynamic_tokens
            .iter()
            .map(|(ident, _)| {
                let variant = ident.as_type();
                quote! { Token::#variant(value) => std::fmt::Display::fmt(value, f) }
            })
            .collect();
        let static_arms: Vec<TokenStream> = self
            .static_tokens
            .iter()
            .map(|(ident, symbol)| {
                let variant = ident.as_type();
                quote! { Token::#variant => write!(f, "{}", #symbol) }
            })
            .collect();
        quote! {
            impl std::fmt::Display for Token {
                fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                    #[allow(clippy::write_literal)]
                    match self {
                        Token::Error => write!(f, "INVALID_TOKEN"),
                        #(#dynamic_arms),*,
                        #(#static_arms),*,
                    }
                }
            }
        }
    }

    /// Emits a `parse` submodule containing one chumsky parser function per
    /// token: static tokens match-and-discard, dynamic tokens select out
    /// their carried value.
    fn generate_tokens_parse(&self, error: TokenStream, tokens_mod: TokenStream) -> TokenStream {
        let static_fns = self.static_tokens.iter().map(|(name, _)| {
            let variant = name.as_type();
            quote! {
                pub fn #name() -> impl Parser<Token, (), Error = Error> + Clone {
                    just(Token::#variant).ignored()
                }
            }
        });
        let dynamic_fns = self.dynamic_tokens.iter().map(|(name, _)| {
            let variant = name.as_type();
            quote! {
                pub fn #name() -> impl Parser<Token, super::super::#variant, Error = Error> {
                    select!{ Token::#variant(value) => value }
                }
            }
        });
        quote! {
            pub mod parse {
                use chumsky::prelude::*;
                #[allow(unused_imports)]
                use #tokens_mod::{self, Token};
                type Error = #error;
                #(#static_fns)*
                #(#dynamic_fns)*
            }
        }
    }
}