/// Generates the lexing/parsing boilerplate for one language SDK.
///
/// Inputs:
/// - `context`:   user context type threaded through parse-tree building
///   (the `C` of `ParseTreeContext`).
/// - `target`:    root AST type; parsing starts at `ast::$target` and lowers
///   to `pt::$target`.
/// - `tokens`:    lexical token variants added to the generated `Tok` enum.
/// - `rules`:     tokenizer rules, in order, handed to `lex::run_tokenizer`.
/// - `semantics`: semantic-token variants added to `Tok` (rendered with a
///   `semantic` HTML class).
///
/// Note: every list matcher is written `$( ... ),* ,`, so each bracketed
/// list requires a trailing comma at the call site.
#[macro_export]
macro_rules! sdk {
    (
        context: $context:ty;
        target: $target:ident;
        tokens: [ $( $token_type:ident ),* , ];
        rules: [ $( $token_rule:expr ),* , ];
        semantics: [ $( $sem_type:ident ),* , ];
    ) => {
        // Bring the SDK surface the generated code depends on into scope.
        use $crate::sdk::{
            ParseHook,
            TokenType,
            TokenImpl,
            TokenStream,
            TokenBlocks,
            ASTParser,
            Regex,
            AsKebabCase,
            CreateParseTree,
            ParseTreeContext,
            lex
        };
        use $crate::{optional, required, list};

        // Concrete token type for this SDK.
        pub type Token = TokenImpl<Tok>;

        // Parse-tree context specialized to this SDK's user context type.
        pub type Ctx = ParseTreeContext<$context, Tok>;

        // Token-type enum: the fixed Unknown/Decor variants plus the
        // caller-supplied lexical ($token_type) and semantic ($sem_type)
        // variants.
        #[derive(PartialEq, Default, Debug, Clone)]
        pub enum Tok {
            // Fallback used by the tokenizer for unmatched input
            // (see `tokenize` below).
            #[default]
            Unknown,
            // Wraps another token type, prepending an extra class `tag`
            // to the base token's HTML classes (see `html_class`).
            Decor{
                tag: String,
                base:Box<Tok>},
            $( $token_type, )*

            $( $sem_type, )*
        }

        impl TokenType for Tok {
            // CSS class list used when rendering a token to HTML.
            // Variant names are kebab-cased, e.g. `MyToken` -> "token my-token".
            fn html_class(&self) -> Option<String> {
                Some(match self {
                    Tok::Unknown => "token unknown".to_string(),
                    // Decor emits its own tag followed by the base token's
                    // classes (omitted when the base has none).
                    Tok::Decor{tag, base} => format!("{tag}{}", base.html_class().map(|s| format!(" {}", s)).unwrap_or_default()),
                    $( Tok::$token_type => format!("token {}",AsKebabCase(stringify!($token_type))) ,)*
                    $( Tok::$sem_type => format!("semantic token {}",AsKebabCase(stringify!($sem_type))) ),*
                })
            }
        }

        // Runs the tokenizer over `src` with the caller-supplied rules;
        // input matched by no rule becomes `Tok::Unknown`.
        pub fn tokenize(src: &str) -> lex::TokenizerOutput<Tok>{
            let rules = vec![
                $( $token_rule ),*
            ];
            lex::run_tokenizer(src, Tok::Unknown, &rules)
        }

        // Hooks the root AST type into parse-tree creation: lowering an
        // `ast::$target` yields a `pt::$target` borrowing from it.
        impl CreateParseTree for ast::$target {
            type T = Tok;
            type C = $context;
            type P<'p> = pt::$target<'p>;

            fn parse_pt_with_context_internal<'a>(
                &'a self,
                ctx: &mut Ctx
            ) -> Self::P<'a> {
                pt::$target::from_ast(self, ctx)
            }
        }

        // Entry-point parser: parses a token stream into the root AST type.
        pub struct Parser;

        impl ASTParser for Parser {
            type T = Tok;
            type A = ast::$target;

            fn parse_ast(&self, ts: &mut TokenStream<Self::T>) -> Option<Self::A>{
                ast::$target::parse(ts)
            }
        }

        // Helper macro for matching a single token from the stream.
        // NOTE: its metavariables ($param_type_name, $ts, $lit) are not
        // declared by sdk!, so they pass through the outer expansion
        // verbatim and are bound when token! itself is invoked.
        macro_rules! token {
            // token!(Type::parse(ts)) — yields the next token (cloned) only
            // if its token type is `Tok::Type`. On mismatch nothing is
            // yielded; the stream is presumably rewound by the caller via
            // push/restore — confirm against TokenStream's semantics.
            ($param_type_name:ident :: parse ( $ts:ident ) ) => {
                {
                    $ts.consume().filter(|token| {
                        match &token.token_type {
                            Tok::$param_type_name => true,
                            _ => false,
                        }
                    }).cloned()
                }
            };
            // token!(Type::"lit"(ts)) — same as above, but additionally
            // requires the token's text to equal the given literal.
            ($param_type_name:ident :: $lit:literal ( $ts:ident ) ) => {
                {
                    $ts.consume().filter(|token| {
                        if let Tok::$param_type_name = &token.token_type {
                            if &token.value == $lit {
                                return true;
                            }
                        }
                        false
                    }).cloned()
                }
            };
        }
    };
}
134
/// Implements parsing and parse-tree lowering for a union (sum) AST node.
///
/// `$type_name` is an enum in both `ast` and `pt` with one tuple variant per
/// `$derivation_type_name`, each holding a boxed payload. `$from_ast` names
/// the generated lowering constructor on the `pt` enum.
#[macro_export]
macro_rules! impl_union {
    (
        $from_ast:ident, $type_name:ident,
        { $( $derivation_type_name:ident, )* }
    ) => {
        impl ast::$type_name {
            /// Attempts each derivation in declaration order and keeps the
            /// first that parses. `push`/`restore`/`pop` appear to
            /// checkpoint, rewind, and discard the stream position
            /// respectively (with `push` able to refuse, e.g. a depth
            /// limit) — confirm against `TokenStream`.
            fn parse(ts: &mut TokenStream<Tok>) -> Option<Self> {
                if !ts.push() {
                    return None;
                }
                $(
                    match ast::$derivation_type_name::parse(ts) {
                        Some(inner) => {
                            ts.pop();
                            return Some(Self::$derivation_type_name(Box::new(inner)));
                        }
                        None => {
                            // Failed attempt: rewind and try the next derivation.
                            ts.restore();
                        }
                    }
                )*
                ts.pop();
                None
            }

            /// Forwards semantic highlighting to whichever derivation this
            /// union currently holds.
            fn apply_semantic(&self, si: &mut TokenBlocks<Tok>, ovr: &Option<Tok>) {
                match self {
                    $( Self::$derivation_type_name(node) => node.apply_semantic(si, ovr), )*
                }
            }
        }

        impl<'p> pt::$type_name<'p> {
            /// Lowers the AST union into its parse-tree counterpart by
            /// delegating to the matching derivation's `from_ast`.
            fn $from_ast(ast: &'p ast::$type_name, ctx: &mut Ctx) -> Self {
                match ast {
                    $(
                        ast::$type_name::$derivation_type_name(node) => {
                            Self::$derivation_type_name(Box::new(pt::$derivation_type_name::from_ast(node, ctx)))
                        }
                    )*
                }
            }
        }
    };
}