1use std::collections::HashMap;
2
3use convert_case::Casing;
4use deki_core::*;
5use deki_proc::{syn::{parse2, Generics}, *};
6use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
7use proc_macro::TokenStream as CompilerTokens;
8use syn::spanned::Spanned;
9
10#[proc_macro]
26 pub fn xoxo(item:CompilerTokens) -> CompilerTokens {
27 TokenStream::from(item).replace_atoms(|t|match t {
28 TokenTree::Ident(i) if i.to_string().as_str() == "X" => "true".ident_span(i.span()).into(),
29 TokenTree::Ident(i) if i.to_string().as_str() == "O" => "false".ident_span(i.span()).into(),
30 _ => t
31 }).into()
32 }
33
/// Expands a compact function-list DSL into one trait impl per function,
/// plus an inherent impl for whatever follows a `|` separator.
///
/// Input layout (as consumed below):
///   `Name <generics...> fn a ...; fn b ...; | inherent-items`
/// Each `fn name ...` segment produces `impl TraitName for Name { fn name ... }`
/// where the trait name is the function name converted to PascalCase.
#[proc_macro]
pub fn quimp (item:CompilerTokens) -> CompilerTokens {
    let stream: TokenStream = item.into();
    let mut iter = stream.peek_iter();
    // First token: the type the impls are generated for.
    let name = iter.next().unwrap();

    // Collect everything up to (not including) the first `fn` token and
    // parse it as the generic parameter list.
    let mut gens = qt!();
    while let Some(tok) = iter.next_if(|a|!a.is_string("fn")) {
        gens.extend([tok]);
    }
    let gens: Generics = parse2(gens).unwrap();
    let (gen_impl,gen_typ,gen_where) = gens.split_for_impl();

    // Split the remainder at `|`: before it, the function list; after it
    // (optional), the body of an inherent impl.
    let mut split = iter.split_punct('|');
    let toki = split.remove(0);
    let iter = toki.peek_iter();

    let mut stream = qt!{};
    for func in iter.split_punct(';') {
        let mut fiter = func.peek_iter();
        // Skip the leading token — presumably the `fn` keyword that ended
        // the generics loop above; TODO confirm against deki's peek_iter.
        fiter.next();
        let func = fiter.next().unwrap();
        // Trait name = function name in PascalCase (e.g. `do_it` -> `DoIt`).
        let trai = func.to_string().to_case(Case::Pascal).ident();
        let stuff = TokenStream::from_iter(fiter);
        stream.extend(qt!(
            impl #gen_impl #trai for #name #gen_typ #gen_where {
                fn #func #stuff
            }
        ));
    }
    // Optional inherent section; `None` interpolates as nothing below.
    let implo = split.pop().map(|a|{
        TokenStream::from_iter(a.into_iter())
    });
    qt!{
        #stream
        impl #gen_impl #name #gen_typ #gen_where {
            #implo
        }
    }.into()
}
85
86 #[proc_macro_attribute]
91 pub fn imp (attr:CompilerTokens,item:CompilerTokens) -> CompilerTokens {
92 let item: TokenStream = item.into();
93 let attr: TokenStream = attr.into();
94 deki_proc::imp(attr,item).into()
95 }
96
/// Builds an `impl` block of dispatch methods that `match` on `self`.
///
/// Input layout (as consumed below):
///   `(TypeName) sig...; [Pattern] fnname: expr; fnname: expr; [Pattern2] ...`
/// Segments are `;`-separated. A segment opening with a `[...]` group sets
/// the active match pattern; while no pattern is active, segments are
/// collected as method signatures. With a pattern active, `fnname : body`
/// pairs add a `TypeName Pattern => body,` arm to that method's match.
#[proc_macro]
pub fn match_fns (item:CompilerTokens) -> CompilerTokens {
    let stream: TokenStream = item.into();
    let mut stream = stream.peek_iter();
    // First token must be a group; its contents name the matched type.
    let name = stream.next().unwrap().unwrap_group().stream();
    let iter = stream.split_punct(';');

    // Collected signatures, plus match arms keyed by method name.
    let mut funcs = Vec::new();
    let mut matches = HashMap::<String,TokenStream>::new();
    // The `[...]` pattern currently in effect; empty = none seen yet.
    let mut current = qt![];

    for tok in iter {
        let mut toki = tok.peek_iter();
        // A leading bracket group `[...]` switches the active pattern.
        // NOTE(review): `exit!` appears to bind the pattern or early-return
        // `None` from this `and_then` closure — confirm in deki.
        let title = toki.peek().and_then(|t|{
            exit!{*TokenTree::Group(g) = t}
            exit!{*Delimiter::Bracket = g.delimiter()}
            Some(g.stream())
        });
        if let Some(title) = title {
            toki.next();
            current = title;
        }
        if current.is_empty() {
            // Before the first `[...]`: this segment is a method signature.
            funcs.push(TokenStream::from_iter(toki));
        } else {
            // `fnname : body` — register one match arm for that method.
            let [func,b] = toki.split_punct(':').try_into().unwrap();
            matches.entry(func.to_string()).or_default()
                .extend(qt!{#name #current => #b,});
        }
    }

    // Emit one method per signature: name, `(...)` arg group split on `,`,
    // trailing tokens (e.g. `-> Ret`), body = match over `self` with the
    // collected arms and a `Default::default()` fallback.
    let mut asdf = qt![];
    for a in funcs {
        let mut aiter = a.peek_iter();
        // NOTE(review): `exit!`/`next!` are deki control-flow macros —
        // presumably they bind or skip/bail when the value is absent
        // (e.g. a signature with no registered arms); confirm in deki.
        exit!{bb = aiter.next()}
        exit!{atr = aiter.next(),unwrap_group()}
        let atr = atr.stream().peek_iter().split_punct(',');
        next!{mchs = matches.remove(&bb.to_string())}
        let more = TokenStream::from_iter(aiter);
        asdf.extend(qt!(
            pub fn #bb (&self #(,#atr)*) #more {
                match self { #mchs _ => Default::default() }
            }
        ));
    }

    qt![impl #name {#asdf}].into()
}
168
169
/// Recognizes the `[( tokens )]` / `[( tokens @ case )]` naming shorthand
/// and turns it into a single identifier.
///
/// Returns `None` unless `t` is a bracket group containing exactly one
/// parenthesized group. The inner tokens are stringified, every
/// non-alphanumeric character becomes `_`, and the result is
/// case-converted (default Pascal; `@ snake|camel|scream|flat|upper`
/// overrides) and returned as an ident spanned to the inner tokens.
fn foname_tree(t:&TokenTree) -> Option<TokenTree> {
    // NOTE(review): `exit!` is a deki macro that binds the pattern or
    // early-returns `None` — inferred from this fn's Option return; confirm.
    exit!{*TokenTree::Group(g0) = t}
    exit!{*Delimiter::Bracket = g0.delimiter()}
    let mut g0 = g0.stream().as_vec();
    // Exactly one token inside the brackets, and it must be `( ... )`.
    exit!{if g0.len()!=1}
    exit!{*TokenTree::Group(g1) = g0.pop().unwrap()}
    exit!{*Delimiter::Parenthesis = g1.delimiter()}
    let stream = g1.stream();
    let span = stream.span();
    // Optional `@ case` suffix selects the convert_case style.
    let mut split = stream.peek_iter().split_punct('@');
    let case = split.get(1).map(|t|match t.to_string().as_str() {
        "snake" => Case::Snake,
        "camel" => Case::Camel,
        "scream" => Case::UpperSnake,
        "flat" => Case::Flat,
        "upper" => Case::UpperFlat,
        _ => Case::Pascal
    }).unwrap_or(Case::Pascal);
    let stream = split.swap_remove(0);
    // Stringify, sanitize to `_`, then convert to the chosen case.
    let text = stream.to_string().chars()
        .map(|c|if c.is_alphanumeric() {c} else {'_'})
        .collect::<String>()
        .to_case(case);
    Some(text.ident_span(span).into())
}
197
198 fn foname_stream(i:TokenStream) -> TokenStream {
199 TokenStream::from_iter(i.into_iter().map(|p| match foname_tree(&p) {
200 Some(t) => t,
201 _ => match p {
202 TokenTree::Group(g) => {
203 let stream = foname_stream(g.stream());
204 TokenTree::Group(Group::new(g.delimiter(),stream))
205 }
206 _ => p
207 }
208 }))
209 }
210
211 #[proc_macro]
212 pub fn foname(token:CompilerTokens) -> CompilerTokens {
213 foname_stream(token.into()).into()
214 }
215
216