1#![recursion_limit = "128"]
2use proc_macro2::*;
3use quote::*;
4use regex::Regex;
5use std::iter::once;
6
7use html5ever::tendril::*;
8use html5ever::tokenizer::BufferQueue;
9use html5ever::tokenizer::{
10 CharacterTokens, CommentToken, EndTag, NullCharacterToken, StartTag, TagToken, EOFToken, ParseError, Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts, DoctypeToken,
11};
12
13#[proc_macro_attribute]
14pub fn template(
15 attr: proc_macro::TokenStream,
16 item: proc_macro::TokenStream,
17) -> proc_macro::TokenStream {
18 let attr = proc_macro2::TokenStream::from(attr);
19 let item = proc_macro2::TokenStream::from(item);
20 let mut file_ = String::new();
21 let mut attr = attr.into_iter();
22 let mut re = regex::Regex::new(r"(\{\{)|(\}\})|(\{([^\}\n]+)\})").expect("regex");
23 let mut error = "anyhow::Error".to_string();
24 while let Some(a) = attr.next() {
25 if format!("{}", a) == "path" {
26 if let (Some(a), Some(b)) = (attr.next(), attr.next()) {
27 if format!("{}", a) == "=" {
28 file_ = format!("{}", b)
29 }
30 }
31 } else if format!("{}", a) == "regex" {
32 if let (Some(a), Some(b)) = (attr.next(), attr.next()) {
33 if format!("{}", a) == "=" {
34 re = regex::Regex::new(&format!("{}", b)).expect("regex");
35 }
36 }
37 } else if format!("{}", a) == "error" {
38 if let (Some(a), Some(b)) = (attr.next(), attr.next()) {
39 if format!("{}", a) == "=" {
40 let e = format!("{}", b);
41 error.clear();
42 error.push_str(e.trim_matches('"'));
43 }
44 }
45 } else if format!("{}", a) == "," {
46 continue
47 } else {
48 println!("unknown attribute {:?}", a);
49 }
50 }
51 let cargo_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
52 let mut file = std::path::Path::new(&cargo_dir).join("templates");
53 file.push(file_.trim_matches('"'));
54 let template = std::fs::read_to_string(&file).unwrap();
55
56 let re_amp = regex::Regex::new(r"(&[a-zA-Z]+;)|&").unwrap();
58 let template = re_amp.replace_all(&template, |cap: ®ex::Captures| {
59 if let Some(c) = cap.get(1) {
60 c.as_str().to_string()
61 } else {
62 "&".to_string()
63 }
64 });
65
66 let mut name = None;
67 let mut item = item.into_iter();
68 let mut item_ = Vec::new();
69 let mut spec = proc_macro2::TokenStream::new();
70 let mut spec2 = proc_macro2::TokenStream::new();
71 let mut is_name = true;
72 let mut last_was_name = false;
73 loop {
74 match item.next() {
75 Some(TokenTree::Ident(id)) => {
76 if id.to_string() == "struct" || id.to_string() == "enum" {
77 let it = item.next().unwrap();
78 name = Some(syn::Ident::new(&format!("{}", it), it.span()));
79 item_.push(TokenTree::Ident(id));
80 item_.push(it);
81 last_was_name = true;
82 } else {
83 item_.push(TokenTree::Ident(id));
84 }
85 }
86 None => break,
87 Some(TokenTree::Punct(p)) => {
88 if last_was_name {
90 if p.to_string() == "<" {
91 let mut level = 1;
92 spec.extend(once(TokenTree::Punct(p.clone())));
93 spec2.extend(once(TokenTree::Punct(p.clone())));
94 item_.push(TokenTree::Punct(p));
95 loop {
96 match item.next() {
97 Some(TokenTree::Punct(p)) => {
98 let pp = p.to_string();
99 spec.extend(once(TokenTree::Punct(p.clone())));
100 item_.push(TokenTree::Punct(p.clone()));
101 if pp == ">" {
102 level -= 1;
103 if level <= 0 {
104 spec2.extend(once(TokenTree::Punct(p.clone())));
105 break;
106 }
107 } else if pp == "<" {
108 level += 1;
109 } else if pp == ":" {
110 is_name = false;
111 } else if pp == "," && level == 1 {
112 spec2.extend(once(TokenTree::Punct(p.clone())));
113 is_name = true;
114 } else if is_name {
115 spec2.extend(once(TokenTree::Punct(p.clone())));
116 }
117 }
118 Some(it) => {
119 spec.extend(once(it.clone()));
120 if is_name {
121 spec2.extend(once(it.clone()));
122 }
123 item_.push(it)
124 }
125 None => break,
126 }
127 }
128 } else {
129 item_.push(TokenTree::Punct(p));
130 }
131 } else {
132 item_.push(TokenTree::Punct(p))
133 }
134 }
135 Some(it) => item_.push(it),
136 }
137 }
138 let name = name.unwrap();
139
140 use std::iter::FromIterator;
141 let item = proc_macro2::TokenStream::from_iter(item_);
142
143 let tokens = walk(re, &template);
144 let tok: TokenStream = tokens.parse().unwrap();
145 let file = file.to_str().unwrap();
146 let error: TokenStream = error.parse().unwrap();
147 let tokens = quote! {
148 impl #spec cuach::Render for #name #spec2 {
149 type Error = #error;
150 fn render_into<W: std::fmt::Write>(&self, w: &mut W) -> Result<(), Self::Error> {
151 let _ = include_bytes!(#file);
152 use std::fmt::Write;
153 use cuach::Render;
154 #tok
155 Ok(())
156 }
157 }
158 #item
159 };
160 proc_macro::TokenStream::from(tokens)
161}
162
163use std::fmt::Write;
164
165#[derive(Clone)]
166struct TokenPrinter {
167 result: String,
168 current: String,
169 re: Regex,
170 post_comment: bool,
171 last_was_content: bool,
172}
173
174impl TokenSink for TokenPrinter {
175 type Handle = ();
176
177 fn process_token(&mut self, token: Token, _line_number: u64) -> TokenSinkResult<()> {
178 match token {
179 CharacterTokens(contents) => {
180 if !self.post_comment || !contents.trim().is_empty() {
181 if self.post_comment {
182 self.current.push_str(contents.trim_start());
183 } else {
184 self.current.push_str(&contents);
185 }
186 self.post_comment = false;
187 }
188 self.last_was_content = true;
189 }
190 NullCharacterToken => {}
191 TagToken(tag) => {
192 self.last_was_content = false;
193 self.post_comment = false;
195 match tag.kind {
196 StartTag => {
197 write!(&mut self.current, "<{}", tag.name).unwrap();
198 }
199 EndTag => {
200 write!(&mut self.current, "</{}>", tag.name).unwrap();
201 }
202 }
203
204 for attr in tag.attrs.iter() {
205 if tag.name.as_bytes() == b"script"
206 && (attr.name.local.as_bytes() == b"async"
207 || attr.name.local.as_bytes() == b"defer")
208 {
209 write!(&mut self.current, " {}", attr.name.local).unwrap();
210 } else if attr.value.chars().any(|x| x == '"') && attr.value.chars().all(|x| x != '\'') {
211 write!(&mut self.current, " {}='{}'", attr.name.local, attr.value)
212 .unwrap();
213 } else {
214 write!(&mut self.current, " {}=\"{}\"", attr.name.local, attr.value.replace("\"", """))
215 .unwrap();
216 }
217 }
218 if tag.self_closing {
219 write!(&mut self.current, "/>").unwrap()
220 } else if let StartTag = tag.kind {
221 write!(&mut self.current, ">").unwrap()
222 }
223 }
224 ParseError(err) => {
225 panic!("ERROR: {}", err);
226 }
227 CommentToken(contents) => {
228 while self.current.ends_with("\n") {
230 self.current.pop();
231 }
232 self.rollup();
233 let contents = contents.replace("&", "&");
234 self.result.push_str("\n");
235 self.result.push_str(&contents);
236 self.result.push_str("\n");
237 self.post_comment = true;
238 self.last_was_content = false;
239 }
240 EOFToken => {
241 self.rollup();
242 }
243 DoctypeToken(doc) => {
244 if let Some(ref name) = doc.name {
245 write!(&mut self.current, "<!DOCTYPE {}>", name).unwrap();
246 }
247 }
248 }
249 TokenSinkResult::Continue
250 }
251}
252
253fn walk(re: Regex, input: &str) -> String {
254 let sink = TokenPrinter {
255 post_comment: true,
256 re,
257 current: String::new(),
258 result: String::new(),
259 last_was_content: false,
260 };
261 let chunk = StrTendril::try_from_byte_slice(input.as_bytes()).unwrap();
262 let mut input = BufferQueue::new();
263 input.push_back(chunk.try_reinterpret().unwrap());
264
265 let mut tok = Tokenizer::new(sink, TokenizerOpts::default());
266 let _ = tok.feed(&mut input);
267 assert!(input.is_empty());
268 tok.end();
269 tok.sink.result
270}
/// One piece of a split template fragment.
enum Arg<'a> {
    /// Literal text, written to the output verbatim.
    Text(&'a str),
    /// The inside of a `{...}` placeholder: an expression to be rendered.
    Arg(&'a str),
}
275
276struct Args<'a, 'b> {
277 caps: regex::CaptureMatches<'a, 'b>,
278 start: usize,
279 s: &'b str,
280 reserve: Option<Arg<'b>>,
281 finished: bool,
282}
283
284impl TokenPrinter {
285 fn rollup(&mut self) {
286 if !self.current.is_empty() {
287 let args = Args {
288 start: 0,
289 caps: self.re.captures_iter(&self.current),
290 s: &self.current,
291 reserve: None,
292 finished: false,
293 };
294 for args in args {
295 match args {
296 Arg::Text(t) => {
297 let re = regex::Regex::new(r"\s+").expect("regex");
298 let t = re.replace_all(&t, " ");
299 writeln!(&mut self.result, "w.write_str({:?})?;", t).unwrap()
300 }
301 Arg::Arg(a) => {
302 writeln!(&mut self.result, "(\n{}\n).render_into(w)?;", a).unwrap()
303 }
304 }
305 }
306 self.current.clear();
307 }
308 }
309}
310
impl<'a, 'b> Iterator for Args<'a, 'b> {
    type Item = Arg<'b>;
    /// Yields alternating literal-text and substitution pieces of `self.s`.
    ///
    /// `{{` / `}}` escapes (capture groups 1 and 2) come out as literal `{`
    /// and `}` text; a `{expr}` match (group 3) yields `Arg::Arg` with the
    /// expression from group 4. When plain text precedes a match, the text
    /// is yielded first and the match's own item is parked in `self.reserve`
    /// for the following call.
    fn next(&mut self) -> Option<Self::Item> {
        // Once the trailing text has been emitted, the iterator is done.
        if self.finished {
            return None;
        }
        if let Some(r) = self.reserve.take() {
            // A previously parked item takes priority over the next match.
            return Some(r);
        } else if let Some(cap) = self.caps.next() {
            if let Some(cap2) = cap.get(1) {
                // `{{` escape.
                return if cap2.start() > self.start {
                    // Slice the text between `self.start` and the match,
                    // yield it, and park the literal `{` for the next call.
                    let r = self.s.split_at(cap2.start()).0;
                    let r = r.split_at(self.start).1;
                    self.start = cap2.end();
                    self.reserve = Some(Arg::Text("{"));
                    Some(Arg::Text(r))
                } else {
                    self.start = cap2.end();
                    Some(Arg::Text("{"))
                };
            } else if let Some(cap2) = cap.get(2) {
                // `}}` escape — same scheme with a literal `}`.
                return if cap2.start() > self.start {
                    let r = self.s.split_at(cap2.start()).0;
                    let r = r.split_at(self.start).1;
                    self.start = cap2.end();
                    self.reserve = Some(Arg::Text("}"));
                    Some(Arg::Text(r))
                } else {
                    self.start = cap2.end();
                    Some(Arg::Text("}"))
                };
            } else if let Some(cap2) = cap.get(3) {
                // `{expr}` substitution; group 4 holds the inner expression
                // (always present when group 3 matched, hence the unwrap).
                let r = self.s.split_at(cap2.start()).0;
                let r = r.split_at(self.start).1;
                return if cap2.start() > self.start {
                    self.start = cap2.end();
                    self.reserve = Some(Arg::Arg(cap.get(4).unwrap().as_str()));
                    Some(Arg::Text(r))
                } else {
                    self.start = cap2.end();
                    Some(Arg::Arg(cap.get(4).unwrap().as_str()))
                };
            }
        }
        // No more matches: emit any trailing text exactly once.
        if self.start < self.s.len() {
            self.finished = true;
            Some(Arg::Text(self.s.split_at(self.start).1))
        } else {
            None
        }
    }
}