1use proc_macro::TokenStream;
2use proc_macro2::{Delimiter, Group, Span, TokenStream as TokenStream2, TokenTree};
3use quote::quote;
4use swc_common::{FileName, SourceMap};
5use swc_ecma_parser::{EsSyntax, Parser, StringInput, Syntax};
6use syn::{
7 Expr, LitStr, Result, Token,
8 parse::{Nothing, Parse, ParseStream},
9 parse_macro_input,
10 punctuated::Punctuated,
11};
12
13const SURREAL_JS_BUNDLE: &str = include_str!("../assets/surreal.js");
14const CSS_SCOPE_INLINE_JS_BUNDLE: &str = include_str!("../assets/css-scope-inline.js");
15
/// Input accepted by `inline_js!`: either a single JavaScript string
/// literal, or a raw token stream that will be serialized back into
/// JavaScript text by `tokens_to_js`.
enum JsInput {
    Literal(LitStr),
    Tokens(TokenStream2),
}
20
21impl Parse for JsInput {
22 fn parse(input: ParseStream) -> Result<Self> {
23 if input.peek(LitStr) {
24 let content: LitStr = input.parse()?;
25 Ok(JsInput::Literal(content))
26 } else {
27 let tokens: TokenStream2 = input.parse()?;
28 Ok(JsInput::Tokens(tokens))
29 }
30 }
31}
32
/// Input accepted by `inline_css!`: either a single CSS string literal,
/// or a raw token stream that will be serialized back into CSS text by
/// `tokens_to_css`.
enum CssInput {
    Literal(LitStr),
    Tokens(TokenStream2),
}
37
38impl Parse for CssInput {
39 fn parse(input: ParseStream) -> Result<Self> {
40 if input.peek(LitStr) {
41 let content: LitStr = input.parse()?;
42 Ok(CssInput::Literal(content))
43 } else {
44 let tokens: TokenStream2 = input.parse()?;
45 Ok(CssInput::Tokens(tokens))
46 }
47 }
48}
49
50fn expand_css_markup(css_input: CssInput) -> TokenStream {
51 let content_lit = match css_input {
52 CssInput::Literal(content) => content,
53 CssInput::Tokens(tokens) => {
54 let css = tokens_to_css(tokens);
55 if let Err(message) = validate_css(&css) {
56 return syn::Error::new(Span::call_site(), message)
57 .to_compile_error()
58 .into();
59 }
60 LitStr::new(&css, Span::call_site())
61 }
62 };
63
64 let output = quote! {
65 {
66 fn callsite_id(prefix: &str, file: &str, line: u32, col: u32) -> String {
67 let mut h: u64 = 0xcbf29ce484222325; for b in file.as_bytes() {
70 h ^= *b as u64;
71 h = h.wrapping_mul(0x100000001b3);
72 }
73 for b in line.to_le_bytes() {
74 h ^= b as u64;
75 h = h.wrapping_mul(0x100000001b3);
76 }
77 for b in col.to_le_bytes() {
78 h ^= b as u64;
79 h = h.wrapping_mul(0x100000001b3);
80 }
81
82 format!("{prefix}{h:016x}")
84 }
85
86 let __id = callsite_id(
87 "mx-css-",
88 file!(),
89 line!(),
90 column!(),
91 );
92
93 maud::html! {
94 style data-mx-css-id=(__id) {
95 (maud::PreEscaped(#content_lit))
96 }
97 }
98 }
99 };
100
101 TokenStream::from(output)
102}
103
104fn expand_css_helper(tokens: TokenStream2) -> TokenStream {
105 let output = quote! {
106 fn css() -> maud::Markup {
107 ::maud_extensions::inline_css! { #tokens }
108 }
109 };
110
111 TokenStream::from(output)
112}
113
114#[proc_macro]
115pub fn css(input: TokenStream) -> TokenStream {
116 let tokens: TokenStream2 = input.into();
117 expand_css_helper(tokens)
118}
119
120fn tokens_to_css(tokens: TokenStream2) -> String {
121 let mut out = String::new();
122 let mut prev_word = false;
123
124 for token in tokens {
125 match token {
126 TokenTree::Group(group) => {
127 let (open, close) = match group.delimiter() {
128 proc_macro2::Delimiter::Parenthesis => ('(', ')'),
129 proc_macro2::Delimiter::Bracket => ('[', ']'),
130 proc_macro2::Delimiter::Brace => ('{', '}'),
131 proc_macro2::Delimiter::None => (' ', ' '),
132 };
133 let needs_space = prev_word
134 && matches!(
135 group.delimiter(),
136 proc_macro2::Delimiter::Brace | proc_macro2::Delimiter::None
137 );
138 if needs_space {
139 out.push(' ');
140 }
141 if open != ' ' {
142 out.push(open);
143 }
144 out.push_str(&tokens_to_css(group.stream()));
145 if close != ' ' {
146 out.push(close);
147 }
148 prev_word = false;
149 }
150 TokenTree::Ident(ident) => {
151 if prev_word {
152 out.push(' ');
153 }
154 out.push_str(&ident.to_string());
155 prev_word = true;
156 }
157 TokenTree::Literal(literal) => {
158 if prev_word {
159 out.push(' ');
160 }
161 out.push_str(&literal.to_string());
162 prev_word = true;
163 }
164 TokenTree::Punct(punct) => {
165 out.push(punct.as_char());
166 prev_word = false;
167 }
168 }
169 }
170
171 out
172}
173
174fn validate_css(css: &str) -> core::result::Result<(), String> {
175 let mut input = cssparser::ParserInput::new(css);
176 let mut parser = cssparser::Parser::new(&mut input);
177 loop {
178 match parser.next_including_whitespace_and_comments() {
179 Ok(_) => {}
180 Err(err) => match err.kind {
181 cssparser::BasicParseErrorKind::EndOfInput => return Ok(()),
182 _ => return Err("inline_css! could not parse CSS tokens".to_string()),
183 },
184 }
185 }
186}
187
188fn expand_js_markup(js_input: JsInput) -> TokenStream {
189 let (content_lit, js_string) = match js_input {
190 JsInput::Literal(content) => {
191 let js_string = content.value();
192 (content, js_string)
193 }
194 JsInput::Tokens(tokens) => {
195 let js = tokens_to_js(tokens);
196 (LitStr::new(&js, Span::call_site()), js)
197 }
198 };
199 if let Err(message) = validate_js(&js_string) {
200 return syn::Error::new(Span::call_site(), message)
201 .to_compile_error()
202 .into();
203 }
204
205 let output = quote! {
206 maud::html! {
207 script {
208 (maud::PreEscaped(#content_lit))
209 }
210 }
211 };
212
213 TokenStream::from(output)
214}
215
216fn expand_js_helper(tokens: TokenStream2) -> TokenStream {
217 let output = quote! {
218 fn js() -> maud::Markup {
219 ::maud_extensions::inline_js! { #tokens }
220 }
221 };
222
223 TokenStream::from(output)
224}
225
226#[proc_macro]
227pub fn js(input: TokenStream) -> TokenStream {
228 let tokens: TokenStream2 = input.into();
229 expand_js_helper(tokens)
230}
231
232#[proc_macro]
233pub fn inline_js(input: TokenStream) -> TokenStream {
234 let js_input = parse_macro_input!(input as JsInput);
235 expand_js_markup(js_input)
236}
237
238#[proc_macro]
239pub fn inline_css(input: TokenStream) -> TokenStream {
240 let css_input = parse_macro_input!(input as CssInput);
241 expand_css_markup(css_input)
242}
243
244fn component_syntax_error() -> syn::Error {
245 syn::Error::new(
246 Span::call_site(),
247 "component! expects exactly one top-level element with a body block, e.g. component! { article { ... } }",
248 )
249}
250
251#[proc_macro]
252pub fn component(input: TokenStream) -> TokenStream {
253 let mut tokens: Vec<TokenTree> = TokenStream2::from(input).into_iter().collect();
254
255 while matches!(
256 tokens.last(),
257 Some(TokenTree::Punct(punct)) if punct.as_char() == ';'
258 ) {
259 tokens.pop();
260 }
261
262 if tokens.is_empty() {
263 return component_syntax_error().to_compile_error().into();
264 }
265
266 if !matches!(tokens.first(), Some(TokenTree::Ident(_))) {
267 return component_syntax_error().to_compile_error().into();
268 }
269
270 let root_body_count = tokens
271 .iter()
272 .filter(|token| matches!(token, TokenTree::Group(group) if group.delimiter() == Delimiter::Brace))
273 .count();
274
275 if root_body_count != 1 {
276 return component_syntax_error().to_compile_error().into();
277 }
278
279 let Some(TokenTree::Group(root_group)) = tokens.last() else {
280 return component_syntax_error().to_compile_error().into();
281 };
282 if root_group.delimiter() != Delimiter::Brace {
283 return component_syntax_error().to_compile_error().into();
284 }
285
286 let mut injected_body = root_group.stream();
287 injected_body.extend(quote! { (js()) (css()) });
288 let mut updated_group = Group::new(Delimiter::Brace, injected_body);
289 updated_group.set_span(root_group.span());
290 let last_index = tokens.len() - 1;
291 tokens[last_index] = TokenTree::Group(updated_group);
292
293 let root_tokens: TokenStream2 = tokens.into_iter().collect();
294 let output = quote! {
295 maud::html! {
296 #root_tokens
297 }
298 };
299
300 output.into()
301}
302
303#[proc_macro]
304pub fn js_file(input: TokenStream) -> TokenStream {
305 let path = parse_macro_input!(input as Expr);
306 let output = quote! {
307 maud::html! {
308 script {
309 (maud::PreEscaped(include_str!(#path)))
310 }
311 }
312 };
313
314 TokenStream::from(output)
315}
316
317#[proc_macro]
318pub fn css_file(input: TokenStream) -> TokenStream {
319 let path = parse_macro_input!(input as Expr);
320 let output = quote! {
321 maud::html! {
322 style {
323 (maud::PreEscaped(include_str!(#path)))
324 }
325 }
326 };
327
328 TokenStream::from(output)
329}
330
331#[proc_macro]
332pub fn surreal_scope_inline(input: TokenStream) -> TokenStream {
333 let _ = parse_macro_input!(input as Nothing);
334 let surreal_js = LitStr::new(SURREAL_JS_BUNDLE, Span::call_site());
335 let css_scope_inline_js = LitStr::new(CSS_SCOPE_INLINE_JS_BUNDLE, Span::call_site());
336 let output = quote! {
337 maud::html! {
338 script {
339 (maud::PreEscaped(#surreal_js))
340 }
341 script {
342 (maud::PreEscaped(#css_scope_inline_js))
343 }
344 }
345 };
346
347 TokenStream::from(output)
348}
349
350fn tokens_to_js(tokens: TokenStream2) -> String {
351 let mut out = String::new();
352 let mut prev_word = false;
353
354 for token in tokens {
355 match token {
356 TokenTree::Group(group) => {
357 let (open, close) = match group.delimiter() {
358 proc_macro2::Delimiter::Parenthesis => ('(', ')'),
359 proc_macro2::Delimiter::Bracket => ('[', ']'),
360 proc_macro2::Delimiter::Brace => ('{', '}'),
361 proc_macro2::Delimiter::None => (' ', ' '),
362 };
363 let needs_space = prev_word
364 && matches!(
365 group.delimiter(),
366 proc_macro2::Delimiter::Brace | proc_macro2::Delimiter::None
367 );
368 if needs_space {
369 out.push(' ');
370 }
371 if open != ' ' {
372 out.push(open);
373 }
374 out.push_str(&tokens_to_js(group.stream()));
375 if close != ' ' {
376 out.push(close);
377 }
378 prev_word = false;
379 }
380 TokenTree::Ident(ident) => {
381 if prev_word {
382 out.push(' ');
383 }
384 out.push_str(&ident.to_string());
385 prev_word = true;
386 }
387 TokenTree::Literal(literal) => {
388 if prev_word {
389 out.push(' ');
390 }
391 out.push_str(&literal.to_string());
392 prev_word = true;
393 }
394 TokenTree::Punct(punct) => {
395 out.push(punct.as_char());
396 prev_word = false;
397 }
398 }
399 }
400
401 out
402}
403
404fn validate_js(js: &str) -> core::result::Result<(), String> {
405 let cm = SourceMap::default();
406 let fm = cm.new_source_file(
407 FileName::Custom("inline.js".to_string()).into(),
408 js.to_string(),
409 );
410 let input = StringInput::from(&*fm);
411 let mut parser = Parser::new(Syntax::Es(EsSyntax::default()), input, None);
412 match parser.parse_script() {
413 Ok(_) => Ok(()),
414 Err(err) => Err(format!("inline_js! could not parse JavaScript: {err:#?}")),
415 }
416}
417
/// One `@font-face` declaration parsed from macro input:
/// `"path", "family" [, "weight" [, "style"]]`.
struct FontFace {
    // Path to the font file, passed through to include_bytes!.
    path: LitStr,
    // CSS font-family name.
    family: LitStr,
    // font-weight; treated as "normal" when omitted.
    weight: Option<LitStr>,
    // font-style; treated as "normal" when omitted.
    style: Option<LitStr>,
}
424
425impl Parse for FontFace {
426 fn parse(input: ParseStream) -> syn::Result<Self> {
427 let path: LitStr = input.parse()?;
428 input.parse::<Token![,]>()?;
429 let family: LitStr = input.parse()?;
430
431 let weight = if input.peek(Token![,]) {
432 input.parse::<Token![,]>()?;
433 if input.peek(LitStr) {
434 Some(input.parse()?)
435 } else {
436 None
437 }
438 } else {
439 None
440 };
441
442 let style = if weight.is_some() && input.peek(Token![,]) {
443 input.parse::<Token![,]>()?;
444 if input.peek(LitStr) {
445 Some(input.parse()?)
446 } else {
447 None
448 }
449 } else {
450 None
451 };
452
453 Ok(FontFace {
454 path,
455 family,
456 weight,
457 style,
458 })
459 }
460}
461
/// Semicolon-separated list of `FontFace` entries, as accepted by
/// `font_faces!`.
struct FontFaceList {
    fonts: Punctuated<FontFace, Token![;]>,
}
465
466impl Parse for FontFaceList {
467 fn parse(input: ParseStream) -> syn::Result<Self> {
468 let fonts = Punctuated::parse_terminated(input)?;
469 Ok(FontFaceList { fonts })
470 }
471}
472
473#[proc_macro]
474pub fn font_face(input: TokenStream) -> TokenStream {
475 let font = parse_macro_input!(input as FontFace);
476
477 let path = font.path;
478 let family = font.family;
479 let weight = font
480 .weight
481 .unwrap_or_else(|| LitStr::new("normal", Span::call_site()));
482 let style = font
483 .style
484 .unwrap_or_else(|| LitStr::new("normal", Span::call_site()));
485
486 let expanded = quote! {
487 {
488 use base64::Engine;
489 use base64::engine::general_purpose::STANDARD;
490 use maud::PreEscaped;
491
492 let font_bytes = include_bytes!(#path);
493 let mut base64_string = String::new();
494
495 STANDARD.encode_string(font_bytes, &mut base64_string);
496
497 let path_str = #path;
498 let format = if path_str.ends_with(".ttf") {
499 "truetype"
500 } else if path_str.ends_with(".otf") {
501 "opentype"
502 } else if path_str.ends_with(".woff") {
503 "woff"
504 } else if path_str.ends_with(".woff2") {
505 "woff2"
506 } else {
507 "truetype"
508 };
509
510 let font_type = if path_str.ends_with(".woff2") {
511 "woff2"
512 } else if path_str.ends_with(".woff") {
513 "woff"
514 } else if path_str.ends_with(".otf") {
515 "opentype"
516 } else {
517 "truetype"
518 };
519
520 let css = format!(
521 "@font-face {{\n font-family: '{}';\n src: url('data:font/{};base64,{}') format('{}');\n font-weight: {};\n font-style: {};\n}}",
522 #family,
523 font_type,
524 base64_string,
525 format,
526 #weight,
527 #style
528 );
529
530 PreEscaped(css)
531 }
532 };
533
534 expanded.into()
535}
536
537#[proc_macro]
538pub fn font_faces(input: TokenStream) -> TokenStream {
539 let fonts = parse_macro_input!(input as FontFaceList);
540
541 let font_faces = fonts.fonts.iter().map(|font| {
542 let path = &font.path;
543 let family = &font.family;
544 let weight = font
545 .weight
546 .as_ref()
547 .map_or_else(|| quote! { "normal" }, |w| quote! { #w });
548 let style = font
549 .style
550 .as_ref()
551 .map_or_else(|| quote! { "normal" }, |s| quote! { #s });
552
553 quote! {
554 {
555 use maud_extensions::font_face;
556 let face = font_face!(#path, #family, #weight, #style);
557 css.push_str(&face.0);
558 }
559 }
560 });
561
562 let expanded = quote! {
563 {
564 use maud::PreEscaped;
565 let mut css = String::new();
566
567 #(#font_faces)*
568
569 PreEscaped(css)
570 }
571 };
572
573 expanded.into()
574}