html_node_macro/
lib.rs

#![cfg_attr(docsrs, feature(doc_auto_cfg))]

mod node_handlers;

use std::collections::{HashMap, HashSet};

use node_handlers::{
    handle_block, handle_comment, handle_doctype, handle_element, handle_fragment, handle_raw_text,
    handle_text,
};
use proc_macro::TokenStream;
use proc_macro2::{Ident, TokenStream as TokenStream2};
use proc_macro2_diagnostics::Diagnostic;
use quote::quote;
use rstml::{node::Node, Parser, ParserConfig};
use syn::Type;

/// Converts HTML-like syntax into an `::html_node::Node` expression.
#[proc_macro]
pub fn html(tokens: TokenStream) -> TokenStream {
    html_inner(tokens.into(), None)
}

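// A minimal usage sketch (assuming the companion `html-node` runtime crate
// is in scope to provide `Node`, `Fragment`, etc.):
//
//     let greeting = html! {
//         <p class="greeting">{ "Hello, world!" }</p>
//     };
//
// The macro expands to a single `::html_node::Node` expression.
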
/// Like [`html!`], but with typed elements and attributes.
///
/// Accepts an optional parenthesized list of attribute-prefix extensions,
/// each either a bare prefix or `prefix: Type`; the `data` and `aria`
/// prefixes are always permitted.
#[cfg(feature = "typed")]
#[proc_macro]
pub fn typed_html(tokens: TokenStream) -> TokenStream {
    use syn::{punctuated::Punctuated, token::Paren, Token};

    /// A `: Type` suffix on an extension prefix.
    #[derive(syn_derive::Parse)]
    struct ColonAndType {
        _colon_token: syn::Token![:],
        ty: Type,
    }

    #[derive(syn_derive::Parse)]
    enum MaybeColonAndType {
        #[parse(peek = Token![:])]
        ColonAndType(ColonAndType),
        Nothing,
    }

    /// A single attribute-prefix extension: a prefix, optionally typed.
    #[derive(syn_derive::Parse)]
    struct Extension {
        prefix: Ident,
        colon_and_type: MaybeColonAndType,
    }

    /// A parenthesized, comma-separated list of extensions.
    #[derive(syn_derive::Parse)]
    struct Extensions {
        #[syn(parenthesized)]
        #[allow(dead_code)]
        paren_token: Paren,

        #[syn(in = paren_token)]
        #[parse(Punctuated::parse_terminated)]
        extensions: Punctuated<Extension, syn::Token![,]>,
    }

    #[derive(syn_derive::Parse)]
    enum MaybeExtensions {
        #[parse(peek = Paren)]
        Extensions(Extensions),
        Nothing,
    }

    /// The full macro input: optional extensions, then the HTML tokens.
    #[derive(syn_derive::Parse)]
    struct TypedHtmlOptions {
        extensions: MaybeExtensions,
        tokens: TokenStream2,
    }

    let options = syn::parse_macro_input!(tokens as TypedHtmlOptions);

    let mut extensions = match options.extensions {
        MaybeExtensions::Extensions(extensions) => extensions
            .extensions
            .into_iter()
            .map(|extension| match extension.colon_and_type {
                MaybeColonAndType::ColonAndType(ColonAndType { ty, .. }) => {
                    (extension.prefix, Some(ty))
                }
                MaybeColonAndType::Nothing => (extension.prefix, None),
            })
            .collect::<HashMap<_, _>>(),
        MaybeExtensions::Nothing => HashMap::new(),
    };

    // `data-*` and `aria-*` attributes are always allowed, even when no
    // extensions are given.
    extensions.insert(Ident::new("data", proc_macro2::Span::call_site()), None);
    extensions.insert(Ident::new("aria", proc_macro2::Span::call_site()), None);

    html_inner(options.tokens, Some(&extensions))
}

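// A sketch of the accepted input shape, following the grammar parsed above
// (the `hx` prefix and `HtmxAttributes` type are hypothetical names, shown
// only to illustrate the optional `(prefix: Type, ...)` extension list):
//
//     typed_html! {
//         (hx: HtmxAttributes)
//         <div hx-get="/endpoint">{ "typed" }</div>
//     };
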
fn html_inner(
    tokens: TokenStream2,
    extensions: Option<&HashMap<Ident, Option<Type>>>,
) -> TokenStream {
    // from: https://html.spec.whatwg.org/dev/syntax.html#void-elements
    let void_elements = [
        "area", "base", "br", "col", "embed", "hr", "img", "input", "link", "meta", "source",
        "track", "wbr",
    ]
    .into_iter()
    .collect::<HashSet<_>>();

    // from: https://html.spec.whatwg.org/dev/syntax.html#raw-text-elements
    let raw_text_elements = ["script", "style"].into_iter().collect();

    let config = ParserConfig::new()
        .recover_block(true)
        .always_self_closed_elements(void_elements.clone())
        .raw_text_elements(raw_text_elements);

    let parser = Parser::new(config);
    let (parsed_nodes, parsing_diagnostics) = parser.parse_recoverable(tokens).split_vec();
    let (tokenized_nodes, tokenization_diagnostics) =
        tokenize_nodes(&void_elements, extensions, &parsed_nodes);

    // A single root node is emitted as-is; multiple roots are wrapped in a
    // fragment.
    let node = match &*tokenized_nodes {
        [node] => quote!(#node),
        nodes => {
            quote! {
                ::html_node::Node::Fragment(
                    ::html_node::Fragment {
                        children: ::std::vec![#(#nodes),*],
                    }
                )
            }
        }
    };

    // Surface parsing and tokenization diagnostics as compile errors while
    // still emitting the node expression, so recovery keeps partial output
    // usable.
    let errors = parsing_diagnostics
        .into_iter()
        .chain(tokenization_diagnostics)
        .map(Diagnostic::emit_as_expr_tokens);

    quote! {
        {
            #(#errors;)*
            #node
        }
    }
    .into()
}

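// For orientation, a multi-root input such as `html! { <br> "hi" }` expands
// along these lines (a simplified sketch; diagnostics elided):
//
//     {
//         ::html_node::Node::Fragment(::html_node::Fragment {
//             children: ::std::vec![/* tokens for <br> */, /* tokens for "hi" */],
//         })
//     }
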
/// Dispatches each parsed node to its handler, collecting the generated
/// token streams alongside any diagnostics they produce.
fn tokenize_nodes(
    void_elements: &HashSet<&str>,
    extensions: Option<&HashMap<Ident, Option<Type>>>,
    nodes: &[Node],
) -> (Vec<TokenStream2>, Vec<Diagnostic>) {
    // Only fragments and elements can themselves produce diagnostics; every
    // other node kind tokenizes infallibly.
    let (token_streams, diagnostics) = nodes
        .iter()
        .map(|node| match node {
            Node::Comment(comment) => (handle_comment(comment), vec![]),
            Node::Doctype(doctype) => (handle_doctype(doctype), vec![]),
            Node::Fragment(fragment) => handle_fragment(void_elements, extensions, fragment),
            Node::Element(element) => handle_element(void_elements, extensions, element),
            Node::Block(block) => (handle_block(block), vec![]),
            Node::Text(text) => (handle_text(text), vec![]),
            Node::RawText(text) => (handle_raw_text(text), vec![]),
        })
        .unzip::<_, _, Vec<_>, Vec<_>>();

    let diagnostics = diagnostics.into_iter().flatten().collect();

    (token_streams, diagnostics)
}