#![recursion_limit = "128"]
#![cfg_attr(can_show_location_of_runtime_parse_error, feature(proc_macro_span))]

extern crate proc_macro;

use proc_macro::TokenStream;

mod config;
mod declare;
mod error;
mod html;
mod ident;
mod lexer;
mod map;
mod parser;
mod span;
18#[proc_macro]
24pub fn html(input: TokenStream) -> TokenStream {
25 let stream = lexer::unroll_stream(input.into(), false);
26 let result = html::expand_html(&stream);
27 TokenStream::from(match result {
28 Err(err) => error::parse_error(&stream, &err),
29 Ok((node, ty)) => match node.into_token_stream(&ty) {
30 Err(err) => err,
31 Ok(success) => success,
32 },
33 })
34}
35
36#[cfg(feature = "dodrio")]
42#[proc_macro]
43pub fn dodrio(input: TokenStream) -> TokenStream {
44 let stream = lexer::unroll_stream(input.into(), false);
45 let result = html::expand_dodrio(&stream);
46 TokenStream::from(match result {
47 Err(err) => error::parse_error(&stream, &err),
48 Ok((bump, node)) => match node.into_dodrio_token_stream(&bump, false) {
49 Err(err) => err,
50 Ok(success) => success,
52 },
53 })
54}
55
56#[proc_macro]
59pub fn declare_elements(input: TokenStream) -> TokenStream {
60 let stream = lexer::keywordise(lexer::unroll_stream(input.into(), true));
61 let result = declare::expand_declare(&stream);
62 TokenStream::from(match result {
63 Err(err) => error::parse_error(&stream, &err),
64 Ok(decls) => {
65 let mut out = proc_macro2::TokenStream::new();
66 for decl in decls {
67 out.extend(decl.into_token_stream());
68 }
69 out
70 }
71 })
72}