1#![recursion_limit = "128"]
2#![cfg_attr(can_show_location_of_runtime_parse_error, feature(proc_macro_span))]
3
4extern crate proc_macro;
5
6use proc_macro::TokenStream;
7use proc_macro_hack::proc_macro_hack;
8
9mod config;
10mod declare;
11mod error;
12mod html;
13mod ident;
14mod lexer;
15mod map;
16mod parser;
17mod span;
18
19#[proc_macro_hack]
25pub fn html(input: TokenStream) -> TokenStream {
26 let stream = lexer::unroll_stream(input.into(), false);
27 let result = html::expand_html(&stream);
28 TokenStream::from(match result {
29 Err(err) => error::parse_error(&stream, &err),
30 Ok((node, ty)) => match node.into_token_stream(&ty) {
31 Err(err) => err,
32 Ok(success) => success,
33 },
34 })
35}
36
#[cfg(feature = "dodrio")]
#[proc_macro_hack]
pub fn dodrio(input: TokenStream) -> TokenStream {
    // Flatten the incoming tokens into our lexer's stream representation.
    let stream = lexer::unroll_stream(input.into(), false);
    // Expand to a Dodrio node; parse errors become compile errors, while a
    // failed lowering already yields an error token stream we emit directly.
    let tokens = match html::expand_dodrio(&stream) {
        Ok((bump, node)) => node
            .into_dodrio_token_stream(&bump, false)
            .unwrap_or_else(|err| err),
        Err(err) => error::parse_error(&stream, &err),
    };
    tokens.into()
}
56
57#[proc_macro]
60pub fn declare_elements(input: TokenStream) -> TokenStream {
61 let stream = lexer::keywordise(lexer::unroll_stream(input.into(), true));
62 let result = declare::expand_declare(&stream);
63 TokenStream::from(match result {
64 Err(err) => error::parse_error(&stream, &err),
65 Ok(decls) => {
66 let mut out = proc_macro2::TokenStream::new();
67 for decl in decls {
68 out.extend(decl.into_token_stream());
69 }
70 out
71 }
72 })
73}