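//! Procedural macros for building statically typed HTML documents:
//! the `html!` expression macro, an optional `dodrio!` variant behind
//! the `dodrio` feature flag, and the `declare_elements!` macro for
//! generating element definitions.
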
#![recursion_limit = "128"]
#![cfg_attr(can_show_location_of_runtime_parse_error, feature(proc_macro_span))]

extern crate proc_macro;

use proc_macro::TokenStream;
use proc_macro_hack::proc_macro_hack;

mod config;
mod declare;
mod error;
mod html;
mod ident;
mod lexer;
mod map;
mod parser;
mod span;
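
/// Expand an `html!` invocation into an expression building a typed DOM tree.
///
/// Exposed through `proc_macro_hack` so it can be used in expression
/// position on stable Rust. A usage sketch (the exact invocation goes
/// through the wrapper crate that re-exports this macro):
///
/// ```ignore
/// let doc = html!(
///     <p>"Hello!"</p>
/// );
/// ```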
#[proc_macro_hack]
pub fn html(input: TokenStream) -> TokenStream {
    // Flatten the input into the lexer's token buffer; any errors reported
    // below reference spans from this same buffer.
    let stream = lexer::unroll_stream(input.into(), false);
    let result = html::expand_html(&stream);
    TokenStream::from(match result {
        // Turn parse failures into a compile error with a useful span.
        Err(err) => error::parse_error(&stream, &err),
        // Otherwise emit the parsed node, which may itself fail to codegen.
        Ok((node, ty)) => match node.into_token_stream(&ty) {
            Err(err) => err,
            Ok(success) => success,
        },
    })
}
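
/// Like `html!`, but expands into code building a `dodrio` virtual DOM
/// node inside the caller's bump allocator. Only compiled when the
/// `dodrio` feature is enabled.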
#[cfg(feature = "dodrio")]
#[proc_macro_hack]
pub fn dodrio(input: TokenStream) -> TokenStream {
    let stream = lexer::unroll_stream(input.into(), false);
    let result = html::expand_dodrio(&stream);
    TokenStream::from(match result {
        Err(err) => error::parse_error(&stream, &err),
        // `bump` names the bump allocator binding that the generated
        // dodrio code allocates its nodes into.
        Ok((bump, node)) => match node.into_dodrio_token_stream(&bump, false) {
            Err(err) => err,
            Ok(success) => success,
        },
    })
}
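
/// Expand a block of element declarations into the corresponding
/// definitions, emitting one token stream per declared element.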
#[proc_macro]
pub fn declare_elements(input: TokenStream) -> TokenStream {
    // `keywordise` re-tags Rust keywords in the input so declarations can
    // use names like `type` or `for` as ordinary identifiers.
    let stream = lexer::keywordise(lexer::unroll_stream(input.into(), true));
    let result = declare::expand_declare(&stream);
    TokenStream::from(match result {
        Err(err) => error::parse_error(&stream, &err),
        Ok(decls) => {
            // Concatenate the generated declarations into a single output stream.
            let mut out = proc_macro2::TokenStream::new();
            for decl in decls {
                out.extend(decl.into_token_stream());
            }
            out
        }
    })
}