//! Procedural macros for the Alchemy GUI framework: the `rsx!` view macro,
//! the `styles!` stylesheet macro, and the `#[derive(Props)]` helper.

#![recursion_limit = "128"]
#![cfg_attr(can_show_location_of_runtime_parse_error, feature(proc_macro_span))]

extern crate proc_macro;

mod error;
mod ident;
mod lexer;
mod map;
mod parser;
mod rsx;
mod span;

use proc_macro::TokenStream;
use proc_macro2::{Ident, Literal, Span, TokenStream as TokenStream2};
use proc_macro_hack::proc_macro_hack;
use quote::quote;
use syn::{DeriveInput, parse_macro_input};

use alchemy_styles::cssparser::{Parser, ParserInput, RuleListParser};
use alchemy_styles::styles_parser::{Rule, RuleParser};
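/// Expands JSX-style RSX syntax into Alchemy view nodes at compile time.
///
/// A minimal usage sketch, assuming `View` and `Text` components from the
/// framework; the exact tags and attributes here are illustrative:
///
/// ```rust,ignore
/// let tree = rsx! {
///     <View styles=["wrapper"]>
///         <Text>"Hello from Alchemy!"</Text>
///     </View>
/// };
/// ```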
#[proc_macro_hack]
pub fn rsx(input: TokenStream) -> TokenStream {
    // Flatten the incoming proc-macro token stream into the lexer's token
    // list, then hand it to the RSX parser.
    let stream = lexer::unroll_stream(input.into(), false);
    let result = rsx::expand_rsx(&stream);

    TokenStream::from(match result {
        // Surface parse failures as compile errors pointing at the offending tokens.
        Err(err) => error::parse_error(&stream, &err),
        Ok((node, ty)) => match node.into_token_stream(&ty) {
            Err(err) => err,
            Ok(success) => success,
        },
    })
}
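/// Parses CSS-like rules at compile time into an `alchemy::StyleSheet`,
/// keyed by rule name.
///
/// A minimal usage sketch; the keys and attributes shown are illustrative,
/// and only attributes understood by `RuleParser` survive parsing:
///
/// ```rust,ignore
/// let stylesheet = styles! {
///     wrapper {
///         width: 300;
///         height: 300;
///     }
/// };
/// ```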
#[proc_macro_hack]
pub fn styles(input: TokenStream) -> TokenStream {
    // Stringify the token stream and strip spaces so the CSS parser sees
    // contiguous selectors and values.
    let s = input.to_string().replace(" ", "");
    let mut input = ParserInput::new(&s);
    let mut parser = Parser::new(&mut input);

    // Parse the input as a list of style rules, discarding any that fail to parse.
    let parsed: Vec<Rule> = RuleListParser::new_for_stylesheet(&mut parser, RuleParser {})
        .filter_map(|rule| rule.ok())
        .collect();

    // Build the generated StyleSheet body: one `styles.insert(...)` per rule.
    let mut body = TokenStream2::new();
    for rule in parsed {
        let mut stream = TokenStream2::new();
        for style in rule.styles {
            stream.extend(quote!(#style,));
        }
        let key = Literal::string(&rule.key);
        body.extend(quote!(styles.insert(#key, vec![#stream]);));
    }

    quote!(alchemy::StyleSheet::new({
        use alchemy::style_attributes::*;
        use alchemy::Color;

        let mut styles = std::collections::HashMap::new();
        #body
        styles
    })).into()
}
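/// Derives props plumbing for a component. For a component `Foo`, this
/// generates `Foo::default_props()` and an `alchemy::ComponentProps` impl,
/// and expects a companion `FooProps` struct implementing `Default`.
///
/// A minimal usage sketch; `Counter` and `CounterProps` are illustrative:
///
/// ```rust,ignore
/// #[derive(Default)]
/// pub struct CounterProps {
///     pub count: i32,
/// }
///
/// #[derive(Props)]
/// pub struct Counter;
///
/// let props = Counter::default_props(); // CounterProps::default()
/// ```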
#[proc_macro_derive(Props)]
pub fn writable_props_derive(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = &input.ident;

    // By convention, a component `Foo` pairs with a props struct named `FooProps`.
    let name_props = Ident::new(&format!("{}Props", name), Span::call_site());
    let generics = input.generics;
    let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();

    TokenStream::from(quote! {
        impl #impl_generics #name #ty_generics #where_clause {
            pub fn default_props() -> #name_props {
                #name_props::default()
            }
        }

        impl #impl_generics alchemy::ComponentProps for #name #ty_generics #where_clause {
            fn set_props(&mut self, new_props: &mut std::any::Any) {
                // Downcast to the expected props type; a mismatch here means the
                // framework routed the wrong props to this component.
                match new_props.downcast_ref::<#name_props>() {
                    Some(_props) => { },
                    None => { panic!("Whoa there, somehow the wrong props were passed!"); }
                }
            }
        }
    })
}