1use std::str::FromStr;
4
5use heck::{AsLowerCamelCase, AsPascalCase, AsSnekCase};
6use proc_macro2::{Group, Ident, Span, TokenStream, TokenTree};
7
8use quote::ToTokens;
9use syn::{
10 Lit,
11 parse::{Nothing, Parse, ParseStream},
12};
13
14use tokel_engine::prelude::{Registry, Transformer};
15
/// Transformer that concatenates the textual form of every input token
/// into a single identifier (see its `Transformer` impl in this file).
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Concatenate;
26
27impl Transformer for Concatenate {
28 fn transform(
29 &mut self,
30 input: TokenStream,
31 argument: TokenStream,
32 ) -> Result<TokenStream, syn::Error> {
33 let _: Nothing = syn::parse2(argument)?;
35
36 if input.is_empty() {
38 return Ok(input);
39 }
40
41 let mut concatenated_string = String::new();
42 let mut first_span = Span::call_site();
43 let mut is_first = true;
44
45 for tree in input {
46 if is_first {
47 first_span = tree.span();
48
49 is_first = false;
50 }
51
52 concatenated_string.push_str(&tree.to_string());
54 }
55
56 let parsed_ident = syn::parse_str::<Ident>(&concatenated_string).map_err(|_| {
60 syn::Error::new(
61 first_span,
62 format!("concatenated string `{concatenated_string}` is not a valid identifier"),
63 )
64 })?;
65
66 Ok(quote::quote_spanned!(first_span=> #parsed_ident))
67 }
68}
69
/// Transformer that re-cases identifiers (and string/bool literals) in
/// its input to a named case convention (see its `Transformer` impl in
/// this file).
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Case;
77
78impl Transformer for Case {
79 fn transform(&mut self, input: TokenStream, argument: TokenStream) -> syn::Result<TokenStream> {
80 #[derive(Debug, Copy, Clone)]
81 enum Target {
82 Pascal,
83 Camel,
84 Snake,
85 }
86
87 impl Parse for Target {
88 fn parse(input: ParseStream) -> syn::Result<Self> {
89 let case_ident = input.parse::<Ident>()?;
90
91 let _: Nothing = input.parse()?;
92
93 Ok(match case_ident.to_string().as_str() {
94 "pascal" => Self::Pascal,
95 "camel" => Self::Camel,
96 "snake" => Self::Snake,
97 _ => return Err(syn::Error::new_spanned(case_ident, "unsupported case")),
98 })
99 }
100 }
101
102 let target_case: Target = syn::parse2(argument)?;
103
104 fn apply_case(string: String, case: Target) -> String {
105 match case {
106 Target::Pascal => AsPascalCase(string).to_string(),
107 Target::Camel => AsLowerCamelCase(string).to_string(),
108 Target::Snake => AsSnekCase(string).to_string(),
109 }
110 }
111
112 fn apply(input: TokenStream, case: Target) -> syn::Result<TokenStream> {
113 input
114 .into_iter()
115 .try_fold(TokenStream::new(), |mut acc, target_tree| {
116 let target_output = match target_tree {
117 TokenTree::Literal(target_lit) => {
118 match syn::parse2::<Lit>(target_lit.into_token_stream())? {
119 Lit::Str(inner) => {
120 TokenStream::from_str(apply_case(inner.value(), case).as_str())?
121 }
122 Lit::Bool(lit) => TokenStream::from_str(
123 apply_case(lit.value.to_string(), case).as_str(),
124 )?,
125
126 lit @ _ => lit.into_token_stream(),
127 }
128 }
129 TokenTree::Ident(target_ident) => TokenStream::from_str(
130 apply_case(target_ident.to_string(), case).as_str(),
131 )?,
132 TokenTree::Group(group) => group
133 .stream()
134 .into_iter()
135 .map(|tree| apply(tree.into_token_stream(), case))
136 .try_fold(TokenStream::new(), |mut acc, result| {
137 result.map(|stream| {
138 acc.extend(stream);
139 acc
140 })
141 })
142 .map(|a| {
143 let mut new_group = Group::new(group.delimiter(), a);
144
145 new_group.set_span(group.span());
146
147 new_group
148 })
149 .map(TokenTree::Group)
150 .map(ToTokens::into_token_stream)?,
151
152 target_tree @ _ => target_tree.into_token_stream(),
153 };
154
155 acc.extend(target_output);
156
157 Ok(acc)
158 })
159 }
160
161 apply(input, target_case)
162 }
163}
164
165#[inline]
173pub fn register(registry: &mut Registry) -> Result<(), Box<dyn Transformer>> {
174 registry
175 .try_insert("concatenate", Concatenate)
176 .map_err(Box::new)
177 .map_err(|t| t as Box<dyn Transformer>)?;
178
179 registry
180 .try_insert("case", Case)
181 .map_err(Box::new)
182 .map_err(|t| t as Box<dyn Transformer>)?;
183
184 Ok(())
185}