1extern crate alloc;
16extern crate proc_macro;
17
18mod attr;
19mod trait_bounds;
20mod utils;
21
22use self::attr::{
23 Attributes,
24 CaptureDocsAttr,
25 CratePathAttr,
26};
27use proc_macro::TokenStream;
28use proc_macro2::{
29 Span,
30 TokenStream as TokenStream2,
31};
32use quote::quote;
33use syn::{
34 parse::{
35 Error,
36 Result,
37 },
38 parse_quote,
39 punctuated::Punctuated,
40 token::Comma,
41 visit_mut::VisitMut,
42 Data,
43 DataEnum,
44 DataStruct,
45 DeriveInput,
46 Field,
47 Fields,
48 Ident,
49 Lifetime,
50};
51
52#[proc_macro_derive(TypeInfo, attributes(scale_info, codec))]
53pub fn type_info(input: TokenStream) -> TokenStream {
54 match generate(input.into()) {
55 Ok(output) => output.into(),
56 Err(err) => err.to_compile_error().into(),
57 }
58}
59
60fn generate(input: TokenStream2) -> Result<TokenStream2> {
61 let type_info_impl = TypeInfoImpl::parse(input)?;
62 let type_info_impl_toks = type_info_impl.expand()?;
63 Ok(quote! {
64 #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
65 const _: () = {
66 #type_info_impl_toks;
67 };
68 })
69}
70
/// Parsed state of one `#[derive(TypeInfo)]` invocation: the input AST plus
/// the `#[scale_info(..)]` attributes found on it.
struct TypeInfoImpl {
    // The parsed derive input (ident, generics, data).
    ast: DeriveInput,
    // Top-level attributes (crate path, docs capture mode, skipped type params).
    attrs: Attributes,
}
75
76impl TypeInfoImpl {
77 fn parse(input: TokenStream2) -> Result<Self> {
78 let ast: DeriveInput = syn::parse2(input)?;
79 let attrs = attr::Attributes::from_ast(&ast)?;
80
81 Ok(Self { ast, attrs })
82 }
83
84 fn expand(&self) -> Result<TokenStream2> {
85 let ident = &self.ast.ident;
86 let scale_info = crate_path(self.attrs.crate_path())?;
87
88 let where_clause = trait_bounds::make_where_clause(
89 &self.attrs,
90 ident,
91 &self.ast.generics,
92 &self.ast.data,
93 &scale_info,
94 )?;
95
96 let (impl_generics, ty_generics, _) = self.ast.generics.split_for_impl();
97
98 let type_params = self.ast.generics.type_params().map(|tp| {
99 let ty_ident = &tp.ident;
100 let ty = if self.attrs.skip_type_params().map_or(true, |skip| !skip.skip(tp)) {
101 quote! { ::core::option::Option::Some(#scale_info::meta_type::<#ty_ident>()) }
102 } else {
103 quote! { ::core::option::Option::None }
104 };
105 quote! {
106 #scale_info::TypeParameter::new(::core::stringify!(#ty_ident), #ty)
107 }
108 });
109
110 let build_type = match &self.ast.data {
111 Data::Struct(ref s) => self.generate_composite_type(s, &scale_info),
112 Data::Enum(ref e) => self.generate_variant_type(e, &scale_info),
113 Data::Union(_) => {
114 return Err(Error::new_spanned(&self.ast, "Unions not supported"))
115 }
116 };
117
118 let field_types = match &self.ast.data {
119 Data::Struct(ref s) => self.generate_composite_field_types(s, &scale_info)?,
120 Data::Enum(ref e) => self.generate_variant_field_types(e, &scale_info)?,
121 Data::Union(_) => {
122 return Err(Error::new_spanned(&self.ast, "Unions not supported"))
123 }
124 };
125
126 let docs = self.generate_docs(&self.ast.attrs);
127
128 Ok(quote! {
129 impl #impl_generics #scale_info::TypeInfo for #ident #ty_generics #where_clause {
130 type Identity = Self;
131 fn type_info() -> #scale_info::Type {
132 # (# field_types ) *
133 return #scale_info::Type::builder()
134 .path(#scale_info::Path::new(::core::stringify!(#ident), ::core::module_path!()))
135 .type_params(#scale_info::prelude::vec![ #( #type_params ),* ])
136 #docs
137 .#build_type
138 }
139 }
140 })
141 }
142
143 fn generate_composite_type(
144 &self,
145 data_struct: &DataStruct,
146 scale_info: &syn::Path,
147 ) -> TokenStream2 {
148 let fields = match data_struct.fields {
149 Fields::Named(ref fs) => {
150 let fields = self.generate_fields(&fs.named);
151 quote! { named()#( #fields )* }
152 }
153 Fields::Unnamed(ref fs) => {
154 let fields = self.generate_fields(&fs.unnamed);
155 quote! { unnamed()#( #fields )* }
156 }
157 Fields::Unit => {
158 quote! {
159 unit()
160 }
161 }
162 };
163
164 quote! {
165 composite(#scale_info::build::Fields::#fields)
166 }
167 }
168
169 fn generate_composite_field_types(&self, data_struct: &DataStruct, scale_info: &syn::Path) -> Result<Vec<TokenStream2>> {
170 match data_struct.fields {
171 Fields::Named(ref fs) => {
172 Ok(self.generate_types(&fs.named, scale_info))
173 }
174 Fields::Unnamed(ref _fs) => {
175 Err(Error::new_spanned(&self.ast, "Unnamed struct not supported"))
176 }
177 Fields::Unit => {
178 Err(Error::new_spanned(&self.ast, "Unit struct not supported"))
179 }
180 }
181 }
182
183 fn generate_types(&self, fields: &Punctuated<Field, Comma>, scale_info: &syn::Path) -> Vec<TokenStream2> {
184 let fields_type = fields
185 .iter()
186 .filter(|f| {
187 !utils::should_skip(&f.attrs)
188 })
189 .map(|f| {
190 let ty = &f.ty;
191 match ty {
192 syn::Type::Path(type_path) => {
193 let path_seg = type_path.path.segments.last().unwrap();
194 let name = path_seg.ident.to_string();
195 if name == "Option" || name == "Vec" || name == "BinaryExtension" {
196 if let syn::PathArguments::AngleBracketed(x) = &type_path.path.segments.last().unwrap().arguments {
197 if let syn::GenericArgument::Type(ty) = &x.args.last().unwrap() {
198 return quote!{#scale_info::add_scale_type(#ty::type_info());};
199 } else {
200 return quote!{};
201 }
202 } else {
203 return quote!{};
204 }
205 } else {
206 return quote!{#scale_info::add_scale_type(#ty::type_info());};
207 }
208 }
209 syn::Type::Array(a) => {
210 let elem_type = &*a.elem;
211 return quote!{#scale_info::add_scale_type(#elem_type::type_info());};
212 }
213 _ => {
214 return quote!{}
215 }
216 }
217 })
218 .collect::<Vec<_>>();
219 return fields_type;
220 }
221
222 fn generate_fields(&self, fields: &Punctuated<Field, Comma>) -> Vec<TokenStream2> {
223 fields
224 .iter()
225 .filter(|f| !utils::should_skip(&f.attrs))
226 .map(|f| {
227 let (ty, ident) = (&f.ty, &f.ident);
228 struct StaticLifetimesReplace;
231 impl VisitMut for StaticLifetimesReplace {
232 fn visit_lifetime_mut(&mut self, lifetime: &mut Lifetime) {
233 *lifetime = parse_quote!('static)
234 }
235 }
236 let mut ty = match ty {
237 syn::Type::Group(group) => (*group.elem).clone(),
242 _ => ty.clone(),
243 };
244 StaticLifetimesReplace.visit_type_mut(&mut ty);
245
246 let type_name = clean_type_string("e!(#ty).to_string());
247 let docs = self.generate_docs(&f.attrs);
248 let type_of_method = if utils::is_compact(f) {
249 quote!(compact)
250 } else {
251 quote!(ty)
252 };
253 let name = if let Some(ident) = ident {
254 quote!(.name(::core::stringify!(#ident)))
255 } else {
256 quote!()
257 };
258 quote!(
259 .field(|f| f
260 .#type_of_method::<#ty>()
261 #name
262 .type_name(#type_name)
263 #docs
264 )
265 )
266 })
267 .collect()
268 }
269
270 fn generate_variant_type(
271 &self,
272 data_enum: &DataEnum,
273 scale_info: &syn::Path,
274 ) -> TokenStream2 {
275 let variants = &data_enum.variants;
276
277 let variants = variants
278 .into_iter()
279 .filter(|v| !utils::should_skip(&v.attrs))
280 .enumerate()
281 .map(|(i, v)| {
282 let ident = &v.ident;
283 let v_name = quote! {::core::stringify!(#ident) };
284 let docs = self.generate_docs(&v.attrs);
285 let index = utils::variant_index(v, i);
286
287 let fields = match v.fields {
288 Fields::Named(ref fs) => {
289 let fields = self.generate_fields(&fs.named);
290 Some(quote! {
291 .fields(#scale_info::build::Fields::named()
292 #( #fields )*
293 )
294 })
295 }
296 Fields::Unnamed(ref fs) => {
297 let fields = self.generate_fields(&fs.unnamed);
298 Some(quote! {
299 .fields(#scale_info::build::Fields::unnamed()
300 #( #fields )*
301 )
302 })
303 }
304 Fields::Unit => None,
305 };
306
307 quote! {
308 .variant(#v_name, |v|
309 v
310 .index(#index as ::core::primitive::u8)
311 #fields
312 #docs
313 )
314 }
315 });
316 quote! {
317 variant(
318 #scale_info::build::Variants::new()
319 #( #variants )*
320 )
321 }
322 }
323
324 fn generate_variant_field_types(
325 &self,
326 data_enum: &DataEnum,
327 scale_info: &syn::Path,
328 ) -> Result<Vec<TokenStream2>> {
329 let variants = &data_enum.variants;
330
331 let mut variant_types: Vec<TokenStream2> = Vec::new();
332 for v in variants {
333 match v.fields {
334 Fields::Named(ref fs) => {
335 if fs.named.len() != 1 {
336 return Err(Error::new_spanned(v, "variant has more than one type is not supported by ABI"));
337 }
338 let ty = &fs.named.first().unwrap().ty;
339 variant_types.push(quote!{#scale_info::add_scale_type(#ty::type_info());});
340 }
341 Fields::Unnamed(ref fs) => {
342 if fs.unnamed.len() != 1 {
343 return Err(Error::new_spanned(v, "variant has more than one type is not supported by ABI"));
344 }
345 let ty = &fs.unnamed.first().unwrap().ty;
346 variant_types.push(quote!{#scale_info::add_scale_type(#ty::type_info());});
347 }
348 Fields::Unit => {
349 }
351 }
352 }
353 return Ok(variant_types);
354 }
355
356 fn generate_docs(&self, attrs: &[syn::Attribute]) -> Option<TokenStream2> {
357 let docs_builder_fn = match self.attrs.capture_docs() {
358 CaptureDocsAttr::Never => None, CaptureDocsAttr::Default => Some(quote!(docs)),
360 CaptureDocsAttr::Always => Some(quote!(docs_always)),
361 }?;
362
363 let docs = attrs
364 .iter()
365 .filter_map(|attr| {
366 if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() {
367 if meta.path.get_ident().map_or(false, |ident| ident == "doc") {
368 if let syn::Lit::Str(lit) = &meta.lit {
369 let lit_value = lit.value();
370 let stripped =
371 lit_value.strip_prefix(' ').unwrap_or(&lit_value);
372 let lit: syn::Lit = parse_quote!(#stripped);
373 Some(lit)
374 } else {
375 None
376 }
377 } else {
378 None
379 }
380 } else {
381 None
382 }
383 })
384 .collect::<Vec<_>>();
385
386 Some(quote! {
387 .#docs_builder_fn(&[ #( #docs ),* ])
388 })
389 }
390}
391
392fn crate_name_path(name: &str) -> Result<syn::Path> {
394 proc_macro_crate::crate_name(name)
395 .map(|crate_name| {
396 use proc_macro_crate::FoundCrate::*;
397 match crate_name {
398 Itself => Ident::new("self", Span::call_site()).into(),
399 Name(name) => {
400 let crate_ident = Ident::new(&name, Span::call_site());
401 parse_quote!( ::#crate_ident )
402 }
403 }
404 })
405 .map_err(|e| syn::Error::new(Span::call_site(), &e))
406}
407
408fn crate_path(crate_path_attr: Option<&CratePathAttr>) -> Result<syn::Path> {
409 crate_path_attr
410 .map(|path_attr| Ok(path_attr.path().clone()))
411 .unwrap_or_else(|| crate_name_path("eosio-scale-info"))
412}
413
/// Normalizes the whitespace in a `quote!`-stringified type so it reads like
/// hand-written Rust (e.g. `Vec < u8 >` -> `Vec<u8>`).
///
/// The replacement pairs are applied strictly in order; in particular the
/// `,(` -> `, (` step re-inserts the space that the earlier ` ,` -> `,` step
/// removed before an opening parenthesis.
fn clean_type_string(input: &str) -> String {
    const REPLACEMENTS: [(&str, &str); 15] = [
        (" ::", "::"),
        (":: ", "::"),
        (" ,", ","),
        (" ;", ";"),
        (" [", "["),
        ("[ ", "["),
        (" ]", "]"),
        (" (", "("),
        (",(", ", ("),
        ("( ", "("),
        (" )", ")"),
        (" <", "<"),
        ("< ", "<"),
        (" >", ">"),
        ("& \'", "&'"),
    ];
    REPLACEMENTS
        .iter()
        .fold(input.to_string(), |acc, (from, to)| acc.replace(from, to))
}