use crate::{Attribute, AttributeInner, FacetInner, IParse, Ident, ToTokenIter, TokenStream};
use quote::quote;

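/// Extracts plugin names from `#[facet(derive(...))]` attributes, e.g. `Error`
/// from `#[facet(derive(Error))]`.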
pub fn extract_derive_plugins(attrs: &[Attribute]) -> Vec<String> {
    let mut plugins = Vec::new();

    for attr in attrs {
        if let AttributeInner::Facet(facet_attr) = &attr.body.content {
            for inner in facet_attr.inner.content.iter().map(|d| &d.value) {
                if let FacetInner::Simple(simple) = inner
                    && simple.key == "derive"
                {
                    if let Some(ref args) = simple.args {
                        match args {
                            crate::AttrArgs::Parens(parens) => {
                                let content = &parens.content;
                                for token in content.clone() {
                                    if let proc_macro2::TokenTree::Ident(ident) = token {
                                        plugins.push(ident.to_string());
                                    }
                                }
                            }
                            crate::AttrArgs::Equals(_) => {
                                // `derive = ...` carries no plugin list; nothing to collect.
                            }
                        }
                    }
                }
            }
        }
    }

    plugins
}

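/// Maps a plugin name to its crate path: `Error` becomes `::facet_error`.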
pub fn plugin_to_crate_path(plugin_name: &str) -> TokenStream {
    let snake_case = to_snake_case(plugin_name);
    let crate_name = format!("facet_{snake_case}");
    let crate_ident = quote::format_ident!("{}", crate_name);
    quote! { ::#crate_ident }
}

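/// Converts a `CamelCase` plugin name to `snake_case` (`PartialEq` -> `partial_eq`).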
fn to_snake_case(s: &str) -> String {
    let mut result = String::new();
    for (i, c) in s.chars().enumerate() {
        if c.is_uppercase() {
            if i > 0 {
                result.push('_');
            }
            result.push(c.to_ascii_lowercase());
        } else {
            result.push(c);
        }
    }
    result
}

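/// Removes `#[facet(...)]` plugin attributes (see [`is_plugin_attr`]) from the
/// token stream so the remaining tokens parse as a plain type definition.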
fn strip_derive_attrs(tokens: TokenStream) -> TokenStream {
    let mut result = TokenStream::new();
    let mut iter = tokens.into_iter().peekable();

    while let Some(tt) = iter.next() {
        if let proc_macro2::TokenTree::Punct(p) = &tt
            && p.as_char() == '#'
            && let Some(proc_macro2::TokenTree::Group(g)) = iter.peek()
            && g.delimiter() == proc_macro2::Delimiter::Bracket
        {
            let inner = g.stream();
            if is_plugin_attr(&inner) {
                // Drop both the `#` and the bracketed attribute body.
                iter.next();
                continue;
            }
        }
        result.extend(std::iter::once(tt));
    }

    result
}

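/// Returns `true` for attribute bodies of the form `facet(derive(...))` or the
/// namespaced `facet(ns::...)` form.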
fn is_plugin_attr(inner: &TokenStream) -> bool {
    let mut iter = inner.clone().into_iter();

    if let Some(proc_macro2::TokenTree::Ident(id)) = iter.next() {
        if id != "facet" {
            return false;
        }
    } else {
        return false;
    }

    if let Some(proc_macro2::TokenTree::Group(g)) = iter.next() {
        if g.delimiter() != proc_macro2::Delimiter::Parenthesis {
            return false;
        }

        let content = g.stream();
        let mut content_iter = content.into_iter();

        if let Some(proc_macro2::TokenTree::Ident(id)) = content_iter.next() {
            let first = id.to_string();

            if first == "derive" {
                return true;
            }

            if let Some(proc_macro2::TokenTree::Punct(p)) = content_iter.next()
                && p.as_char() == ':'
                && let Some(proc_macro2::TokenTree::Punct(p2)) = content_iter.next()
                && p2.as_char() == ':'
            {
                return true;
            }
        }
    }

    false
}

#[deprecated(note = "use is_plugin_attr instead")]
#[allow(dead_code)]
fn is_facet_derive_attr(inner: &TokenStream) -> bool {
    is_plugin_attr(inner)
}

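/// Builds the invocation that kicks off the plugin chain: the first plugin's
/// `__facet_invoke!` macro receives the original tokens, the remaining plugin
/// macros, an empty `@plugins` accumulator, and the facet crate path.
/// Returns `None` when no plugins were requested.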
pub fn generate_plugin_chain(
    input_tokens: &TokenStream,
    plugins: &[String],
    facet_crate: &TokenStream,
) -> Option<TokenStream> {
    if plugins.is_empty() {
        return None;
    }

    let plugin_paths: Vec<TokenStream> = plugins
        .iter()
        .map(|p| {
            let crate_path = plugin_to_crate_path(p);
            quote! { #crate_path::__facet_invoke }
        })
        .collect();

    let first = &plugin_paths[0];
    let rest: Vec<_> = plugin_paths[1..].iter().collect();

    let remaining = if rest.is_empty() {
        quote! {}
    } else {
        quote! { #(#rest),* }
    };

    Some(quote! {
        #first! {
            @tokens { #input_tokens }
            @remaining { #remaining }
            @plugins { }
            @facet_crate { #facet_crate }
        }
    })
}

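/// Implements `__facet_finalize`: parses the `@tokens`, `@plugins`, and
/// `@facet_crate` sections, strips plugin attributes, derives the `Facet`
/// impl for the type, and appends any plugin-generated impls.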
pub fn facet_finalize(input: TokenStream) -> TokenStream {
    let mut iter = input.to_token_iter();

    let mut tokens: Option<TokenStream> = None;
    let mut plugins_section: Option<TokenStream> = None;
    let mut facet_crate: Option<TokenStream> = None;

    while let Ok(section) = iter.parse::<FinalizeSection>() {
        match section.marker.name.to_string().as_str() {
            "tokens" => {
                tokens = Some(section.content.content);
            }
            "plugins" => {
                plugins_section = Some(section.content.content);
            }
            "facet_crate" => {
                facet_crate = Some(section.content.content);
            }
            other => {
                let msg = format!("unknown section in __facet_finalize: @{other}");
                return quote! { compile_error!(#msg); };
            }
        }
    }

    let tokens = match tokens {
        Some(t) => t,
        None => {
            return quote! { compile_error!("__facet_finalize: missing @tokens section"); };
        }
    };

    let facet_crate = facet_crate.unwrap_or_else(|| quote! { ::facet });

    let filtered_tokens = strip_derive_attrs(tokens.clone());

    let mut type_iter = filtered_tokens.clone().to_token_iter();
    let facet_impl = match type_iter.parse::<crate::Cons<crate::AdtDecl, crate::EndOfStream>>() {
        Ok(it) => match it.first {
            crate::AdtDecl::Struct(parsed) => crate::process_struct::process_struct(parsed),
            crate::AdtDecl::Enum(parsed) => crate::process_enum::process_enum(parsed),
        },
        Err(err) => {
            let msg = format!("__facet_finalize: could not parse type: {err}");
            return quote! { compile_error!(#msg); };
        }
    };

    let plugin_impls = if let Some(plugins_tokens) = plugins_section {
        extract_plugin_templates(plugins_tokens, &filtered_tokens, &facet_crate)
    } else {
        vec![]
    };

    quote! {
        #facet_impl
        #(#plugin_impls)*
    }
}

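/// A plugin's name paired with the code template it registered in the `@plugins` section.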
struct PluginTemplate {
    #[allow(dead_code)]
    name: String,
    template: TokenStream,
}

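/// Parses the type once, then evaluates every registered plugin template against it.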
fn extract_plugin_templates(
    plugins_tokens: TokenStream,
    type_tokens: &TokenStream,
    facet_crate: &TokenStream,
) -> Vec<TokenStream> {
    let plugins = parse_plugin_sections(plugins_tokens);

    let parsed_type = match facet_macro_parse::parse_type(type_tokens.clone()) {
        Ok(ty) => ty,
        Err(e) => {
            let msg = format!("failed to parse type for plugin templates: {e}");
            return vec![quote! { compile_error!(#msg); }];
        }
    };

    plugins
        .into_iter()
        .map(|plugin| evaluate_template(plugin.template, &parsed_type, facet_crate))
        .collect()
}

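/// Collects every `@plugin { ... }` section from the accumulated plugins token stream.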
fn parse_plugin_sections(tokens: TokenStream) -> Vec<PluginTemplate> {
    let mut plugins = Vec::new();
    let mut iter = tokens.into_iter().peekable();

    while let Some(tt) = iter.next() {
        if let proc_macro2::TokenTree::Punct(p) = &tt
            && p.as_char() == '@'
        {
            if let Some(proc_macro2::TokenTree::Ident(id)) = iter.peek()
                && *id == "plugin"
            {
                // Consume the `plugin` ident, then expect a brace group with the plugin body.
                iter.next();
                if let Some(proc_macro2::TokenTree::Group(g)) = iter.next()
                    && g.delimiter() == proc_macro2::Delimiter::Brace
                    && let Some(plugin) = parse_plugin_content(g.stream())
                {
                    plugins.push(plugin);
                }
            }
        }
    }

    plugins
}

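/// Parses one plugin body of the form `@name { "..." } @template { ... }` into a
/// [`PluginTemplate`].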
fn parse_plugin_content(tokens: TokenStream) -> Option<PluginTemplate> {
    let mut name: Option<String> = None;
    let mut template: Option<TokenStream> = None;
    let mut iter = tokens.into_iter().peekable();

    while let Some(tt) = iter.next() {
        if let proc_macro2::TokenTree::Punct(p) = &tt
            && p.as_char() == '@'
            && let Some(proc_macro2::TokenTree::Ident(id)) = iter.peek()
        {
            let key = id.to_string();
            // Consume the key ident, then expect a brace group with its value.
            iter.next();
            if let Some(proc_macro2::TokenTree::Group(g)) = iter.next()
                && g.delimiter() == proc_macro2::Delimiter::Brace
            {
                match key.as_str() {
                    "name" => {
                        let content = g.stream().into_iter().collect::<Vec<_>>();
                        if let Some(proc_macro2::TokenTree::Literal(lit)) = content.first() {
                            let s = lit.to_string();
                            name = Some(s.trim_matches('"').to_string());
                        }
                    }
                    "template" => {
                        template = Some(g.stream());
                    }
                    _ => {}
                }
            }
        }
    }

    match (name, template) {
        (Some(n), Some(t)) => Some(PluginTemplate {
            name: n,
            template: t,
        }),
        _ => None,
    }
}

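/// Runs a [`TemplateEvaluator`] over a plugin template and returns the expanded tokens.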
fn evaluate_template(
    template: TokenStream,
    parsed_type: &facet_macro_parse::PType,
    _facet_crate: &TokenStream,
) -> TokenStream {
    let mut evaluator = TemplateEvaluator {
        parsed_type,
        output: TokenStream::new(),
    };
    evaluator.evaluate(template);
    evaluator.output
}

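/// Walks a plugin template token stream and expands `@`-prefixed directives
/// (`@Self`, `@for_variant`, `@variant_name`, ...) against the parsed type.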
struct TemplateEvaluator<'a> {
    parsed_type: &'a facet_macro_parse::PType,
    output: TokenStream,
}

impl<'a> TemplateEvaluator<'a> {
    fn evaluate(&mut self, template: TokenStream) {
        let mut iter = template.into_iter().peekable();

        while let Some(tt) = iter.next() {
            match &tt {
                proc_macro2::TokenTree::Punct(p) if p.as_char() == '@' => {
                    if let Some(next) = iter.next() {
                        match &next {
                            proc_macro2::TokenTree::Ident(id) => {
                                let directive = id.to_string();

                                match directive.as_str() {
                                    "Self" => self.emit_self_type(),
                                    "for_variant" => self.handle_for_variant(&mut iter),
                                    "if_has_source_field" => {
                                        self.handle_if_has_source_field(&mut iter)
                                    }
                                    "if_has_from_field" => self.handle_if_has_from_field(&mut iter),
                                    // Variant-scoped directives only expand inside
                                    // `@for_variant`; at the top level they emit nothing.
                                    "variant_name" => {}
                                    "variant_pattern" => {}
                                    "format_doc_comment" => {}
                                    "source_pattern" => {}
                                    "source_expr" => {}
                                    "from_field_type" => {}
                                    _ => {
                                        // Unknown directive: pass `@` and the ident through untouched.
                                        self.output.extend(std::iter::once(tt));
                                        self.output.extend(std::iter::once(next.clone()));
                                    }
                                }
                            }
                            _ => {
                                self.output.extend(std::iter::once(tt));
                                self.output.extend(std::iter::once(next.clone()));
                            }
                        }
                    } else {
                        self.output.extend(std::iter::once(tt));
                    }
                }
                proc_macro2::TokenTree::Group(g) => {
                    // Recurse into groups so nested directives are expanded too.
                    let evaluated = {
                        let mut evaluator = TemplateEvaluator {
                            parsed_type: self.parsed_type,
                            output: TokenStream::new(),
                        };
                        evaluator.evaluate(g.stream());
                        evaluator.output
                    };
                    let new_group = proc_macro2::Group::new(g.delimiter(), evaluated);
                    self.output
                        .extend(std::iter::once(proc_macro2::TokenTree::Group(new_group)));
                }
                _ => {
                    self.output.extend(std::iter::once(tt));
                }
            }
        }
    }

    fn emit_self_type(&mut self) {
        let name = self.parsed_type.name();
        self.output.extend(quote! { #name });
    }

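    /// Expands `@for_variant { ... }` by evaluating the body once per enum variant.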
    fn handle_for_variant(
        &mut self,
        iter: &mut std::iter::Peekable<proc_macro2::token_stream::IntoIter>,
    ) {
        if let Some(proc_macro2::TokenTree::Group(g)) = iter.next()
            && g.delimiter() == proc_macro2::Delimiter::Brace
        {
            let body = g.stream();

            if let facet_macro_parse::PType::Enum(e) = self.parsed_type {
                for variant in &e.variants {
                    let variant_code = self.evaluate_variant_body(body.clone(), variant);
                    self.output.extend(variant_code);
                }
            }
        }
    }

    fn handle_if_has_source_field(
        &mut self,
        iter: &mut std::iter::Peekable<proc_macro2::token_stream::IntoIter>,
    ) {
        // Outside `@for_variant` there is no variant to test, so consume the
        // directive's brace group and emit nothing.
        if let Some(proc_macro2::TokenTree::Group(_g)) = iter.next() {}
    }

    fn handle_if_has_from_field(
        &mut self,
        iter: &mut std::iter::Peekable<proc_macro2::token_stream::IntoIter>,
    ) {
        // Same as above: consume the brace group without emitting anything.
        if let Some(proc_macro2::TokenTree::Group(_g)) = iter.next() {}
    }

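    /// Expands one `@for_variant` body for a specific variant, handling the
    /// variant-scoped directives (`@variant_name`, `@variant_pattern`, ...).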
    fn evaluate_variant_body(
        &self,
        body: TokenStream,
        variant: &facet_macro_parse::PVariant,
    ) -> TokenStream {
        let mut output = TokenStream::new();
        let mut iter = body.into_iter().peekable();

        while let Some(tt) = iter.next() {
            match &tt {
                proc_macro2::TokenTree::Punct(p) if p.as_char() == '@' => {
                    if let Some(next) = iter.next() {
                        if let proc_macro2::TokenTree::Ident(id) = &next {
                            let directive = id.to_string();

                            match directive.as_str() {
                                "variant_name" => {
                                    if let facet_macro_parse::IdentOrLiteral::Ident(name) =
                                        &variant.name.raw
                                    {
                                        output.extend(quote! { #name });
                                    }
                                }
                                "variant_pattern" => {
                                    output.extend(self.make_variant_pattern(variant));
                                }
                                "format_doc_comment" => {
                                    output.extend(self.make_format_doc_comment(variant));
                                }
                                "if_has_source_field" => {
                                    if let Some(proc_macro2::TokenTree::Group(g)) = iter.next()
                                        && self.variant_has_source_field(variant)
                                    {
                                        let evaluated =
                                            self.evaluate_source_body(g.stream(), variant);
                                        output.extend(evaluated);
                                    }
                                }
                                "if_has_from_field" => {
                                    if let Some(proc_macro2::TokenTree::Group(g)) = iter.next()
                                        && self.variant_has_from_field(variant)
                                    {
                                        let evaluated =
                                            self.evaluate_from_body(g.stream(), variant);
                                        output.extend(evaluated);
                                    }
                                }
                                _ => {
                                    output.extend(std::iter::once(tt));
                                    output.extend(std::iter::once(next.clone()));
                                }
                            }
                        } else {
                            output.extend(std::iter::once(tt));
                            output.extend(std::iter::once(next.clone()));
                        }
                    } else {
                        output.extend(std::iter::once(tt));
                    }
                }
                proc_macro2::TokenTree::Group(g) => {
                    let evaluated = self.evaluate_variant_body(g.stream(), variant);
                    let new_group = proc_macro2::Group::new(g.delimiter(), evaluated);
                    output.extend(std::iter::once(proc_macro2::TokenTree::Group(new_group)));
                }
                _ => {
                    output.extend(std::iter::once(tt));
                }
            }
        }

        output
    }

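    /// Builds the binding pattern for a variant: `(v0, v1)` for tuple variants,
    /// `{ field_a, field_b }` for struct variants, nothing for unit variants.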
    fn make_variant_pattern(&self, variant: &facet_macro_parse::PVariant) -> TokenStream {
        use facet_macro_parse::{IdentOrLiteral, PVariantKind};

        match &variant.kind {
            PVariantKind::Unit => quote! {},
            PVariantKind::Tuple { fields } => {
                let field_names: Vec<_> = (0..fields.len())
                    .map(|i| quote::format_ident!("v{}", i))
                    .collect();
                quote! { ( #(#field_names),* ) }
            }
            PVariantKind::Struct { fields } => {
                let field_names: Vec<_> = fields
                    .iter()
                    .filter_map(|f| {
                        if let IdentOrLiteral::Ident(id) = &f.name.raw {
                            Some(quote! { #id })
                        } else {
                            None
                        }
                    })
                    .collect();
                quote! { { #(#field_names),* } }
            }
        }
    }

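    /// Produces a format string (plus positional args for tuple variants when the
    /// string uses `{0}`-style placeholders) from the variant's doc comment,
    /// falling back to the variant name when no doc comment is present.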
    fn make_format_doc_comment(&self, variant: &facet_macro_parse::PVariant) -> TokenStream {
        use facet_macro_parse::PVariantKind;

        let doc = variant.attrs.doc.join(" ").trim().to_string();
        let format_str = if doc.is_empty() {
            variant.name.effective.clone()
        } else {
            doc
        };

        match &variant.kind {
            PVariantKind::Unit => {
                quote! { #format_str }
            }
            PVariantKind::Tuple { fields } => {
                if format_str.contains("{0}") {
                    let field_names: Vec<_> = (0..fields.len())
                        .map(|i| quote::format_ident!("v{}", i))
                        .collect();
                    quote! { #format_str, #(#field_names),* }
                } else {
                    quote! { #format_str }
                }
            }
            PVariantKind::Struct { fields: _ } => {
                quote! { #format_str }
            }
        }
    }

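    /// A single-field tuple variant counts as having a source when it carries an
    /// `error::source` or `error::from` facet attribute.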
    fn variant_has_source_field(&self, variant: &facet_macro_parse::PVariant) -> bool {
        use facet_macro_parse::PVariantKind;

        match &variant.kind {
            PVariantKind::Tuple { fields } if fields.len() == 1 => {
                variant.attrs.facet.iter().any(|attr| {
                    if let Some(ns) = &attr.ns {
                        *ns == "error" && (attr.key == "source" || attr.key == "from")
                    } else {
                        false
                    }
                })
            }
            // Struct variants are not treated as having a variant-level source field here.
            PVariantKind::Struct { fields: _ } => false,
            _ => false,
        }
    }

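    /// Like [`variant_has_source_field`], but only the `error::from` attribute qualifies.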
    fn variant_has_from_field(&self, variant: &facet_macro_parse::PVariant) -> bool {
        use facet_macro_parse::PVariantKind;

        matches!(&variant.kind, PVariantKind::Tuple { fields } if fields.len() == 1)
            && variant.attrs.facet.iter().any(|attr| {
                if let Some(ns) = &attr.ns {
                    *ns == "error" && attr.key == "from"
                } else {
                    false
                }
            })
    }

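    /// Expands an `@if_has_source_field { ... }` body, handling `@Self`,
    /// `@variant_name`, `@source_pattern`, and `@source_expr`.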
    fn evaluate_source_body(
        &self,
        body: TokenStream,
        variant: &facet_macro_parse::PVariant,
    ) -> TokenStream {
        use facet_macro_parse::IdentOrLiteral;
        let mut output = TokenStream::new();
        let mut iter = body.into_iter();

        while let Some(tt) = iter.next() {
            match &tt {
                proc_macro2::TokenTree::Punct(p) if p.as_char() == '@' => {
                    if let Some(next) = iter.next()
                        && let proc_macro2::TokenTree::Ident(id) = &next
                    {
                        match id.to_string().as_str() {
                            "Self" => {
                                let name = self.parsed_type.name();
                                output.extend(quote! { #name });
                            }
                            "variant_name" => {
                                if let IdentOrLiteral::Ident(name) = &variant.name.raw {
                                    output.extend(quote! { #name });
                                }
                            }
                            "source_pattern" => {
                                output.extend(self.make_source_pattern(variant));
                            }
                            "source_expr" => {
                                output.extend(self.make_source_expr(variant));
                            }
                            _ => {
                                output.extend(std::iter::once(tt));
                                output.extend(std::iter::once(next.clone()));
                            }
                        }
                    }
                }
                proc_macro2::TokenTree::Group(g) => {
                    let evaluated = self.evaluate_source_body(g.stream(), variant);
                    let new_group = proc_macro2::Group::new(g.delimiter(), evaluated);
                    output.extend(std::iter::once(proc_macro2::TokenTree::Group(new_group)));
                }
                _ => {
                    output.extend(std::iter::once(tt));
                }
            }
        }

        output
    }

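    /// Expands an `@if_has_from_field { ... }` body, handling `@Self`,
    /// `@from_field_type`, and `@variant_name`.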
    #[allow(clippy::only_used_in_recursion)]
    fn evaluate_from_body(
        &self,
        body: TokenStream,
        variant: &facet_macro_parse::PVariant,
    ) -> TokenStream {
        use facet_macro_parse::{IdentOrLiteral, PVariantKind};
        let mut output = TokenStream::new();
        let mut iter = body.into_iter();

        while let Some(tt) = iter.next() {
            match &tt {
                proc_macro2::TokenTree::Punct(p) if p.as_char() == '@' => {
                    if let Some(next) = iter.next()
                        && let proc_macro2::TokenTree::Ident(id) = &next
                    {
                        match id.to_string().as_str() {
                            "Self" => {
                                let name = self.parsed_type.name();
                                output.extend(quote! { #name });
                            }
                            "from_field_type" => {
                                if let PVariantKind::Tuple { fields } = &variant.kind
                                    && let Some(field) = fields.first()
                                {
                                    let ty = &field.ty;
                                    output.extend(quote! { #ty });
                                }
                            }
                            "variant_name" => {
                                if let IdentOrLiteral::Ident(name) = &variant.name.raw {
                                    output.extend(quote! { #name });
                                }
                            }
                            _ => {
                                output.extend(std::iter::once(tt));
                                output.extend(std::iter::once(next.clone()));
                            }
                        }
                    }
                }
                proc_macro2::TokenTree::Group(g) => {
                    let evaluated = self.evaluate_from_body(g.stream(), variant);
                    let new_group = proc_macro2::Group::new(g.delimiter(), evaluated);
                    output.extend(std::iter::once(proc_macro2::TokenTree::Group(new_group)));
                }
                _ => {
                    output.extend(std::iter::once(tt));
                }
            }
        }

        output
    }

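    /// Pattern that binds the source field: `(e)` for tuple variants, `{ name, .. }`
    /// for struct variants whose field carries an `error::source`/`error::from` attribute.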
    fn make_source_pattern(&self, variant: &facet_macro_parse::PVariant) -> TokenStream {
        use facet_macro_parse::PVariantKind;

        match &variant.kind {
            PVariantKind::Tuple { .. } => {
                quote! { (e) }
            }
            PVariantKind::Struct { fields } => {
                for field in fields {
                    if field.attrs.facet.iter().any(|attr| {
                        if let Some(ns) = &attr.ns {
                            *ns == "error" && (attr.key == "source" || attr.key == "from")
                        } else {
                            false
                        }
                    }) && let facet_macro_parse::IdentOrLiteral::Ident(name) = &field.name.raw
                    {
                        return quote! { { #name, .. } };
                    }
                }
                quote! { { .. } }
            }
            _ => quote! {},
        }
    }

    fn make_source_expr(&self, _variant: &facet_macro_parse::PVariant) -> TokenStream {
        quote! { e }
    }
}

crate::unsynn! {
    // An `@name` section marker: the `@` punct followed by the section name.
    struct FinalizeSectionMarker {
        _at: crate::At,
        name: Ident,
    }

    // A full section as produced by the plugin chain: `@name { ...content... }`.
    struct FinalizeSection {
        marker: FinalizeSectionMarker,
        content: crate::BraceGroupContaining<TokenStream>,
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::IParse;
    use quote::quote;

    #[test]
    fn test_to_snake_case() {
        assert_eq!(to_snake_case("Error"), "error");
        assert_eq!(to_snake_case("Display"), "display");
        assert_eq!(to_snake_case("PartialEq"), "partial_eq");
        assert_eq!(to_snake_case("FromStr"), "from_str");
    }

    #[test]
    fn test_extract_derive_plugins() {
        let input = quote! {
            #[derive(Facet, Debug)]
            #[facet(derive(Error))]
            #[repr(u8)]
            pub enum MyError {
                Disconnect(u32),
            }
        };

        let mut iter = input.to_token_iter();
        let parsed = iter.parse::<crate::Enum>().expect("Failed to parse enum");

        let plugins = extract_derive_plugins(&parsed.attributes);
        assert_eq!(plugins, vec!["Error"]);
    }

    #[test]
    fn test_extract_multiple_plugins() {
        let input = quote! {
            #[facet(derive(Error, Display))]
            pub enum MyError {
                Unknown,
            }
        };

        let mut iter = input.to_token_iter();
        let parsed = iter.parse::<crate::Enum>().expect("Failed to parse enum");

        let plugins = extract_derive_plugins(&parsed.attributes);
        assert_eq!(plugins, vec!["Error", "Display"]);
    }
}