syncdoc_core/token_processors.rs

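//! Token-stream processing for `syncdoc_core`.
//!
//! [`TokenProcessor`] walks a parsed module body and routes every item through
//! `omnidoc_impl`, which injects a documentation path built from the
//! processor's base path, the surrounding context (modules, traits, impl
//! blocks), and the item's own name.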
use crate::omnidoc_impl;
use proc_macro2::TokenStream;
use unsynn::*;

use crate::parse::{ImplBlockSig, ModuleContent, ModuleItem, ModuleSig, TraitSig};

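/// Recursive processor that injects per-item documentation paths into a token stream.
///
/// `context` tracks the chain of enclosing items (e.g. `["MyType", "MyTrait"]`
/// for `impl MyTrait for MyType`) so nested items resolve to nested doc paths.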
pub struct TokenProcessor {
    input: TokenStream,
    base_path: String,
    cfg_attr: Option<String>,
    context: Vec<String>,
}

impl TokenProcessor {
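    /// Creates a processor for `input` with no enclosing context.
    ///
    /// `base_path` is the directory prefix for generated doc paths;
    /// `cfg_attr` is an optional attribute string forwarded unchanged to `omnidoc_impl`.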
    pub fn new(input: TokenStream, base_path: String, cfg_attr: Option<String>) -> Self {
        Self {
            input,
            base_path,
            cfg_attr,
            context: Vec::new(),
        }
    }

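    /// Consumes the processor and returns the rewritten token stream.
    ///
    /// If the input does not parse as [`ModuleContent`], the original tokens
    /// are returned untouched so the macro degrades gracefully.
    ///
    /// Illustrative usage (the input and path are placeholders):
    ///
    /// ```ignore
    /// let rewritten = TokenProcessor::new(tokens, "docs/api".into(), None).process();
    /// ```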
    pub fn process(self) -> TokenStream {
        match self
            .input
            .clone()
            .into_token_iter()
            .parse::<ModuleContent>()
        {
            Ok(_parsed) => self.process_module_content(),
            Err(_) => {
                // Fallback: if declarative parsing fails, use original input
                self.input
            }
        }
    }

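    /// Re-parses the input as [`ModuleContent`] and processes each top-level item.
    ///
    /// Falls back to returning the input unchanged if parsing fails.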
    fn process_module_content(&self) -> TokenStream {
        let mut output = TokenStream::new();

        let content = match self
            .input
            .clone()
            .into_token_iter()
            .parse::<ModuleContent>()
        {
            Ok(c) => c,
            Err(_) => return self.input.clone(),
        };

        for item in content.items.0 {
            let processed_item = self.process_module_item(item.value);
            output.extend(processed_item);
        }

        output
    }

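    /// Dispatches a single parsed item to the matching handler.
    ///
    /// Simple items (functions, trait methods, type aliases, consts, statics)
    /// get a doc path directly; containers (impl blocks, modules, traits,
    /// structs, enums) recurse into their contents. Unrecognized tokens pass
    /// through unchanged.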
    fn process_module_item(&self, item: ModuleItem) -> TokenStream {
        match item {
            ModuleItem::TraitMethod(method_sig) => {
                let mut method_tokens = TokenStream::new();
                quote::ToTokens::to_tokens(&method_sig, &mut method_tokens);
                self.inject_doc_into_simple_item(method_tokens, &method_sig.name.to_string())
            }
            ModuleItem::Function(func_sig) => {
                let mut func_tokens = TokenStream::new();
                quote::ToTokens::to_tokens(&func_sig, &mut func_tokens);
                self.inject_doc_into_simple_item(func_tokens, &func_sig.name.to_string())
            }
            ModuleItem::ImplBlock(impl_block) => self.process_impl_block(impl_block),
            ModuleItem::Module(module) => self.process_module_block(module),
            ModuleItem::Trait(trait_def) => self.process_trait_block(trait_def),
            ModuleItem::Enum(enum_sig) => self.process_enum(enum_sig),
            ModuleItem::Struct(struct_sig) => self.process_struct(struct_sig),
            ModuleItem::TypeAlias(type_alias) => {
                let mut alias_tokens = TokenStream::new();
                quote::ToTokens::to_tokens(&type_alias, &mut alias_tokens);
                self.inject_doc_into_simple_item(alias_tokens, &type_alias.name.to_string())
            }
            ModuleItem::Const(const_sig) => {
                let mut const_tokens = TokenStream::new();
                quote::ToTokens::to_tokens(&const_sig, &mut const_tokens);
                self.inject_doc_into_simple_item(const_tokens, &const_sig.name.to_string())
            }
            ModuleItem::Static(static_sig) => {
                let mut static_tokens = TokenStream::new();
                quote::ToTokens::to_tokens(&static_sig, &mut static_tokens);
                self.inject_doc_into_simple_item(static_tokens, &static_sig.name.to_string())
            }
            ModuleItem::Other(token) => {
                let mut tokens = TokenStream::new();
                token.to_tokens(&mut tokens);
                tokens
            }
        }
    }

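    /// Processes the items of an `impl` block with an extended context.
    ///
    /// For `impl Trait for Type` the context becomes `Type/Trait`; for an
    /// inherent `impl Type` it is just `Type`. The block header is then
    /// re-emitted around the rewritten items.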
    fn process_impl_block(&self, impl_block: ImplBlockSig) -> TokenStream {
        // Check if this is a trait impl (has "for" clause)
        let context_path = if let Some(for_trait) = &impl_block.for_trait {
            // This is "impl Trait for Type"
            // target_type contains the TRAIT name (before "for")
            let trait_name = extract_type_name(&impl_block.target_type);
            // for_trait contains "for Type" - extract Type
            let type_name = extract_first_ident_from_tokens(&for_trait.second);
            // Context should be: Type/Trait
            vec![type_name, trait_name]
        } else {
            // This is "impl Type"
            // target_type is the type being implemented
            let type_name = extract_type_name(&impl_block.target_type);
            vec![type_name]
        };

        // Create new processor with updated context
        let mut new_context = self.context.clone();
        new_context.extend(context_path);

        // Access parsed items directly
        let module_content = &impl_block.items.content;

        let new_processor = TokenProcessor {
            input: TokenStream::new(),
            base_path: self.base_path.clone(),
            cfg_attr: self.cfg_attr.clone(),
            context: new_context,
        };

        let mut processed_content = TokenStream::new();
        for item_delimited in &module_content.items.0 {
            processed_content
                .extend(new_processor.process_module_item(item_delimited.value.clone()));
        }

        // Reconstruct impl block
        let mut output = TokenStream::new();
        if let Some(attrs) = impl_block.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }
        impl_block._impl.to_tokens(&mut output);
        if let Some(generics) = impl_block.generics {
            generics.to_tokens(&mut output);
        }
        for item in impl_block.target_type.0 {
            item.value.second.to_tokens(&mut output);
        }
        if let Some(for_part) = impl_block.for_trait {
            for_part.to_tokens(&mut output);
        }
        if let Some(where_clause) = impl_block.where_clause {
            where_clause.to_tokens(&mut output);
        }

        // Wrap processed content in braces
        let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_content);
        output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));

        output
    }

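    /// Processes the items of an inline `mod` block, pushing the module name
    /// onto the context before recursing.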
    fn process_module_block(&self, module: ModuleSig) -> TokenStream {
        let mut new_context = self.context.clone();
        new_context.push(module.name.to_string());

        // Access parsed items directly
        let module_content = &module.items.content;

        let new_processor = TokenProcessor {
            input: TokenStream::new(),
            base_path: self.base_path.clone(),
            cfg_attr: self.cfg_attr.clone(),
            context: new_context,
        };

        let mut processed_content = TokenStream::new();
        for item_delimited in &module_content.items.0 {
            processed_content
                .extend(new_processor.process_module_item(item_delimited.value.clone()));
        }

        // Reconstruct module
        let mut output = TokenStream::new();
        if let Some(attrs) = module.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }
        if let Some(vis) = module.visibility {
            vis.to_tokens(&mut output);
        }
        module._mod.to_tokens(&mut output);
        module.name.to_tokens(&mut output);

        let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_content);
        output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));

        output
    }

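    /// Processes a trait definition: the trait header receives its own doc
    /// path, and the trait items are recursed into with the trait name pushed
    /// onto the context.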
    fn process_trait_block(&self, trait_def: TraitSig) -> TokenStream {
        let mut new_context = self.context.clone();
        new_context.push(trait_def.name.to_string());

        // Access parsed items directly
        let trait_content = &trait_def.items.content;

        let new_processor = TokenProcessor {
            input: TokenStream::new(),
            base_path: self.base_path.clone(),
            cfg_attr: self.cfg_attr.clone(),
            context: new_context,
        };

        let mut processed_content = TokenStream::new();
        for item_delimited in &trait_content.items.0 {
            processed_content
                .extend(new_processor.process_module_item(item_delimited.value.clone()));
        }

        // Inject doc for trait itself
        let mut output = TokenStream::new();
        if let Some(attrs) = trait_def.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }
        if let Some(vis) = trait_def.visibility {
            vis.to_tokens(&mut output);
        }
        if let Some(unsafe_kw) = trait_def.unsafe_kw {
            unsafe_kw.to_tokens(&mut output);
        }
        trait_def._trait.to_tokens(&mut output);
        trait_def.name.to_tokens(&mut output);
        if let Some(generics) = trait_def.generics {
            generics.to_tokens(&mut output);
        }
        if let Some(bounds) = trait_def.bounds {
            bounds.to_tokens(&mut output);
        }
        if let Some(where_clause) = trait_def.where_clause {
            where_clause.to_tokens(&mut output);
        }

        let trait_name = trait_def.name.to_string();
        let trait_with_doc = self.inject_doc_into_simple_item(output, &trait_name);

        // Combine with processed body
        let mut final_output = trait_with_doc;
        let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_content);
        final_output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));

        final_output
    }

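    /// Processes a struct definition, documenting the struct itself and, for
    /// named-field structs, each field.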
    fn process_struct(&self, struct_sig: crate::parse::StructSig) -> TokenStream {
        let struct_name = struct_sig.name.to_string();

        // Process struct body for named fields
        let processed_body = match &struct_sig.body {
            crate::parse::StructBody::Named(fields_containing) => {
                if let Some(fields_cdv) = fields_containing.content.as_ref() {
                    let processed_fields = self.process_struct_fields(fields_cdv, &struct_name);
                    let group =
                        proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_fields);
                    let mut ts = TokenStream::new();
                    ts.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));
                    ts
                } else {
                    let mut ts = TokenStream::new();
                    unsynn::ToTokens::to_tokens(fields_containing, &mut ts);
                    ts
                }
            }
            other => {
                let mut ts = TokenStream::new();
                quote::ToTokens::to_tokens(other, &mut ts);
                ts
            }
        };

        // Reconstruct struct
        let mut output = TokenStream::new();
        if let Some(attrs) = struct_sig.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }
        if let Some(vis) = struct_sig.visibility {
            vis.to_tokens(&mut output);
        }
        struct_sig._struct.to_tokens(&mut output);
        struct_sig.name.to_tokens(&mut output);
        if let Some(generics) = struct_sig.generics {
            generics.to_tokens(&mut output);
        }
        if let Some(where_clause) = struct_sig.where_clause {
            where_clause.to_tokens(&mut output);
        }

        let struct_with_doc = self.inject_doc_into_simple_item(output, &struct_name);
        let mut final_output = struct_with_doc;
        final_output.extend(processed_body);

        final_output
    }

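    /// Documents each named field, re-inserting the commas between fields.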
    fn process_struct_fields(
        &self,
        fields_cdv: &CommaDelimitedVec<crate::parse::StructField>,
        struct_name: &str,
    ) -> TokenStream {
        let mut output = TokenStream::new();

        for (idx, field_delimited) in fields_cdv.0.iter().enumerate() {
            let field = &field_delimited.value;
            let field_name = field.name.to_string();

            let mut field_tokens = TokenStream::new();
            quote::ToTokens::to_tokens(field, &mut field_tokens);

            let documented =
                self.inject_doc_for_struct_field(field_tokens, struct_name, &field_name);
            output.extend(documented);

            if idx < fields_cdv.0.len() - 1 {
                output.extend(quote::quote! { , });
            }
        }

        output
    }

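    /// Builds the doc path `<base>/<context...>/<Struct>/<field>.md` for a
    /// named struct field and delegates to `omnidoc_impl`.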
    fn inject_doc_for_struct_field(
        &self,
        field_tokens: TokenStream,
        struct_name: &str,
        field_name: &str,
    ) -> TokenStream {
        let mut path_parts = vec![self.base_path.clone()];
        path_parts.extend(self.context.iter().cloned());
        path_parts.push(format!("{}/{}.md", struct_name, field_name));

        let full_path = path_parts.join("/");
        omnidoc_impl(full_path, self.cfg_attr.clone(), field_tokens)
    }

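    /// Processes an enum definition, documenting the enum itself and each of
    /// its variants.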
    fn process_enum(&self, enum_sig: crate::parse::EnumSig) -> TokenStream {
        let enum_name = enum_sig.name.to_string();

        // Process enum variants
        let processed_variants = if let Some(variants_cdv) = enum_sig.variants.content.as_ref() {
            self.process_enum_variants(variants_cdv, &enum_name)
        } else {
            TokenStream::new()
        };

        // Reconstruct enum
        let mut output = TokenStream::new();
        if let Some(attrs) = enum_sig.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }
        if let Some(vis) = enum_sig.visibility {
            vis.to_tokens(&mut output);
        }
        enum_sig._enum.to_tokens(&mut output);
        enum_sig.name.to_tokens(&mut output);
        if let Some(generics) = enum_sig.generics {
            generics.to_tokens(&mut output);
        }
        if let Some(where_clause) = enum_sig.where_clause {
            where_clause.to_tokens(&mut output);
        }

        let enum_with_doc = self.inject_doc_into_simple_item(output, &enum_name);
        let mut final_output = enum_with_doc;
        let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_variants);
        final_output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));

        final_output
    }

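    /// Documents each variant; struct-valued variants additionally get their
    /// fields documented.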
    fn process_enum_variants(
        &self,
        variants_cdv: &CommaDelimitedVec<crate::parse::EnumVariant>,
        enum_name: &str,
    ) -> TokenStream {
        let mut output = TokenStream::new();

        for (idx, variant_delimited) in variants_cdv.0.iter().enumerate() {
            let variant = &variant_delimited.value;
            let variant_name = variant.name.to_string();

            // Check if this is a struct-valued variant
            let documented = if let Some(crate::parse::EnumVariantData::Struct(fields_containing)) =
                &variant.data
            {
                // Process struct-valued variant with fields
                self.process_struct_valued_variant(
                    variant,
                    enum_name,
                    &variant_name,
                    fields_containing,
                )
            } else {
                // Process simple variant (unit, tuple, or discriminant)
                let mut variant_tokens = TokenStream::new();
                quote::ToTokens::to_tokens(variant, &mut variant_tokens);
                self.inject_doc_for_enum_variant(variant_tokens, enum_name, &variant_name)
            };

            output.extend(documented);

            if idx < variants_cdv.0.len() - 1 {
                output.extend(quote::quote! { , });
            }
        }

        output
    }

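    /// Documents a struct-valued variant: the variant name first, then each of
    /// its fields, re-wrapped in braces.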
    fn process_struct_valued_variant(
        &self,
        variant: &crate::parse::EnumVariant,
        enum_name: &str,
        variant_name: &str,
        fields_containing: &BraceGroupContaining<
            Option<CommaDelimitedVec<crate::parse::StructField>>,
        >,
    ) -> TokenStream {
        // First, inject doc for the variant itself
        let mut variant_header = TokenStream::new();

        // Add variant attributes
        if let Some(attrs) = &variant.attributes {
            for attr in &attrs.0 {
                attr.to_tokens(&mut variant_header);
            }
        }

        // Add variant name
        variant.name.to_tokens(&mut variant_header);

        // Inject doc for variant name
        let variant_with_doc =
            self.inject_doc_for_enum_variant(variant_header, enum_name, variant_name);

        // Now process the fields
        let processed_fields = if let Some(fields_cdv) = fields_containing.content.as_ref() {
            self.process_enum_variant_fields(fields_cdv, enum_name, variant_name)
        } else {
            TokenStream::new()
        };

        // Combine: variant_name { fields }
        let mut output = variant_with_doc;
        let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_fields);
        output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));

        output
    }

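    /// Documents each field of a struct-valued enum variant.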
    fn process_enum_variant_fields(
        &self,
        fields_cdv: &CommaDelimitedVec<crate::parse::StructField>,
        enum_name: &str,
        variant_name: &str,
    ) -> TokenStream {
        let mut output = TokenStream::new();

        for (idx, field_delimited) in fields_cdv.0.iter().enumerate() {
            let field = &field_delimited.value;
            let field_name = field.name.to_string();

            let mut field_tokens = TokenStream::new();
            quote::ToTokens::to_tokens(field, &mut field_tokens);

            let documented = self.inject_doc_for_enum_variant_field(
                field_tokens,
                enum_name,
                variant_name,
                &field_name,
            );
            output.extend(documented);

            if idx < fields_cdv.0.len() - 1 {
                output.extend(quote::quote! { , });
            }
        }

        output
    }

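    /// Builds the doc path `<base>/<context...>/<Enum>/<Variant>/<field>.md`.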
    fn inject_doc_for_enum_variant_field(
        &self,
        field_tokens: TokenStream,
        enum_name: &str,
        variant_name: &str,
        field_name: &str,
    ) -> TokenStream {
        let mut path_parts = vec![self.base_path.clone()];
        path_parts.extend(self.context.iter().cloned());
        // Path structure: EnumName/VariantName/field_name.md
        path_parts.push(format!("{}/{}/{}.md", enum_name, variant_name, field_name));

        let full_path = path_parts.join("/");
        omnidoc_impl(full_path, self.cfg_attr.clone(), field_tokens)
    }

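    /// Builds the doc path `<base>/<context...>/<Enum>/<Variant>.md`.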
    fn inject_doc_for_enum_variant(
        &self,
        variant_tokens: TokenStream,
        enum_name: &str,
        variant_name: &str,
    ) -> TokenStream {
        let mut path_parts = vec![self.base_path.clone()];
        path_parts.extend(self.context.iter().cloned());
        path_parts.push(format!("{}/{}.md", enum_name, variant_name));

        let full_path = path_parts.join("/");
        omnidoc_impl(full_path, self.cfg_attr.clone(), variant_tokens)
    }

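    /// Builds the doc path `<base>/<context...>/<name>.md` for a standalone item.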
    fn inject_doc_into_simple_item(
        &self,
        item_tokens: TokenStream,
        item_name: &str,
    ) -> TokenStream {
        let mut path_parts = vec![self.base_path.clone()];
        path_parts.extend(self.context.iter().cloned());
        path_parts.push(format!("{}.md", item_name));

        let full_path = path_parts.join("/");
        omnidoc_impl(full_path, self.cfg_attr.clone(), item_tokens)
    }
}

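/// Extracts the leading identifier from the tokens before `for` (the target
/// type, or the trait name in a trait impl), falling back to `"Unknown"` when
/// the first token is not an identifier.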
fn extract_type_name(
    target_type: &unsynn::Many<
        unsynn::Cons<
            unsynn::Except<unsynn::Either<crate::parse::KFor, unsynn::BraceGroup>>,
            proc_macro2::TokenTree,
        >,
    >,
) -> String {
    if let Some(first) = target_type.0.first() {
        if let proc_macro2::TokenTree::Ident(ident) = &first.value.second {
            return ident.to_string();
        }
    }
    "Unknown".to_string()
}

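/// Extracts the first identifier from a token sequence (used for the type
/// after `for` in trait impls), falling back to `"Unknown"`.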
fn extract_first_ident_from_tokens(
    tokens: &unsynn::Many<unsynn::Cons<unsynn::Except<unsynn::BraceGroup>, proc_macro2::TokenTree>>,
) -> String {
    if let Some(first) = tokens.0.first() {
        if let proc_macro2::TokenTree::Ident(ident) = &first.value.second {
            return ident.to_string();
        }
    }
    "Unknown".to_string()
}

#[cfg(test)]
#[path = "tests/tok_proc.rs"]
mod tok_proc_tests;