// syncdoc_core/token_processors.rs

1use crate::{inject_doc_attr, syncdoc_impl};
2use proc_macro2::TokenStream;
3use unsynn::*;
4
5use crate::parse::{ImplBlockSig, ModuleContent, ModuleItem, ModuleSig, TraitSig};
6
/// Recursively walks a token stream and injects `#[doc = ...]` attributes
/// whose contents come from external markdown files.
pub struct TokenProcessor {
    // Tokens of the module/item body currently being processed.
    input: TokenStream,
    // Root path (as a string) under which the `.md` doc files live; first
    // segment of every generated doc path.
    base_path: String,
    // Optional cfg string forwarded verbatim to `inject_doc_attr` /
    // `syncdoc_impl` — presumably gates the generated doc attribute; see
    // those functions for the exact semantics.
    cfg_attr: Option<String>,
    // Names of enclosing items (modules, traits, impl targets) accumulated
    // while recursing; joined with '/' into each doc file path.
    context: Vec<String>,
}
13
14impl TokenProcessor {
15    pub fn new(input: TokenStream, base_path: String, cfg_attr: Option<String>) -> Self {
16        Self {
17            input,
18            base_path,
19            cfg_attr,
20            context: Vec::new(),
21        }
22    }
23
24    pub fn process(self) -> TokenStream {
25        match self
26            .input
27            .clone()
28            .into_token_iter()
29            .parse::<ModuleContent>()
30        {
31            Ok(_parsed) => self.process_module_content(),
32            Err(_) => {
33                // Fallback: if declarative parsing fails, use original input
34                self.input
35            }
36        }
37    }
38
39    fn process_module_content(&self) -> TokenStream {
40        let mut output = TokenStream::new();
41
42        let content = match self
43            .input
44            .clone()
45            .into_token_iter()
46            .parse::<ModuleContent>()
47        {
48            Ok(c) => c,
49            Err(_) => return self.input.clone(),
50        };
51
52        for item in content.items.0 {
53            let processed_item = self.process_module_item(item.value);
54            output.extend(processed_item);
55        }
56
57        output
58    }
59
60    fn process_module_item(&self, item: ModuleItem) -> TokenStream {
61        match item {
62            ModuleItem::Function(func_sig) => {
63                let mut func_tokens = TokenStream::new();
64                quote::ToTokens::to_tokens(&func_sig, &mut func_tokens);
65                self.inject_doc_into_item(func_tokens, &func_sig.name.to_string())
66            }
67            ModuleItem::ImplBlock(impl_block) => self.process_impl_block(impl_block),
68            ModuleItem::Module(module) => self.process_module_block(module),
69            ModuleItem::Trait(trait_def) => self.process_trait_block(trait_def),
70            ModuleItem::Enum(enum_sig) => self.process_enum(enum_sig),
71            ModuleItem::Struct(struct_sig) => self.process_struct(struct_sig),
72            ModuleItem::TypeAlias(type_alias) => {
73                let mut alias_tokens = TokenStream::new();
74                quote::ToTokens::to_tokens(&type_alias, &mut alias_tokens);
75                self.inject_doc_into_simple_item(alias_tokens, &type_alias.name.to_string())
76            }
77            ModuleItem::Const(const_sig) => {
78                let mut const_tokens = TokenStream::new();
79                quote::ToTokens::to_tokens(&const_sig, &mut const_tokens);
80                self.inject_doc_into_simple_item(const_tokens, &const_sig.name.to_string())
81            }
82            ModuleItem::Static(static_sig) => {
83                let mut static_tokens = TokenStream::new();
84                quote::ToTokens::to_tokens(&static_sig, &mut static_tokens);
85                self.inject_doc_into_simple_item(static_tokens, &static_sig.name.to_string())
86            }
87            ModuleItem::Other(token) => {
88                let mut tokens = TokenStream::new();
89                token.to_tokens(&mut tokens);
90                tokens
91            }
92        }
93    }
94
    /// Processes an `impl` block: recurses into its body with an extended
    /// doc-path context, then re-emits the impl header around the rewritten
    /// body.
    ///
    /// Context extension:
    /// * `impl Type`           -> pushes `Type`
    /// * `impl Trait for Type` -> pushes `Type` then `Trait` (doc path `Type/Trait`)
    fn process_impl_block(&self, impl_block: ImplBlockSig) -> TokenStream {
        // Check if this is a trait impl (has "for" clause)
        let context_path = if let Some(for_trait) = &impl_block.for_trait {
            // This is "impl Trait for Type"
            // target_type contains the TRAIT name (before "for")
            let trait_name = extract_type_name(&impl_block.target_type);
            // for_trait contains "for Type" - extract Type
            let type_name = extract_first_ident_from_tokens(&for_trait.second);
            // Context should be: Type/Trait
            vec![type_name, trait_name]
        } else {
            // This is "impl Type"
            // target_type is the type being implemented
            let type_name = extract_type_name(&impl_block.target_type);
            vec![type_name]
        };

        // Get the body content as TokenStream
        let body_stream = {
            let mut ts = TokenStream::new();
            impl_block.body.to_tokens(&mut ts);
            // Extract content from within braces
            if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
                group.stream()
            } else {
                // No brace group rendered: recurse over an empty body.
                TokenStream::new()
            }
        };

        // Create new processor with updated context
        let mut new_context = self.context.clone();
        new_context.extend(context_path);
        let new_processor = TokenProcessor {
            input: body_stream,
            base_path: self.base_path.clone(),
            cfg_attr: self.cfg_attr.clone(),
            context: new_context,
        };

        let processed_content = new_processor.process();
        let processed_body = self.wrap_in_braces(processed_content);

        // Reconstruct the impl block with processed body
        let mut output = TokenStream::new();

        // Outer attributes (e.g. `#[cfg(...)]`) are re-emitted first.
        if let Some(attrs) = impl_block.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }

        impl_block._impl.to_tokens(&mut output);
        if let Some(generics) = impl_block.generics {
            generics.to_tokens(&mut output);
        }

        // target_type was parsed as (Except<for|brace>, TokenTree) pairs;
        // only the raw token (`second`) is re-emitted here.
        for item in impl_block.target_type.0 {
            item.value.second.to_tokens(&mut output);
        }

        if let Some(for_part) = impl_block.for_trait {
            for_part.to_tokens(&mut output);
        }

        if let Some(where_clause) = impl_block.where_clause {
            where_clause.to_tokens(&mut output);
        }

        output.extend(processed_body);

        output
    }
167
168    fn process_module_block(&self, module: ModuleSig) -> TokenStream {
169        // Get the body content as TokenStream
170        let body_stream = {
171            let mut ts = TokenStream::new();
172            module.body.to_tokens(&mut ts);
173            // Extract content from within braces
174            if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
175                group.stream()
176            } else {
177                TokenStream::new()
178            }
179        };
180
181        // Create new processor with updated context
182        let mut new_context = self.context.clone();
183        new_context.push(module.name.to_string());
184        let new_processor = TokenProcessor {
185            input: body_stream,
186            base_path: self.base_path.clone(),
187            cfg_attr: self.cfg_attr.clone(),
188            context: new_context,
189        };
190
191        let processed_content = new_processor.process();
192        let processed_body = self.wrap_in_braces(processed_content);
193
194        // Reconstruct the module with processed body
195        let mut output = TokenStream::new();
196
197        if let Some(attrs) = module.attributes {
198            for attr in attrs.0 {
199                attr.to_tokens(&mut output);
200            }
201        }
202
203        if let Some(vis) = module.visibility {
204            vis.to_tokens(&mut output);
205        }
206
207        module._mod.to_tokens(&mut output);
208        module.name.to_tokens(&mut output);
209
210        output.extend(processed_body);
211
212        output
213    }
214
    /// Processes a `trait` definition: items inside the trait are documented
    /// under `<context>/<TraitName>/...` (traits extend the context exactly
    /// like modules do), and a doc attribute for the trait itself is
    /// injected onto the reconstructed header.
    fn process_trait_block(&self, trait_def: TraitSig) -> TokenStream {
        // Get the body content as TokenStream
        let body_stream = {
            let mut ts = TokenStream::new();
            trait_def.body.to_tokens(&mut ts);
            // Extract content from within braces
            if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
                group.stream()
            } else {
                TokenStream::new()
            }
        };

        // Create new processor with updated context (traits behave like modules)
        let mut new_context = self.context.clone();
        new_context.push(trait_def.name.to_string());
        let new_processor = TokenProcessor {
            input: body_stream,
            base_path: self.base_path.clone(),
            cfg_attr: self.cfg_attr.clone(),
            context: new_context,
        };

        let processed_content = new_processor.process();
        let processed_body = self.wrap_in_braces(processed_content);

        // Reconstruct the trait with processed body; every optional header
        // part is re-emitted in declaration order.
        let mut output = TokenStream::new();

        if let Some(attrs) = trait_def.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }

        if let Some(vis) = trait_def.visibility {
            vis.to_tokens(&mut output);
        }

        if let Some(unsafe_kw) = trait_def.unsafe_kw {
            unsafe_kw.to_tokens(&mut output);
        }

        trait_def._trait.to_tokens(&mut output);
        trait_def.name.to_tokens(&mut output);

        if let Some(generics) = trait_def.generics {
            generics.to_tokens(&mut output);
        }

        if let Some(bounds) = trait_def.bounds {
            bounds.to_tokens(&mut output);
        }

        if let Some(where_clause) = trait_def.where_clause {
            where_clause.to_tokens(&mut output);
        }

        // Inject doc for the trait itself (path `<context>/<TraitName>.md`)
        let trait_name = trait_def.name.to_string();
        let trait_with_doc = self.inject_doc_into_simple_item(output, &trait_name);

        // Combine with processed body
        let mut final_output = trait_with_doc;
        final_output.extend(processed_body);

        final_output
    }
283
    /// Processes a `struct`: injects a doc attribute for the struct itself
    /// and, for named-field bodies, one per field (doc path
    /// `<context>/<Struct>/<field>.md`). Tuple and unit bodies are
    /// re-emitted unchanged — tuple fields get no doc injection here.
    fn process_struct(&self, struct_sig: crate::parse::StructSig) -> TokenStream {
        let struct_name = struct_sig.name.to_string();

        // Process the struct body to add docs to fields
        let processed_body = match &struct_sig.body {
            crate::parse::StructBody::Named(brace_group) => {
                // Extract fields from brace group
                let body_stream = {
                    let mut ts = TokenStream::new();
                    brace_group.to_tokens(&mut ts);
                    // Extract content from within braces
                    if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
                        group.stream()
                    } else {
                        TokenStream::new()
                    }
                };

                let processed_fields = self.process_struct_fields(body_stream, &struct_name);
                self.wrap_in_braces(processed_fields)
            }
            crate::parse::StructBody::Tuple(tuple) => {
                // For tuple structs, the parenthesised field list (and any
                // trailing semicolon captured by the parser) passes through
                // verbatim.
                let mut ts = TokenStream::new();
                tuple.to_tokens(&mut ts);
                ts
            }
            crate::parse::StructBody::Unit(semi) => {
                // Unit structs have no fields; keep only the `;`.
                let mut ts = TokenStream::new();
                semi.to_tokens(&mut ts);
                ts
            }
        };

        // Reconstruct the struct with doc attribute
        let mut output = TokenStream::new();

        if let Some(attrs) = struct_sig.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }

        if let Some(vis) = struct_sig.visibility {
            vis.to_tokens(&mut output);
        }

        struct_sig._struct.to_tokens(&mut output);

        let name_ident = struct_sig.name;
        name_ident.to_tokens(&mut output);

        if let Some(generics) = struct_sig.generics {
            generics.to_tokens(&mut output);
        }

        if let Some(where_clause) = struct_sig.where_clause {
            where_clause.to_tokens(&mut output);
        }

        // Inject doc for the struct itself
        let struct_with_doc = self.inject_doc_into_simple_item(output, &struct_name);

        // Combine struct declaration with processed body
        let mut final_output = struct_with_doc;
        final_output.extend(processed_body);

        final_output
    }
354
355    fn process_struct_fields(&self, fields_stream: TokenStream, struct_name: &str) -> TokenStream {
356        let mut output = TokenStream::new();
357
358        // Parse using StructField parser
359        let fields = match fields_stream
360            .into_token_iter()
361            .parse::<unsynn::CommaDelimitedVec<crate::parse::StructField>>()
362        {
363            Ok(fields) => fields,
364            Err(_) => return output, // Return empty if parsing fails
365        };
366
367        for (idx, field_delimited) in fields.0.iter().enumerate() {
368            let field = &field_delimited.value;
369            let field_name = field.name.to_string();
370
371            // Convert field back to tokens
372            let mut field_tokens = TokenStream::new();
373            quote::ToTokens::to_tokens(field, &mut field_tokens);
374
375            // Inject doc
376            let documented =
377                self.inject_doc_for_struct_field(field_tokens, struct_name, &field_name);
378            output.extend(documented);
379
380            // Add comma if not last field
381            if idx < fields.0.len() - 1 {
382                output.extend(quote::quote! { , });
383            }
384        }
385
386        output
387    }
388
389    fn inject_doc_for_struct_field(
390        &self,
391        field_tokens: TokenStream,
392        struct_name: &str,
393        field_name: &str,
394    ) -> TokenStream {
395        let mut path_parts = vec![self.base_path.clone()];
396        path_parts.extend(self.context.iter().cloned());
397        path_parts.push(format!("{}/{}.md", struct_name, field_name));
398
399        let full_path = path_parts.join("/");
400
401        // Use simpler injection for fields
402        inject_doc_attr(full_path, self.cfg_attr.clone(), field_tokens)
403    }
404
    /// Processes an `enum`: injects a doc attribute for the enum itself and
    /// one per variant (doc path `<context>/<Enum>/<Variant>.md`).
    fn process_enum(&self, enum_sig: crate::parse::EnumSig) -> TokenStream {
        let enum_name = enum_sig.name.to_string();

        // Get the body content as TokenStream
        let body_stream = {
            let mut ts = TokenStream::new();
            enum_sig.body.to_tokens(&mut ts);
            // Extract content from within braces
            if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
                group.stream()
            } else {
                TokenStream::new()
            }
        };

        // Process enum variants
        let processed_variants = self.process_enum_variants(body_stream, &enum_name);
        let processed_body = self.wrap_in_braces(processed_variants);

        // Reconstruct the enum with doc attribute; optional header parts are
        // re-emitted in declaration order.
        let mut output = TokenStream::new();

        if let Some(attrs) = enum_sig.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }

        if let Some(vis) = enum_sig.visibility {
            vis.to_tokens(&mut output);
        }

        enum_sig._enum.to_tokens(&mut output);

        let name_ident = enum_sig.name;
        name_ident.to_tokens(&mut output);

        if let Some(generics) = enum_sig.generics {
            generics.to_tokens(&mut output);
        }

        if let Some(where_clause) = enum_sig.where_clause {
            where_clause.to_tokens(&mut output);
        }

        // Inject doc for the enum itself using simpler method
        let enum_with_doc = self.inject_doc_into_simple_item(output, &enum_name);

        // Combine enum declaration with processed body
        let mut final_output = enum_with_doc;
        final_output.extend(processed_body);

        final_output
    }
459
460    fn process_enum_variants(&self, variants_stream: TokenStream, enum_name: &str) -> TokenStream {
461        let mut output = TokenStream::new();
462
463        // Parse using EnumVariant parser
464        let variants = match variants_stream
465            .into_token_iter()
466            .parse::<unsynn::CommaDelimitedVec<crate::parse::EnumVariant>>()
467        {
468            Ok(variants) => variants,
469            Err(_) => return output, // Return empty if parsing fails
470        };
471
472        for (idx, variant_delimited) in variants.0.iter().enumerate() {
473            let variant = &variant_delimited.value;
474            let variant_name = variant.name.to_string();
475
476            // Convert variant back to tokens
477            let mut variant_tokens = TokenStream::new();
478            quote::ToTokens::to_tokens(variant, &mut variant_tokens);
479
480            // Inject doc
481            let documented =
482                self.inject_doc_for_enum_variant(variant_tokens, enum_name, &variant_name);
483            output.extend(documented);
484
485            // Add comma if not last variant
486            if idx < variants.0.len() - 1 {
487                output.extend(quote::quote! { , });
488            }
489        }
490
491        output
492    }
493
494    fn inject_doc_for_enum_variant(
495        &self,
496        variant_tokens: TokenStream,
497        enum_name: &str,
498        variant_name: &str,
499    ) -> TokenStream {
500        let mut path_parts = vec![self.base_path.clone()];
501        path_parts.extend(self.context.iter().cloned());
502        path_parts.push(format!("{}/{}.md", enum_name, variant_name));
503
504        let full_path = path_parts.join("/");
505
506        // Use simpler injection for variants
507        inject_doc_attr(full_path, self.cfg_attr.clone(), variant_tokens)
508    }
509
510    fn wrap_in_braces(&self, content: TokenStream) -> TokenStream {
511        let mut output = TokenStream::new();
512        let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, content);
513        output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));
514        output
515    }
516
517    fn inject_doc_into_item(&self, func_tokens: TokenStream, fn_name: &str) -> TokenStream {
518        // Construct the full path including context
519        let mut path_parts = vec![self.base_path.clone()];
520        path_parts.extend(self.context.iter().cloned());
521        path_parts.push(format!("{}.md", fn_name));
522
523        let full_path = path_parts.join("/");
524
525        // Create args token stream with the constructed path
526        let args = quote::quote! { path = #full_path };
527
528        match syncdoc_impl(args, func_tokens.clone()) {
529            Ok(instrumented) => instrumented,
530            Err(e) => {
531                eprintln!("syncdoc_impl failed: {}", e);
532                func_tokens // fallback to original
533            }
534        }
535    }
536
537    fn inject_doc_into_simple_item(
538        &self,
539        item_tokens: TokenStream,
540        item_name: &str,
541    ) -> TokenStream {
542        // Construct the full path including context
543        let mut path_parts = vec![self.base_path.clone()];
544        path_parts.extend(self.context.iter().cloned());
545        path_parts.push(format!("{}.md", item_name));
546
547        let full_path = path_parts.join("/");
548
549        // Use the simpler injection that doesn't parse
550        inject_doc_attr(full_path, self.cfg_attr.clone(), item_tokens)
551    }
552}
553
554fn extract_type_name(
555    target_type: &unsynn::Many<
556        unsynn::Cons<
557            unsynn::Except<unsynn::Either<crate::parse::KFor, unsynn::BraceGroup>>,
558            proc_macro2::TokenTree,
559        >,
560    >,
561) -> String {
562    // Extract just the type name from the target_type tokens
563    // This is a simplified version - for complex cases we might need more sophistication
564    if let Some(first) = target_type.0.first() {
565        if let proc_macro2::TokenTree::Ident(ident) = &first.value.second {
566            return ident.to_string();
567        }
568    }
569    "Unknown".to_string()
570}
571
572fn extract_first_ident_from_tokens(
573    tokens: &unsynn::Many<unsynn::Cons<unsynn::Except<unsynn::BraceGroup>, proc_macro2::TokenTree>>,
574) -> String {
575    if let Some(first) = tokens.0.first() {
576        if let proc_macro2::TokenTree::Ident(ident) = &first.value.second {
577            return ident.to_string();
578        }
579    }
580    "Unknown".to_string()
581}