syncdoc_core/token_processors.rs

use crate::{inject_doc_attr, syncdoc_impl};
use proc_macro2::TokenStream;
use unsynn::*;

use crate::parse::{ImplBlockSig, ModuleContent, ModuleItem, ModuleSig, TraitSig};

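/// Recursive token-stream processor that walks a module's items and injects
/// documentation attributes via `inject_doc_attr` and `syncdoc_impl`.
///
/// Doc paths are built from `base_path`, the stack of enclosing names in
/// `context` (modules, types, traits), and the item's own name, ending in `.md`.
/// `cfg_attr` is an optional attribute condition passed through to the
/// injection helpers.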
pub struct TokenProcessor {
    input: TokenStream,
    base_path: String,
    cfg_attr: Option<String>,
    context: Vec<String>,
}

impl TokenProcessor {
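    /// Creates a processor for `input` rooted at `base_path`, with an optional
    /// `cfg_attr` and an initially empty context stack.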
    pub fn new(input: TokenStream, base_path: String, cfg_attr: Option<String>) -> Self {
        Self {
            input,
            base_path,
            cfg_attr,
            context: Vec::new(),
        }
    }

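    /// Entry point: if the input parses as `ModuleContent`, every item is
    /// processed; otherwise the original tokens are returned untouched.
    ///
    /// Minimal usage sketch (hypothetical call site; the `"docs"` base path is
    /// only illustrative):
    ///
    /// ```ignore
    /// let output = TokenProcessor::new(tokens, "docs".to_string(), None).process();
    /// ```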
    pub fn process(self) -> TokenStream {
        match self
            .input
            .clone()
            .into_token_iter()
            .parse::<ModuleContent>()
        {
            Ok(_parsed) => self.process_module_content(),
            Err(_) => {
                // Fallback: if declarative parsing fails, use original input
                self.input
            }
        }
    }

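    /// Parses the input as `ModuleContent` and processes each top-level item,
    /// returning the input unchanged if parsing fails.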
    fn process_module_content(&self) -> TokenStream {
        let mut output = TokenStream::new();

        let content = match self
            .input
            .clone()
            .into_token_iter()
            .parse::<ModuleContent>()
        {
            Ok(c) => c,
            Err(_) => return self.input.clone(),
        };

        for item in content.items.0 {
            let processed_item = self.process_module_item(item.value);
            output.extend(processed_item);
        }

        output
    }

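    /// Dispatches a single item to the matching handler; tokens that are not
    /// recognized as items (`ModuleItem::Other`) are passed through unchanged.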
    fn process_module_item(&self, item: ModuleItem) -> TokenStream {
        match item {
            ModuleItem::Function(func_sig) => {
                let mut func_tokens = TokenStream::new();
                quote::ToTokens::to_tokens(&func_sig, &mut func_tokens);
                self.inject_doc_into_item(func_tokens, &func_sig.name.to_string())
            }
            ModuleItem::ImplBlock(impl_block) => self.process_impl_block(impl_block),
            ModuleItem::Module(module) => self.process_module_block(module),
            ModuleItem::Trait(trait_def) => self.process_trait_block(trait_def),
            ModuleItem::Enum(enum_sig) => self.process_enum(enum_sig),
            ModuleItem::Struct(struct_sig) => self.process_struct(struct_sig),
            ModuleItem::TypeAlias(type_alias) => {
                let mut alias_tokens = TokenStream::new();
                quote::ToTokens::to_tokens(&type_alias, &mut alias_tokens);
                self.inject_doc_into_simple_item(alias_tokens, &type_alias.name.to_string())
            }
            ModuleItem::Const(const_sig) => {
                let mut const_tokens = TokenStream::new();
                quote::ToTokens::to_tokens(&const_sig, &mut const_tokens);
                self.inject_doc_into_simple_item(const_tokens, &const_sig.name.to_string())
            }
            ModuleItem::Static(static_sig) => {
                let mut static_tokens = TokenStream::new();
                quote::ToTokens::to_tokens(&static_sig, &mut static_tokens);
                self.inject_doc_into_simple_item(static_tokens, &static_sig.name.to_string())
            }
            ModuleItem::Other(token) => {
                let mut tokens = TokenStream::new();
                token.to_tokens(&mut tokens);
                tokens
            }
        }
    }

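    /// Processes an `impl` block by recursing into its body with an extended
    /// context: `impl Type` pushes `Type`, while `impl Trait for Type` pushes
    /// `Type` then `Trait`, so (for example) items in `impl Display for Config`
    /// resolve to `<base_path>/Config/Display/<item>.md`.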
    fn process_impl_block(&self, impl_block: ImplBlockSig) -> TokenStream {
        // Check if this is a trait impl (has "for" clause)
        let context_path = if let Some(for_trait) = &impl_block.for_trait {
            // This is "impl Trait for Type"
            // target_type contains the TRAIT name (before "for")
            let trait_name = extract_type_name(&impl_block.target_type);
            // for_trait contains "for Type" - extract Type
            let type_name = extract_first_ident_from_tokens(&for_trait.second);
            // Context should be: Type/Trait
            vec![type_name, trait_name]
        } else {
            // This is "impl Type"
            // target_type is the type being implemented
            let type_name = extract_type_name(&impl_block.target_type);
            vec![type_name]
        };

        // Get the body content as TokenStream
        let body_stream = {
            let mut ts = TokenStream::new();
            impl_block.body.to_tokens(&mut ts);
            // Extract content from within braces
            if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
                group.stream()
            } else {
                TokenStream::new()
            }
        };

        // Create new processor with updated context
        let mut new_context = self.context.clone();
        new_context.extend(context_path);
        let new_processor = TokenProcessor {
            input: body_stream,
            base_path: self.base_path.clone(),
            cfg_attr: self.cfg_attr.clone(),
            context: new_context,
        };

        let processed_content = new_processor.process();
        let processed_body = self.wrap_in_braces(processed_content);

        // Reconstruct the impl block with processed body
        let mut output = TokenStream::new();

        if let Some(attrs) = impl_block.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }

        impl_block._impl.to_tokens(&mut output);
        if let Some(generics) = impl_block.generics {
            generics.to_tokens(&mut output);
        }

        for item in impl_block.target_type.0 {
            item.value.second.to_tokens(&mut output);
        }

        if let Some(for_part) = impl_block.for_trait {
            for_part.to_tokens(&mut output);
        }

        if let Some(where_clause) = impl_block.where_clause {
            where_clause.to_tokens(&mut output);
        }

        output.extend(processed_body);

        output
    }

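    /// Processes a `mod` block: the module name is pushed onto the context, the
    /// body is re-processed, and the module header is reassembled around it.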
    fn process_module_block(&self, module: ModuleSig) -> TokenStream {
        // Get the body content as TokenStream
        let body_stream = {
            let mut ts = TokenStream::new();
            module.body.to_tokens(&mut ts);
            // Extract content from within braces
            if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
                group.stream()
            } else {
                TokenStream::new()
            }
        };

        // Create new processor with updated context
        let mut new_context = self.context.clone();
        new_context.push(module.name.to_string());
        let new_processor = TokenProcessor {
            input: body_stream,
            base_path: self.base_path.clone(),
            cfg_attr: self.cfg_attr.clone(),
            context: new_context,
        };

        let processed_content = new_processor.process();
        let processed_body = self.wrap_in_braces(processed_content);

        // Reconstruct the module with processed body
        let mut output = TokenStream::new();

        if let Some(attrs) = module.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }

        if let Some(vis) = module.visibility {
            vis.to_tokens(&mut output);
        }

        module._mod.to_tokens(&mut output);
        module.name.to_tokens(&mut output);

        output.extend(processed_body);

        output
    }

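    /// Processes a trait definition the same way as a module: the trait name is
    /// pushed onto the context before its body is re-processed.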
    fn process_trait_block(&self, trait_def: TraitSig) -> TokenStream {
        // Get the body content as TokenStream
        let body_stream = {
            let mut ts = TokenStream::new();
            trait_def.body.to_tokens(&mut ts);
            // Extract content from within braces
            if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
                group.stream()
            } else {
                TokenStream::new()
            }
        };

        // Create new processor with updated context (traits behave like modules)
        let mut new_context = self.context.clone();
        new_context.push(trait_def.name.to_string());
        let new_processor = TokenProcessor {
            input: body_stream,
            base_path: self.base_path.clone(),
            cfg_attr: self.cfg_attr.clone(),
            context: new_context,
        };

        let processed_content = new_processor.process();
        let processed_body = self.wrap_in_braces(processed_content);

        // Reconstruct the trait with processed body
        let mut output = TokenStream::new();

        if let Some(attrs) = trait_def.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }

        if let Some(vis) = trait_def.visibility {
            vis.to_tokens(&mut output);
        }

        if let Some(unsafe_kw) = trait_def.unsafe_kw {
            unsafe_kw.to_tokens(&mut output);
        }

        trait_def._trait.to_tokens(&mut output);
        trait_def.name.to_tokens(&mut output);

        if let Some(generics) = trait_def.generics {
            generics.to_tokens(&mut output);
        }

        if let Some(bounds) = trait_def.bounds {
            bounds.to_tokens(&mut output);
        }

        if let Some(where_clause) = trait_def.where_clause {
            where_clause.to_tokens(&mut output);
        }

        output.extend(processed_body);

        output
    }

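    /// Documents a struct and, for named-field structs, each of its fields;
    /// tuple and unit struct bodies are emitted as-is.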
    fn process_struct(&self, struct_sig: crate::parse::StructSig) -> TokenStream {
        let struct_name = struct_sig.name.to_string();

        // Process the struct body to add docs to fields
        let processed_body = match &struct_sig.body {
            crate::parse::StructBody::Named(brace_group) => {
                // Extract fields from brace group
                let body_stream = {
                    let mut ts = TokenStream::new();
                    brace_group.to_tokens(&mut ts);
                    // Extract content from within braces
                    if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
                        group.stream()
                    } else {
                        TokenStream::new()
                    }
                };

                let processed_fields = self.process_struct_fields(body_stream, &struct_name);
                self.wrap_in_braces(processed_fields)
            }
            crate::parse::StructBody::Tuple(tuple) => {
                // Tuple struct fields are emitted unchanged (no per-field docs)
                let mut ts = TokenStream::new();
                tuple.to_tokens(&mut ts);
                ts
            }
            crate::parse::StructBody::Unit(semi) => {
                // Unit structs have no fields
                let mut ts = TokenStream::new();
                semi.to_tokens(&mut ts);
                ts
            }
        };

        // Reconstruct the struct with doc attribute
        let mut output = TokenStream::new();

        if let Some(attrs) = struct_sig.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }

        if let Some(vis) = struct_sig.visibility {
            vis.to_tokens(&mut output);
        }

        struct_sig._struct.to_tokens(&mut output);

        let name_ident = struct_sig.name;
        name_ident.to_tokens(&mut output);

        if let Some(generics) = struct_sig.generics {
            generics.to_tokens(&mut output);
        }

        if let Some(where_clause) = struct_sig.where_clause {
            where_clause.to_tokens(&mut output);
        }

        // Inject doc for the struct itself
        let struct_with_doc = self.inject_doc_into_simple_item(output, &struct_name);

        // Combine struct declaration with processed body
        let mut final_output = struct_with_doc;
        final_output.extend(processed_body);

        final_output
    }

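    /// Parses named struct fields and injects a doc attribute for each one,
    /// returning an empty stream if the fields fail to parse.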
    fn process_struct_fields(&self, fields_stream: TokenStream, struct_name: &str) -> TokenStream {
        let mut output = TokenStream::new();

        // Parse using StructField parser
        let fields = match fields_stream
            .into_token_iter()
            .parse::<unsynn::CommaDelimitedVec<crate::parse::StructField>>()
        {
            Ok(fields) => fields,
            Err(_) => return output, // Return empty if parsing fails
        };

        for (idx, field_delimited) in fields.0.iter().enumerate() {
            let field = &field_delimited.value;
            let field_name = field.name.to_string();

            // Convert field back to tokens
            let mut field_tokens = TokenStream::new();
            quote::ToTokens::to_tokens(field, &mut field_tokens);

            // Inject doc
            let documented =
                self.inject_doc_for_struct_field(field_tokens, struct_name, &field_name);
            output.extend(documented);

            // Add comma if not last field
            if idx < fields.0.len() - 1 {
                output.extend(quote::quote! { , });
            }
        }

        output
    }

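    /// Builds the path `<base_path>/<context...>/<Struct>/<field>.md` and
    /// delegates to `inject_doc_attr`.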
    fn inject_doc_for_struct_field(
        &self,
        field_tokens: TokenStream,
        struct_name: &str,
        field_name: &str,
    ) -> TokenStream {
        let mut path_parts = vec![self.base_path.clone()];
        path_parts.extend(self.context.iter().cloned());
        path_parts.push(format!("{}/{}.md", struct_name, field_name));

        let full_path = path_parts.join("/");

        // Use simpler injection for fields
        inject_doc_attr(full_path, self.cfg_attr.clone(), field_tokens)
    }

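    /// Documents an enum and each of its variants, mirroring `process_struct`.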
    fn process_enum(&self, enum_sig: crate::parse::EnumSig) -> TokenStream {
        let enum_name = enum_sig.name.to_string();

        // Get the body content as TokenStream
        let body_stream = {
            let mut ts = TokenStream::new();
            enum_sig.body.to_tokens(&mut ts);
            // Extract content from within braces
            if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
                group.stream()
            } else {
                TokenStream::new()
            }
        };

        // Process enum variants
        let processed_variants = self.process_enum_variants(body_stream, &enum_name);
        let processed_body = self.wrap_in_braces(processed_variants);

        // Reconstruct the enum with doc attribute
        let mut output = TokenStream::new();

        if let Some(attrs) = enum_sig.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }

        if let Some(vis) = enum_sig.visibility {
            vis.to_tokens(&mut output);
        }

        enum_sig._enum.to_tokens(&mut output);

        let name_ident = enum_sig.name;
        name_ident.to_tokens(&mut output);

        if let Some(generics) = enum_sig.generics {
            generics.to_tokens(&mut output);
        }

        if let Some(where_clause) = enum_sig.where_clause {
            where_clause.to_tokens(&mut output);
        }

        // Inject doc for the enum itself using simpler method
        let enum_with_doc = self.inject_doc_into_simple_item(output, &enum_name);

        // Combine enum declaration with processed body
        let mut final_output = enum_with_doc;
        final_output.extend(processed_body);

        final_output
    }

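    /// Parses enum variants and injects a doc attribute for each one, returning
    /// an empty stream if the variants fail to parse.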
    fn process_enum_variants(&self, variants_stream: TokenStream, enum_name: &str) -> TokenStream {
        let mut output = TokenStream::new();

        // Parse using EnumVariant parser
        let variants = match variants_stream
            .into_token_iter()
            .parse::<unsynn::CommaDelimitedVec<crate::parse::EnumVariant>>()
        {
            Ok(variants) => variants,
            Err(_) => return output, // Return empty if parsing fails
        };

        for (idx, variant_delimited) in variants.0.iter().enumerate() {
            let variant = &variant_delimited.value;
            let variant_name = variant.name.to_string();

            // Convert variant back to tokens
            let mut variant_tokens = TokenStream::new();
            quote::ToTokens::to_tokens(variant, &mut variant_tokens);

            // Inject doc
            let documented =
                self.inject_doc_for_enum_variant(variant_tokens, enum_name, &variant_name);
            output.extend(documented);

            // Add comma if not last variant
            if idx < variants.0.len() - 1 {
                output.extend(quote::quote! { , });
            }
        }

        output
    }

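    /// Builds the path `<base_path>/<context...>/<Enum>/<variant>.md` and
    /// delegates to `inject_doc_attr`.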
    fn inject_doc_for_enum_variant(
        &self,
        variant_tokens: TokenStream,
        enum_name: &str,
        variant_name: &str,
    ) -> TokenStream {
        let mut path_parts = vec![self.base_path.clone()];
        path_parts.extend(self.context.iter().cloned());
        path_parts.push(format!("{}/{}.md", enum_name, variant_name));

        let full_path = path_parts.join("/");

        // Use simpler injection for variants
        inject_doc_attr(full_path, self.cfg_attr.clone(), variant_tokens)
    }

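    /// Wraps a token stream in a single brace-delimited group.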
    fn wrap_in_braces(&self, content: TokenStream) -> TokenStream {
        let mut output = TokenStream::new();
        let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, content);
        output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));
        output
    }

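    /// Injects docs into a function by calling `syncdoc_impl` with the path
    /// `<base_path>/<context...>/<fn_name>.md`; on error the original tokens are
    /// returned and the failure is reported on stderr.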
    fn inject_doc_into_item(&self, func_tokens: TokenStream, fn_name: &str) -> TokenStream {
        // Construct the full path including context
        let mut path_parts = vec![self.base_path.clone()];
        path_parts.extend(self.context.iter().cloned());
        path_parts.push(format!("{}.md", fn_name));

        let full_path = path_parts.join("/");

        // Create args token stream with the constructed path
        let args = quote::quote! { path = #full_path };

        match syncdoc_impl(args, func_tokens.clone()) {
            Ok(instrumented) => instrumented,
            Err(e) => {
                eprintln!("syncdoc_impl failed: {}", e);
                func_tokens // fallback to original
            }
        }
    }

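    /// Injects docs into a non-function item via `inject_doc_attr`, using the
    /// path `<base_path>/<context...>/<item_name>.md`.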
    fn inject_doc_into_simple_item(
        &self,
        item_tokens: TokenStream,
        item_name: &str,
    ) -> TokenStream {
        // Construct the full path including context
        let mut path_parts = vec![self.base_path.clone()];
        path_parts.extend(self.context.iter().cloned());
        path_parts.push(format!("{}.md", item_name));

        let full_path = path_parts.join("/");

        // Use the simpler injection that doesn't parse
        inject_doc_attr(full_path, self.cfg_attr.clone(), item_tokens)
    }
}

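/// Returns the first identifier in an `impl` target's token sequence, or
/// `"Unknown"` if none is found.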
fn extract_type_name(
    target_type: &unsynn::Many<
        unsynn::Cons<
            unsynn::Except<unsynn::Either<crate::parse::KFor, unsynn::BraceGroup>>,
            proc_macro2::TokenTree,
        >,
    >,
) -> String {
    // Extract just the type name from the target_type tokens
    // This is a simplified version - for complex cases we might need more sophistication
    if let Some(first) = target_type.0.first() {
        if let proc_macro2::TokenTree::Ident(ident) = &first.value.second {
            return ident.to_string();
        }
    }
    "Unknown".to_string()
}

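/// Returns the first identifier in the tokens following `for` in a trait impl,
/// or `"Unknown"` if none is found.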
fn extract_first_ident_from_tokens(
    tokens: &unsynn::Many<unsynn::Cons<unsynn::Except<unsynn::BraceGroup>, proc_macro2::TokenTree>>,
) -> String {
    if let Some(first) = tokens.0.first() {
        if let proc_macro2::TokenTree::Ident(ident) = &first.value.second {
            return ident.to_string();
        }
    }
    "Unknown".to_string()
}