1use crate::{inject_doc_attr, syncdoc_impl};
2use proc_macro2::TokenStream;
3use unsynn::*;
4
5use crate::parse::{ImplBlockSig, ModuleContent, ModuleItem, ModuleSig, TraitSig};
6
/// Walks a token stream of Rust items and injects doc attributes whose paths
/// are derived from each item's name and its enclosing module/type context.
pub struct TokenProcessor {
    // Raw tokens to process; returned unchanged (cloned) when they do not
    // parse as `ModuleContent`.
    input: TokenStream,
    // Root path segment prepended to every generated doc path.
    base_path: String,
    // Optional attribute string forwarded to `inject_doc_attr` for every item.
    cfg_attr: Option<String>,
    // Names of enclosing modules/traits/impl targets, joined with `/` into
    // the doc path; grows as processing recurses into nested items.
    context: Vec<String>,
}
13
14impl TokenProcessor {
15 pub fn new(input: TokenStream, base_path: String, cfg_attr: Option<String>) -> Self {
16 Self {
17 input,
18 base_path,
19 cfg_attr,
20 context: Vec::new(),
21 }
22 }
23
24 pub fn process(self) -> TokenStream {
25 match self
26 .input
27 .clone()
28 .into_token_iter()
29 .parse::<ModuleContent>()
30 {
31 Ok(_parsed) => self.process_module_content(),
32 Err(_) => {
33 self.input
35 }
36 }
37 }
38
39 fn process_module_content(&self) -> TokenStream {
40 let mut output = TokenStream::new();
41
42 let content = match self
43 .input
44 .clone()
45 .into_token_iter()
46 .parse::<ModuleContent>()
47 {
48 Ok(c) => c,
49 Err(_) => return self.input.clone(),
50 };
51
52 for item in content.items.0 {
53 let processed_item = self.process_module_item(item.value);
54 output.extend(processed_item);
55 }
56
57 output
58 }
59
60 fn process_module_item(&self, item: ModuleItem) -> TokenStream {
61 match item {
62 ModuleItem::Function(func_sig) => {
63 let mut func_tokens = TokenStream::new();
64 quote::ToTokens::to_tokens(&func_sig, &mut func_tokens);
65 self.inject_doc_into_item(func_tokens, &func_sig.name.to_string())
66 }
67 ModuleItem::ImplBlock(impl_block) => self.process_impl_block(impl_block),
68 ModuleItem::Module(module) => self.process_module_block(module),
69 ModuleItem::Trait(trait_def) => self.process_trait_block(trait_def),
70 ModuleItem::Enum(enum_sig) => self.process_enum(enum_sig),
71 ModuleItem::Struct(struct_sig) => self.process_struct(struct_sig),
72 ModuleItem::TypeAlias(type_alias) => {
73 let mut alias_tokens = TokenStream::new();
74 quote::ToTokens::to_tokens(&type_alias, &mut alias_tokens);
75 self.inject_doc_into_simple_item(alias_tokens, &type_alias.name.to_string())
76 }
77 ModuleItem::Const(const_sig) => {
78 let mut const_tokens = TokenStream::new();
79 quote::ToTokens::to_tokens(&const_sig, &mut const_tokens);
80 self.inject_doc_into_simple_item(const_tokens, &const_sig.name.to_string())
81 }
82 ModuleItem::Static(static_sig) => {
83 let mut static_tokens = TokenStream::new();
84 quote::ToTokens::to_tokens(&static_sig, &mut static_tokens);
85 self.inject_doc_into_simple_item(static_tokens, &static_sig.name.to_string())
86 }
87 ModuleItem::Other(token) => {
88 let mut tokens = TokenStream::new();
89 token.to_tokens(&mut tokens);
90 tokens
91 }
92 }
93 }
94
    /// Rewrites an `impl` block: recursively processes its items with a
    /// doc-path context extended by the implementing type (and trait name,
    /// for trait impls), then re-emits the header with a rebuilt body.
    fn process_impl_block(&self, impl_block: ImplBlockSig) -> TokenStream {
        // NOTE(review): for `impl Trait for Type` syntax, the tokens parsed
        // into `target_type` are those *before* `for` (the trait) and
        // `for_trait.second` holds those *after* (the type) — hence
        // `target_type` yields `trait_name` here. The resulting doc path is
        // ordered `Type/Trait`. Confirm against the `ImplBlockSig` parser.
        let context_path = if let Some(for_trait) = &impl_block.for_trait {
            let trait_name = extract_type_name(&impl_block.target_type);
            let type_name = extract_first_ident_from_tokens(&for_trait.second);
            vec![type_name, trait_name]
        } else {
            // Inherent impl: only the type name joins the context.
            let type_name = extract_type_name(&impl_block.target_type);
            vec![type_name]
        };

        let mut new_context = self.context.clone();
        new_context.extend(context_path);

        let module_content = &impl_block.items.content;

        // Child processor carries the extended context; its `input` is unused
        // because items are fed straight into `process_module_item`.
        let new_processor = TokenProcessor {
            input: TokenStream::new(),
            base_path: self.base_path.clone(),
            cfg_attr: self.cfg_attr.clone(),
            context: new_context,
        };

        let mut processed_content = TokenStream::new();
        for item_delimited in &module_content.items.0 {
            processed_content
                .extend(new_processor.process_module_item(item_delimited.value.clone()));
        }

        // Re-emit the header verbatim: attributes, `impl`, generics, target
        // type tokens, optional `for Trait`, optional where-clause.
        let mut output = TokenStream::new();
        if let Some(attrs) = impl_block.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }
        impl_block._impl.to_tokens(&mut output);
        if let Some(generics) = impl_block.generics {
            generics.to_tokens(&mut output);
        }
        for item in impl_block.target_type.0 {
            item.value.second.to_tokens(&mut output);
        }
        if let Some(for_part) = impl_block.for_trait {
            for_part.to_tokens(&mut output);
        }
        if let Some(where_clause) = impl_block.where_clause {
            where_clause.to_tokens(&mut output);
        }

        // Reattach the processed body inside fresh braces.
        let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_content);
        output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));

        output
    }
159
160 fn process_module_block(&self, module: ModuleSig) -> TokenStream {
161 let mut new_context = self.context.clone();
162 new_context.push(module.name.to_string());
163
164 let module_content = &module.items.content;
166
167 let new_processor = TokenProcessor {
168 input: TokenStream::new(),
169 base_path: self.base_path.clone(),
170 cfg_attr: self.cfg_attr.clone(),
171 context: new_context,
172 };
173
174 let mut processed_content = TokenStream::new();
175 for item_delimited in &module_content.items.0 {
176 processed_content
177 .extend(new_processor.process_module_item(item_delimited.value.clone()));
178 }
179
180 let mut output = TokenStream::new();
182 if let Some(attrs) = module.attributes {
183 for attr in attrs.0 {
184 attr.to_tokens(&mut output);
185 }
186 }
187 if let Some(vis) = module.visibility {
188 vis.to_tokens(&mut output);
189 }
190 module._mod.to_tokens(&mut output);
191 module.name.to_tokens(&mut output);
192
193 let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_content);
194 output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));
195
196 output
197 }
198
199 fn process_trait_block(&self, trait_def: TraitSig) -> TokenStream {
200 let mut new_context = self.context.clone();
201 new_context.push(trait_def.name.to_string());
202
203 let trait_content = &trait_def.items.content;
205
206 let new_processor = TokenProcessor {
207 input: TokenStream::new(),
208 base_path: self.base_path.clone(),
209 cfg_attr: self.cfg_attr.clone(),
210 context: new_context,
211 };
212
213 let mut processed_content = TokenStream::new();
214 for item_delimited in &trait_content.items.0 {
215 processed_content
216 .extend(new_processor.process_module_item(item_delimited.value.clone()));
217 }
218
219 let mut output = TokenStream::new();
221 if let Some(attrs) = trait_def.attributes {
222 for attr in attrs.0 {
223 attr.to_tokens(&mut output);
224 }
225 }
226 if let Some(vis) = trait_def.visibility {
227 vis.to_tokens(&mut output);
228 }
229 if let Some(unsafe_kw) = trait_def.unsafe_kw {
230 unsafe_kw.to_tokens(&mut output);
231 }
232 trait_def._trait.to_tokens(&mut output);
233 trait_def.name.to_tokens(&mut output);
234 if let Some(generics) = trait_def.generics {
235 generics.to_tokens(&mut output);
236 }
237 if let Some(bounds) = trait_def.bounds {
238 bounds.to_tokens(&mut output);
239 }
240 if let Some(where_clause) = trait_def.where_clause {
241 where_clause.to_tokens(&mut output);
242 }
243
244 let trait_name = trait_def.name.to_string();
245 let trait_with_doc = self.inject_doc_into_simple_item(output, &trait_name);
246
247 let mut final_output = trait_with_doc;
249 let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_content);
250 final_output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));
251
252 final_output
253 }
254
    /// Rewrites a `struct` definition: injects a doc attribute for the
    /// struct itself and, for non-empty named-field structs, one per field.
    fn process_struct(&self, struct_sig: crate::parse::StructSig) -> TokenStream {
        let struct_name = struct_sig.name.to_string();

        // Only braced structs with at least one field get per-field docs;
        // empty `{}`, tuple, and unit bodies are re-emitted verbatim.
        let processed_body = match &struct_sig.body {
            crate::parse::StructBody::Named(fields_containing) => {
                if let Some(fields_cdv) = fields_containing.content.as_ref() {
                    let processed_fields = self.process_struct_fields(fields_cdv, &struct_name);
                    let group =
                        proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_fields);
                    let mut ts = TokenStream::new();
                    ts.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));
                    ts
                } else {
                    // Empty brace group: serialize via unsynn's ToTokens
                    // (deliberately not the quote:: trait used elsewhere).
                    let mut ts = TokenStream::new();
                    unsynn::ToTokens::to_tokens(fields_containing, &mut ts);
                    ts
                }
            }
            other => {
                let mut ts = TokenStream::new();
                quote::ToTokens::to_tokens(other, &mut ts);
                ts
            }
        };

        // Rebuild the header: attributes, visibility, `struct`, name,
        // generics, where-clause.
        let mut output = TokenStream::new();
        if let Some(attrs) = struct_sig.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }
        if let Some(vis) = struct_sig.visibility {
            vis.to_tokens(&mut output);
        }
        struct_sig._struct.to_tokens(&mut output);
        struct_sig.name.to_tokens(&mut output);
        if let Some(generics) = struct_sig.generics {
            generics.to_tokens(&mut output);
        }
        if let Some(where_clause) = struct_sig.where_clause {
            where_clause.to_tokens(&mut output);
        }

        // Doc attribute on the header, then the (possibly rewritten) body.
        let struct_with_doc = self.inject_doc_into_simple_item(output, &struct_name);
        let mut final_output = struct_with_doc;
        final_output.extend(processed_body);

        final_output
    }
306
307 fn process_struct_fields(
308 &self,
309 fields_cdv: &CommaDelimitedVec<crate::parse::StructField>,
310 struct_name: &str,
311 ) -> TokenStream {
312 let mut output = TokenStream::new();
313
314 for (idx, field_delimited) in fields_cdv.0.iter().enumerate() {
315 let field = &field_delimited.value;
316 let field_name = field.name.to_string();
317
318 let mut field_tokens = TokenStream::new();
319 quote::ToTokens::to_tokens(field, &mut field_tokens);
320
321 let documented =
322 self.inject_doc_for_struct_field(field_tokens, struct_name, &field_name);
323 output.extend(documented);
324
325 if idx < fields_cdv.0.len() - 1 {
326 output.extend(quote::quote! { , });
327 }
328 }
329
330 output
331 }
332
333 fn inject_doc_for_struct_field(
334 &self,
335 field_tokens: TokenStream,
336 struct_name: &str,
337 field_name: &str,
338 ) -> TokenStream {
339 let mut path_parts = vec![self.base_path.clone()];
340 path_parts.extend(self.context.iter().cloned());
341 path_parts.push(format!("{}/{}.md", struct_name, field_name));
342
343 let full_path = path_parts.join("/");
344 inject_doc_attr(full_path, self.cfg_attr.clone(), field_tokens)
345 }
346
347 fn process_enum(&self, enum_sig: crate::parse::EnumSig) -> TokenStream {
348 let enum_name = enum_sig.name.to_string();
349
350 let processed_variants = if let Some(variants_cdv) = enum_sig.variants.content.as_ref() {
352 self.process_enum_variants(variants_cdv, &enum_name)
353 } else {
354 TokenStream::new()
355 };
356
357 let mut output = TokenStream::new();
359 if let Some(attrs) = enum_sig.attributes {
360 for attr in attrs.0 {
361 attr.to_tokens(&mut output);
362 }
363 }
364 if let Some(vis) = enum_sig.visibility {
365 vis.to_tokens(&mut output);
366 }
367 enum_sig._enum.to_tokens(&mut output);
368 enum_sig.name.to_tokens(&mut output);
369 if let Some(generics) = enum_sig.generics {
370 generics.to_tokens(&mut output);
371 }
372 if let Some(where_clause) = enum_sig.where_clause {
373 where_clause.to_tokens(&mut output);
374 }
375
376 let enum_with_doc = self.inject_doc_into_simple_item(output, &enum_name);
377 let mut final_output = enum_with_doc;
378 let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, processed_variants);
379 final_output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));
380
381 final_output
382 }
383
384 fn process_enum_variants(
385 &self,
386 variants_cdv: &CommaDelimitedVec<crate::parse::EnumVariant>,
387 enum_name: &str,
388 ) -> TokenStream {
389 let mut output = TokenStream::new();
390
391 for (idx, variant_delimited) in variants_cdv.0.iter().enumerate() {
392 let variant = &variant_delimited.value;
393 let variant_name = variant.name.to_string();
394
395 let mut variant_tokens = TokenStream::new();
396 quote::ToTokens::to_tokens(variant, &mut variant_tokens);
397
398 let documented =
399 self.inject_doc_for_enum_variant(variant_tokens, enum_name, &variant_name);
400 output.extend(documented);
401
402 if idx < variants_cdv.0.len() - 1 {
403 output.extend(quote::quote! { , });
404 }
405 }
406
407 output
408 }
409
410 fn inject_doc_for_enum_variant(
411 &self,
412 variant_tokens: TokenStream,
413 enum_name: &str,
414 variant_name: &str,
415 ) -> TokenStream {
416 let mut path_parts = vec![self.base_path.clone()];
417 path_parts.extend(self.context.iter().cloned());
418 path_parts.push(format!("{}/{}.md", enum_name, variant_name));
419
420 let full_path = path_parts.join("/");
421 inject_doc_attr(full_path, self.cfg_attr.clone(), variant_tokens)
422 }
423
424 fn inject_doc_into_item(&self, func_tokens: TokenStream, fn_name: &str) -> TokenStream {
425 let mut path_parts = vec![self.base_path.clone()];
426 path_parts.extend(self.context.iter().cloned());
427 path_parts.push(format!("{}.md", fn_name));
428
429 let full_path = path_parts.join("/");
430 let args = quote::quote! { path = #full_path };
431
432 match syncdoc_impl(args, func_tokens.clone()) {
433 Ok(instrumented) => instrumented,
434 Err(e) => {
435 eprintln!("syncdoc_impl failed: {}", e);
436 func_tokens
437 }
438 }
439 }
440
441 fn inject_doc_into_simple_item(
442 &self,
443 item_tokens: TokenStream,
444 item_name: &str,
445 ) -> TokenStream {
446 let mut path_parts = vec![self.base_path.clone()];
447 path_parts.extend(self.context.iter().cloned());
448 path_parts.push(format!("{}.md", item_name));
449
450 let full_path = path_parts.join("/");
451 inject_doc_attr(full_path, self.cfg_attr.clone(), item_tokens)
452 }
453}
454
455fn extract_type_name(
456 target_type: &unsynn::Many<
457 unsynn::Cons<
458 unsynn::Except<unsynn::Either<crate::parse::KFor, unsynn::BraceGroup>>,
459 proc_macro2::TokenTree,
460 >,
461 >,
462) -> String {
463 if let Some(first) = target_type.0.first() {
464 if let proc_macro2::TokenTree::Ident(ident) = &first.value.second {
465 return ident.to_string();
466 }
467 }
468 "Unknown".to_string()
469}
470
471fn extract_first_ident_from_tokens(
472 tokens: &unsynn::Many<unsynn::Cons<unsynn::Except<unsynn::BraceGroup>, proc_macro2::TokenTree>>,
473) -> String {
474 if let Some(first) = tokens.0.first() {
475 if let proc_macro2::TokenTree::Ident(ident) = &first.value.second {
476 return ident.to_string();
477 }
478 }
479 "Unknown".to_string()
480}