1use crate::{inject_doc_attr, syncdoc_impl};
2use proc_macro2::TokenStream;
3use unsynn::*;
4
5use crate::parse::{ImplBlockSig, ModuleContent, ModuleItem, ModuleSig, TraitSig};
6
/// Recursively walks a token stream and injects doc attributes (via
/// `inject_doc_attr` / `syncdoc_impl`) into the items it recognizes,
/// deriving each item's markdown path from `base_path` plus the nesting
/// `context`.
pub struct TokenProcessor {
    // Raw token stream being processed; returned unchanged whenever it
    // cannot be parsed as `ModuleContent`.
    input: TokenStream,
    // First segment of every generated doc-file path.
    base_path: String,
    // Optional cfg attribute string forwarded to `inject_doc_attr`.
    cfg_attr: Option<String>,
    // Names of the enclosing modules/impls/traits; joined with `/` into
    // the doc path for nested items.
    context: Vec<String>,
}
13
14impl TokenProcessor {
15 pub fn new(input: TokenStream, base_path: String, cfg_attr: Option<String>) -> Self {
16 Self {
17 input,
18 base_path,
19 cfg_attr,
20 context: Vec::new(),
21 }
22 }
23
24 pub fn process(self) -> TokenStream {
25 match self
26 .input
27 .clone()
28 .into_token_iter()
29 .parse::<ModuleContent>()
30 {
31 Ok(_parsed) => self.process_module_content(),
32 Err(_) => {
33 self.input
35 }
36 }
37 }
38
39 fn process_module_content(&self) -> TokenStream {
40 let mut output = TokenStream::new();
41
42 let content = match self
43 .input
44 .clone()
45 .into_token_iter()
46 .parse::<ModuleContent>()
47 {
48 Ok(c) => c,
49 Err(_) => return self.input.clone(),
50 };
51
52 for item in content.items.0 {
53 let processed_item = self.process_module_item(item.value);
54 output.extend(processed_item);
55 }
56
57 output
58 }
59
60 fn process_module_item(&self, item: ModuleItem) -> TokenStream {
61 match item {
62 ModuleItem::Function(func_sig) => {
63 let mut func_tokens = TokenStream::new();
64 quote::ToTokens::to_tokens(&func_sig, &mut func_tokens);
65 self.inject_doc_into_item(func_tokens, &func_sig.name.to_string())
66 }
67 ModuleItem::ImplBlock(impl_block) => self.process_impl_block(impl_block),
68 ModuleItem::Module(module) => self.process_module_block(module),
69 ModuleItem::Trait(trait_def) => self.process_trait_block(trait_def),
70 ModuleItem::Enum(enum_sig) => self.process_enum(enum_sig),
71 ModuleItem::Struct(struct_sig) => self.process_struct(struct_sig),
72 ModuleItem::TypeAlias(type_alias) => {
73 let mut alias_tokens = TokenStream::new();
74 quote::ToTokens::to_tokens(&type_alias, &mut alias_tokens);
75 self.inject_doc_into_simple_item(alias_tokens, &type_alias.name.to_string())
76 }
77 ModuleItem::Const(const_sig) => {
78 let mut const_tokens = TokenStream::new();
79 quote::ToTokens::to_tokens(&const_sig, &mut const_tokens);
80 self.inject_doc_into_simple_item(const_tokens, &const_sig.name.to_string())
81 }
82 ModuleItem::Static(static_sig) => {
83 let mut static_tokens = TokenStream::new();
84 quote::ToTokens::to_tokens(&static_sig, &mut static_tokens);
85 self.inject_doc_into_simple_item(static_tokens, &static_sig.name.to_string())
86 }
87 ModuleItem::Other(token) => {
88 let mut tokens = TokenStream::new();
89 token.to_tokens(&mut tokens);
90 tokens
91 }
92 }
93 }
94
    /// Processes an `impl` block: recurses into the body with an extended
    /// context so nested items get doc paths under the impl's names, then
    /// re-emits the impl header around the processed body.
    fn process_impl_block(&self, impl_block: ImplBlockSig) -> TokenStream {
        // Context segments appended to the doc path for items inside this
        // impl. NOTE(review): in a trait impl, `target_type` holds the
        // tokens *before* `for` and `for_trait.second` the tokens after it,
        // so the local names `trait_name`/`type_name` look swapped relative
        // to `impl Trait for Type` — confirm against the `ImplBlockSig`
        // grammar in `crate::parse` before relying on these labels.
        let context_path = if let Some(for_trait) = &impl_block.for_trait {
            let trait_name = extract_type_name(&impl_block.target_type);
            let type_name = extract_first_ident_from_tokens(&for_trait.second);
            vec![type_name, trait_name]
        } else {
            let type_name = extract_type_name(&impl_block.target_type);
            vec![type_name]
        };

        // Unwrap the brace-delimited body group to get its interior tokens;
        // an unexpected shape degrades to an empty body.
        let body_stream = {
            let mut ts = TokenStream::new();
            impl_block.body.to_tokens(&mut ts);
            if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
                group.stream()
            } else {
                TokenStream::new()
            }
        };

        // Recurse with the impl's names appended to the current context.
        let mut new_context = self.context.clone();
        new_context.extend(context_path);
        let new_processor = TokenProcessor {
            input: body_stream,
            base_path: self.base_path.clone(),
            cfg_attr: self.cfg_attr.clone(),
            context: new_context,
        };

        let processed_content = new_processor.process();
        let processed_body = self.wrap_in_braces(processed_content);

        // Re-emit the impl header in source order: attributes, `impl`,
        // generics, target type, `for ...`, where-clause, then the body.
        let mut output = TokenStream::new();

        if let Some(attrs) = impl_block.attributes {
            for attr in attrs.0 {
                attr.to_tokens(&mut output);
            }
        }

        impl_block._impl.to_tokens(&mut output);
        if let Some(generics) = impl_block.generics {
            generics.to_tokens(&mut output);
        }

        // Each element is a Cons<Except<..>, TokenTree>; only `second`
        // carries a printable token.
        for item in impl_block.target_type.0 {
            item.value.second.to_tokens(&mut output);
        }

        if let Some(for_part) = impl_block.for_trait {
            for_part.to_tokens(&mut output);
        }

        if let Some(where_clause) = impl_block.where_clause {
            where_clause.to_tokens(&mut output);
        }

        output.extend(processed_body);

        output
    }
167
168 fn process_module_block(&self, module: ModuleSig) -> TokenStream {
169 let body_stream = {
171 let mut ts = TokenStream::new();
172 module.body.to_tokens(&mut ts);
173 if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
175 group.stream()
176 } else {
177 TokenStream::new()
178 }
179 };
180
181 let mut new_context = self.context.clone();
183 new_context.push(module.name.to_string());
184 let new_processor = TokenProcessor {
185 input: body_stream,
186 base_path: self.base_path.clone(),
187 cfg_attr: self.cfg_attr.clone(),
188 context: new_context,
189 };
190
191 let processed_content = new_processor.process();
192 let processed_body = self.wrap_in_braces(processed_content);
193
194 let mut output = TokenStream::new();
196
197 if let Some(attrs) = module.attributes {
198 for attr in attrs.0 {
199 attr.to_tokens(&mut output);
200 }
201 }
202
203 if let Some(vis) = module.visibility {
204 vis.to_tokens(&mut output);
205 }
206
207 module._mod.to_tokens(&mut output);
208 module.name.to_tokens(&mut output);
209
210 output.extend(processed_body);
211
212 output
213 }
214
215 fn process_trait_block(&self, trait_def: TraitSig) -> TokenStream {
216 let body_stream = {
218 let mut ts = TokenStream::new();
219 trait_def.body.to_tokens(&mut ts);
220 if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
222 group.stream()
223 } else {
224 TokenStream::new()
225 }
226 };
227
228 let mut new_context = self.context.clone();
230 new_context.push(trait_def.name.to_string());
231 let new_processor = TokenProcessor {
232 input: body_stream,
233 base_path: self.base_path.clone(),
234 cfg_attr: self.cfg_attr.clone(),
235 context: new_context,
236 };
237
238 let processed_content = new_processor.process();
239 let processed_body = self.wrap_in_braces(processed_content);
240
241 let mut output = TokenStream::new();
243
244 if let Some(attrs) = trait_def.attributes {
245 for attr in attrs.0 {
246 attr.to_tokens(&mut output);
247 }
248 }
249
250 if let Some(vis) = trait_def.visibility {
251 vis.to_tokens(&mut output);
252 }
253
254 if let Some(unsafe_kw) = trait_def.unsafe_kw {
255 unsafe_kw.to_tokens(&mut output);
256 }
257
258 trait_def._trait.to_tokens(&mut output);
259 trait_def.name.to_tokens(&mut output);
260
261 if let Some(generics) = trait_def.generics {
262 generics.to_tokens(&mut output);
263 }
264
265 if let Some(bounds) = trait_def.bounds {
266 bounds.to_tokens(&mut output);
267 }
268
269 if let Some(where_clause) = trait_def.where_clause {
270 where_clause.to_tokens(&mut output);
271 }
272
273 let trait_name = trait_def.name.to_string();
275 let trait_with_doc = self.inject_doc_into_simple_item(output, &trait_name);
276
277 let mut final_output = trait_with_doc;
279 final_output.extend(processed_body);
280
281 final_output
282 }
283
284 fn process_struct(&self, struct_sig: crate::parse::StructSig) -> TokenStream {
285 let struct_name = struct_sig.name.to_string();
286
287 let processed_body = match &struct_sig.body {
289 crate::parse::StructBody::Named(brace_group) => {
290 let body_stream = {
292 let mut ts = TokenStream::new();
293 brace_group.to_tokens(&mut ts);
294 if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
296 group.stream()
297 } else {
298 TokenStream::new()
299 }
300 };
301
302 let processed_fields = self.process_struct_fields(body_stream, &struct_name);
303 self.wrap_in_braces(processed_fields)
304 }
305 crate::parse::StructBody::Tuple(tuple) => {
306 let mut ts = TokenStream::new();
308 tuple.to_tokens(&mut ts);
309 ts
310 }
311 crate::parse::StructBody::Unit(semi) => {
312 let mut ts = TokenStream::new();
314 semi.to_tokens(&mut ts);
315 ts
316 }
317 };
318
319 let mut output = TokenStream::new();
321
322 if let Some(attrs) = struct_sig.attributes {
323 for attr in attrs.0 {
324 attr.to_tokens(&mut output);
325 }
326 }
327
328 if let Some(vis) = struct_sig.visibility {
329 vis.to_tokens(&mut output);
330 }
331
332 struct_sig._struct.to_tokens(&mut output);
333
334 let name_ident = struct_sig.name;
335 name_ident.to_tokens(&mut output);
336
337 if let Some(generics) = struct_sig.generics {
338 generics.to_tokens(&mut output);
339 }
340
341 if let Some(where_clause) = struct_sig.where_clause {
342 where_clause.to_tokens(&mut output);
343 }
344
345 let struct_with_doc = self.inject_doc_into_simple_item(output, &struct_name);
347
348 let mut final_output = struct_with_doc;
350 final_output.extend(processed_body);
351
352 final_output
353 }
354
355 fn process_struct_fields(&self, fields_stream: TokenStream, struct_name: &str) -> TokenStream {
356 let mut output = TokenStream::new();
357
358 let fields = match fields_stream
360 .into_token_iter()
361 .parse::<unsynn::CommaDelimitedVec<crate::parse::StructField>>()
362 {
363 Ok(fields) => fields,
364 Err(_) => return output, };
366
367 for (idx, field_delimited) in fields.0.iter().enumerate() {
368 let field = &field_delimited.value;
369 let field_name = field.name.to_string();
370
371 let mut field_tokens = TokenStream::new();
373 quote::ToTokens::to_tokens(field, &mut field_tokens);
374
375 let documented =
377 self.inject_doc_for_struct_field(field_tokens, struct_name, &field_name);
378 output.extend(documented);
379
380 if idx < fields.0.len() - 1 {
382 output.extend(quote::quote! { , });
383 }
384 }
385
386 output
387 }
388
389 fn inject_doc_for_struct_field(
390 &self,
391 field_tokens: TokenStream,
392 struct_name: &str,
393 field_name: &str,
394 ) -> TokenStream {
395 let mut path_parts = vec![self.base_path.clone()];
396 path_parts.extend(self.context.iter().cloned());
397 path_parts.push(format!("{}/{}.md", struct_name, field_name));
398
399 let full_path = path_parts.join("/");
400
401 inject_doc_attr(full_path, self.cfg_attr.clone(), field_tokens)
403 }
404
405 fn process_enum(&self, enum_sig: crate::parse::EnumSig) -> TokenStream {
406 let enum_name = enum_sig.name.to_string();
407
408 let body_stream = {
410 let mut ts = TokenStream::new();
411 enum_sig.body.to_tokens(&mut ts);
412 if let Some(proc_macro2::TokenTree::Group(group)) = ts.into_iter().next() {
414 group.stream()
415 } else {
416 TokenStream::new()
417 }
418 };
419
420 let processed_variants = self.process_enum_variants(body_stream, &enum_name);
422 let processed_body = self.wrap_in_braces(processed_variants);
423
424 let mut output = TokenStream::new();
426
427 if let Some(attrs) = enum_sig.attributes {
428 for attr in attrs.0 {
429 attr.to_tokens(&mut output);
430 }
431 }
432
433 if let Some(vis) = enum_sig.visibility {
434 vis.to_tokens(&mut output);
435 }
436
437 enum_sig._enum.to_tokens(&mut output);
438
439 let name_ident = enum_sig.name;
440 name_ident.to_tokens(&mut output);
441
442 if let Some(generics) = enum_sig.generics {
443 generics.to_tokens(&mut output);
444 }
445
446 if let Some(where_clause) = enum_sig.where_clause {
447 where_clause.to_tokens(&mut output);
448 }
449
450 let enum_with_doc = self.inject_doc_into_simple_item(output, &enum_name);
452
453 let mut final_output = enum_with_doc;
455 final_output.extend(processed_body);
456
457 final_output
458 }
459
460 fn process_enum_variants(&self, variants_stream: TokenStream, enum_name: &str) -> TokenStream {
461 let mut output = TokenStream::new();
462
463 let variants = match variants_stream
465 .into_token_iter()
466 .parse::<unsynn::CommaDelimitedVec<crate::parse::EnumVariant>>()
467 {
468 Ok(variants) => variants,
469 Err(_) => return output, };
471
472 for (idx, variant_delimited) in variants.0.iter().enumerate() {
473 let variant = &variant_delimited.value;
474 let variant_name = variant.name.to_string();
475
476 let mut variant_tokens = TokenStream::new();
478 quote::ToTokens::to_tokens(variant, &mut variant_tokens);
479
480 let documented =
482 self.inject_doc_for_enum_variant(variant_tokens, enum_name, &variant_name);
483 output.extend(documented);
484
485 if idx < variants.0.len() - 1 {
487 output.extend(quote::quote! { , });
488 }
489 }
490
491 output
492 }
493
494 fn inject_doc_for_enum_variant(
495 &self,
496 variant_tokens: TokenStream,
497 enum_name: &str,
498 variant_name: &str,
499 ) -> TokenStream {
500 let mut path_parts = vec![self.base_path.clone()];
501 path_parts.extend(self.context.iter().cloned());
502 path_parts.push(format!("{}/{}.md", enum_name, variant_name));
503
504 let full_path = path_parts.join("/");
505
506 inject_doc_attr(full_path, self.cfg_attr.clone(), variant_tokens)
508 }
509
510 fn wrap_in_braces(&self, content: TokenStream) -> TokenStream {
511 let mut output = TokenStream::new();
512 let group = proc_macro2::Group::new(proc_macro2::Delimiter::Brace, content);
513 output.extend(std::iter::once(proc_macro2::TokenTree::Group(group)));
514 output
515 }
516
517 fn inject_doc_into_item(&self, func_tokens: TokenStream, fn_name: &str) -> TokenStream {
518 let mut path_parts = vec![self.base_path.clone()];
520 path_parts.extend(self.context.iter().cloned());
521 path_parts.push(format!("{}.md", fn_name));
522
523 let full_path = path_parts.join("/");
524
525 let args = quote::quote! { path = #full_path };
527
528 match syncdoc_impl(args, func_tokens.clone()) {
529 Ok(instrumented) => instrumented,
530 Err(e) => {
531 eprintln!("syncdoc_impl failed: {}", e);
532 func_tokens }
534 }
535 }
536
537 fn inject_doc_into_simple_item(
538 &self,
539 item_tokens: TokenStream,
540 item_name: &str,
541 ) -> TokenStream {
542 let mut path_parts = vec![self.base_path.clone()];
544 path_parts.extend(self.context.iter().cloned());
545 path_parts.push(format!("{}.md", item_name));
546
547 let full_path = path_parts.join("/");
548
549 inject_doc_attr(full_path, self.cfg_attr.clone(), item_tokens)
551 }
552}
553
554fn extract_type_name(
555 target_type: &unsynn::Many<
556 unsynn::Cons<
557 unsynn::Except<unsynn::Either<crate::parse::KFor, unsynn::BraceGroup>>,
558 proc_macro2::TokenTree,
559 >,
560 >,
561) -> String {
562 if let Some(first) = target_type.0.first() {
565 if let proc_macro2::TokenTree::Ident(ident) = &first.value.second {
566 return ident.to_string();
567 }
568 }
569 "Unknown".to_string()
570}
571
572fn extract_first_ident_from_tokens(
573 tokens: &unsynn::Many<unsynn::Cons<unsynn::Except<unsynn::BraceGroup>, proc_macro2::TokenTree>>,
574) -> String {
575 if let Some(first) = tokens.0.first() {
576 if let proc_macro2::TokenTree::Ident(ident) = &first.value.second {
577 return ident.to_string();
578 }
579 }
580 "Unknown".to_string()
581}