bonfida_autodoc/lib.rs
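//! Autogenerate Rust documentation for program instructions.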

use std::{collections::HashMap, fmt::Write, time::Instant};

use clap::{crate_name, Arg, ArgMatches, Command};
use convert_case::{Case, Casing};
use proc_macro2::Span;
use quote::{quote, ToTokens};
use syn::{
    punctuated::Punctuated, token::Comma, Field, Item, ItemEnum, Token, Type, TypePath,
    TypeReference, Variant,
};
use utils::{
    boolean_to_emoji, find_struct, get_constraints_and_doc, get_struct_fields, strip_docs,
};

use crate::utils::generate_table;

pub mod utils;

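// Symbolic account-count placeholders used in index expressions when an
// `Accounts` field is a slice of variable length (e.g. `2 + N`).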
const LITS: &[&str] = &["N", "M", "P", "Q", "R"];

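/// Builds the clap `Command` for the CLI, taking `instructions_path`
/// (default `src/processor`) and `instructions_enum_path`
/// (default `src/instruction.rs`) as positional arguments.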
pub fn command() -> Command<'static> {
    Command::new(crate_name!())
        .version("0.1")
        .author("Bonfida")
        .about("Autogenerate Rust documentation for instructions")
        .arg(
            Arg::with_name("instructions_path")
                .takes_value(true)
                .default_value("src/processor"),
        )
        .arg(
            Arg::with_name("instructions_enum_path")
                .takes_value(true)
                .default_value("src/instruction.rs"),
        )
}

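/// CLI entry point: reads the matched arguments and writes the regenerated
/// instruction enum to `src/instruction_auto.rs`.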
pub fn process(matches: &ArgMatches) {
    let instructions_path = matches.value_of("instructions_path").unwrap();
    let instructions_enum_path = matches.value_of("instructions_enum_path").unwrap();
    generate(
        instructions_path,
        instructions_enum_path,
        "src/instruction_auto.rs",
    );
}

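/// Walks the processor directory, builds an accounts table for every
/// instruction module, and rewrites the instruction enum so that each variant
/// carries the generated documentation.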
pub fn generate(instructions_path: &str, instructions_enum_path: &str, output_path: &str) {
    let now = Instant::now();
    let path = std::path::Path::new(instructions_path);
    let directory = std::fs::read_dir(path).unwrap();
    let accounts_table_columns = [
        "Index".to_owned(),
        "Writable".to_owned(),
        "Signer".to_owned(),
        "Description".to_owned(),
    ];
    let mut instruction_docs = HashMap::new();
    for d in directory {
        let file = d.unwrap();
        let module_name = std::path::Path::new(&file.file_name())
            .file_stem()
            .unwrap()
            .to_str()
            .unwrap()
            .to_owned();
        let (module_doc, accounts_descriptors) = parse_instruction(file.path().to_str().unwrap());
        let table = generate_table(&accounts_table_columns, &accounts_descriptors);
        let instruction_documentation =
            [&module_doc as &[String], &["".to_owned()], &table].concat();
        instruction_docs.insert(snake_to_pascal(&module_name), instruction_documentation);
    }

    process_instructions(instructions_enum_path, output_path, &instruction_docs);

    // let mut out_file = File::create(output_path).unwrap();
    // out_file.write_all(output.as_bytes()).unwrap();

    let elapsed = now.elapsed();
    println!("✨  Done in {:.2?}", elapsed);
}

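/// Parses the instruction enum file, replaces each variant's doc attributes
/// with the generated documentation, formats the result with rustfmt, turns
/// the `#[doc = "..."]` attributes back into `///` comments, and writes the
/// output to `out_path`.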
fn process_instructions(
    instructions_path: &str,
    out_path: &str,
    instruction_docs: &HashMap<String, Vec<String>>,
) {
    let path = std::path::Path::new(instructions_path);
    let raw_file = std::fs::read_to_string(path).unwrap();
    let mut file_ast: syn::File = syn::parse_str(&raw_file).unwrap();
    let instructions_enum = find_enum(&mut file_ast);
    let enum_variants = get_enum_variants(instructions_enum);

    for Variant {
        attrs,
        ident,
        fields: _,
        discriminant: _,
    } in enum_variants
    {
        let instruction_doc = instruction_docs.get(&ident.to_string()).unwrap();
        *attrs = strip_docs(attrs);
        for d in instruction_doc {
            attrs.push(syn::Attribute {
                pound_token: <Token![#]>::default(),
                style: syn::AttrStyle::Outer,
                bracket_token: syn::token::Bracket {
                    span: Span::call_site(),
                },
                path: syn::Path::from(syn::PathSegment::from(syn::Ident::new(
                    "doc",
                    Span::call_site(),
                ))),
                tokens: quote!(= #d),
            });
        }
    }
    let mut t = file_ast.to_token_stream().to_string();
    // std::fs::write(out_path, t).unwrap();
    t = rustfmt_wrapper::rustfmt(&t).unwrap();
    let mut processed = "".to_owned();
    for l in t.lines() {
        if l.contains("#[doc = ") {
            let mut o = l.replace("#[doc = \"", "/// ");
            o.truncate(o.len() - 2);
            processed.write_str(&o).unwrap();
        } else {
            processed.write_str(l).unwrap()
        }
        processed.write_char('\n').unwrap();
    }
    std::fs::write(out_path, processed).unwrap();
}

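/// Parses a single instruction module: returns the module-level doc lines and
/// one table row (index, writable, signer, description) per field of its
/// `Accounts` struct. Slice fields produce index ranges built from the
/// symbolic lengths in `LITS`.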
fn parse_instruction(instruction_path: &str) -> (Vec<String>, Vec<Vec<String>>) {
    let path = std::path::Path::new(instruction_path);
    let raw_file = std::fs::read_to_string(path).unwrap();
    let file_ast: syn::File = syn::parse_str(&raw_file).unwrap();
    let (_, _, file_doc) = get_constraints_and_doc(&file_ast.attrs);
    let accounts_struct = find_struct("Accounts", &file_ast);
    let accounts_fields = get_struct_fields(accounts_struct);
    let mut accounts_descriptors = Vec::with_capacity(accounts_fields.len());
    let mut current_lit_offset = 0;
    for (
        f_idx,
        Field {
            attrs,
            vis: _,
            ident: _,
            colon_token: _,
            ty,
        },
    ) in accounts_fields.iter().enumerate()
    {
        let (writable, signer, doc) = get_constraints_and_doc(attrs);
        let mut index = if current_lit_offset == 0 {
            f_idx.to_string()
        } else {
            format!("{} + {}", f_idx, LITS[..current_lit_offset].join(" + "))
        };
        if is_slice(ty) {
            current_lit_offset += 1;
            index
                .write_str(&format!(
                    "..{} + {}",
                    f_idx,
                    LITS[..current_lit_offset].join(" + "),
                ))
                .unwrap();
        }
        accounts_descriptors.push(vec![
            index,
            boolean_to_emoji(writable).to_string(),
            boolean_to_emoji(signer).to_string(),
            doc.into_iter().next().unwrap_or_else(|| "".to_owned()), // TODO: multi-line comments?
        ]);
    }
    (file_doc, accounts_descriptors)
}

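/// Converts a snake_case module name into the PascalCase name of the
/// corresponding instruction enum variant.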
fn snake_to_pascal(s: &str) -> String {
    s.from_case(Case::Snake).to_case(Case::Pascal)
}

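/// Returns a mutable reference to the first enum item in the parsed file.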
fn find_enum(file_ast: &mut syn::File) -> &mut Item {
    file_ast
        .items
        .iter_mut()
        .find(|a| matches!(a, Item::Enum(_)))
        .unwrap()
}

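/// Extracts the variants of an enum item; the caller guarantees the item is
/// an enum.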
fn get_enum_variants(s: &mut Item) -> &mut Punctuated<Variant, Comma> {
    if let Item::Enum(ItemEnum {
        attrs: _,
        vis: _,
        enum_token: _,
        ident: _,
        generics: _,
        brace_token: _,
        variants,
    }) = s
    {
        variants
    } else {
        unreachable!()
    }
}

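/// Returns true when the field type is a reference to a slice (`&[T]`).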
fn is_slice(ty: &Type) -> bool {
    if let Type::Reference(TypeReference {
        and_token: _,
        lifetime: _,
        mutability: _,
        elem,
    }) = ty
    {
        let ty = *elem.clone();
        if let Type::Slice(_) = ty {
            return true;
        }
    }
    false
}

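/// Returns true when the type is a path starting with `Option`; other path
/// types are currently unimplemented. Not used yet.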
fn _is_option(ty: &Type) -> bool {
    if let Type::Path(TypePath { qself: _, path }) = ty {
        let seg = path.segments.iter().next().unwrap();
        if seg.ident != "Option" {
            unimplemented!()
        }
        return true;
    }
    false
}