doc_since/lib.rs

use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
use quote::{quote, ToTokens};
use syn::parse_macro_input;

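/// Appends an "Available since crate version" note to a function's doc
/// comment.
///
/// The note is spliced in just before the last empty `///` line of the docs;
/// if the docs contain no empty line, the function is emitted unchanged.
/// A usage sketch (the function name and version are made up for
/// illustration):
///
/// ```ignore
/// #[doc_since("1.2.0")]
/// /// Frobnicates the widget.
/// ///
/// fn frobnicate() {}
/// ```
///
/// which documents `frobnicate` roughly as if it had been written:
///
/// ```ignore
/// /// Frobnicates the widget.
/// ///
/// /// Available since crate version: **1.2.0**
/// ///
/// fn frobnicate() {}
/// ```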
#[proc_macro_attribute]
pub fn doc_since(args: TokenStream, input: TokenStream) -> TokenStream {
    let mut out = TokenStream::new();

    let args = parse_macro_input!(args as syn::AttributeArgs);
    let since_version = match args.first() {
        Some(syn::NestedMeta::Lit(syn::Lit::Str(lit))) => lit.value(),
        // anything other than a single string literal is a usage error
        _ => {
            return syn::Error::new(
                proc_macro2::Span::call_site(),
                r#"expected a string literal version, e.g. #[doc_since("1.2.0")]"#,
            )
            .to_compile_error()
            .into()
        }
    };
    let mut ast = match syn::parse::<syn::ItemFn>(input.clone()) {
        Ok(ast) => ast,
        // on parse error, make IDEs happy; see fn docs
        Err(err) => return input_and_compile_error(input, err),
    };

    // find the last empty doc comment line (`///`); the generated note is
    // spliced in just before it
    let mut insert_doc_at = None;

    for (i, attr) in ast.attrs.iter().enumerate() {
        if attr_is_empty_doc(attr) {
            insert_doc_at = Some(i);
        }
    }
    if let Some(i) = insert_doc_at {
        // re-emit the attributes that precede the insertion point...
        out.extend(
            ast.attrs
                .drain(..i)
                .map(ToTokens::into_token_stream)
                .map(TokenStream::from),
        );

        // ...followed by the generated doc attributes
        out.extend(doc_since_token_stream(&since_version));
    }

    // the remaining attributes and the function itself come last
    out.extend([TokenStream::from(quote! { #ast })]);

    out
}

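/// Returns `true` if `attr` is an empty doc comment, i.e. a bare `///` line.
/// Doc comments desugar to `#[doc = "..."]` attributes, so an empty line is
/// `#[doc = ""]`.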
fn attr_is_empty_doc(attr: &syn::Attribute) -> bool {
    if attr.path.is_ident("doc") {
        // the tokens of `#[doc = ""]` are `= ""`; inspect the trailing
        // literal, tolerating attributes that carry no tokens at all
        if let Some(proc_macro2::TokenTree::Literal(lit)) =
            attr.tokens.clone().into_iter().last()
        {
            // literal is the empty doc comment string
            if lit.to_string() == "\"\"" {
                return true;
            }
        }
    }

    false
}

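/// Hand-builds the token stream for two doc attributes, equivalent to
/// writing:
///
///     #[doc = ""]
///     #[doc = " Available since crate version: **{since}**"]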
fn doc_since_token_stream(since: &str) -> impl IntoIterator<Item = TokenTree> {
    [
        TokenTree::Punct(Punct::new('#', proc_macro::Spacing::Alone)),
        TokenTree::Group(Group::new(
            proc_macro::Delimiter::Bracket,
            TokenStream::from_iter([
                TokenTree::Ident(Ident::new("doc", Span::call_site())),
                TokenTree::Punct(Punct::new('=', proc_macro::Spacing::Alone)),
                TokenTree::Literal(Literal::string("")),
            ]),
        )),
        TokenTree::Punct(Punct::new('#', proc_macro::Spacing::Alone)),
        TokenTree::Group(Group::new(
            proc_macro::Delimiter::Bracket,
            TokenStream::from_iter([
                TokenTree::Ident(Ident::new("doc", Span::call_site())),
                TokenTree::Punct(Punct::new('=', proc_macro::Spacing::Alone)),
                TokenTree::Literal(Literal::string(&format!(
                    " Available since crate version: **{}**",
                    since
                ))),
            ]),
        )),
    ]
}

/// Converts the error to a token stream and appends it to the original input.
///
/// Returning the original input in addition to the error is good for IDEs which can gracefully
/// recover and show more precise errors within the macro body.
///
/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.
fn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
    let compile_err = TokenStream::from(err.to_compile_error());
    item.extend(compile_err);
    item
}
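
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal sanity check for the marker detection, runnable with
    // `cargo test`. This sketch assumes syn 1.x, where `parse_quote!` can
    // produce a single `syn::Attribute` and `///` desugars to `#[doc = "..."]`.
    #[test]
    fn detects_empty_doc_attr() {
        let empty: syn::Attribute = syn::parse_quote!(#[doc = ""]);
        let non_empty: syn::Attribute = syn::parse_quote!(#[doc = " some text"]);

        assert!(attr_is_empty_doc(&empty));
        assert!(!attr_is_empty_doc(&non_empty));
    }
}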