stringify_inner/
lib.rs

#![doc = include_str!("../README.md")]

use core::{convert::identity, str::FromStr as _};

use proc_macro::{
    Delimiter, Group, Ident, Literal, Punct, Spacing::*, Span, TokenStream,
    TokenTree,
};

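/// Collect an iterator of token trees or token streams into a single `TokenStream`.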
#[must_use]
fn stream<I>(iter: I) -> TokenStream
where I: IntoIterator,
      TokenStream: FromIterator<I::Item>,
{
    TokenStream::from_iter(iter)
}

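/// Build a `::core::compile_error! { msg }` invocation spanned at `span`,
/// returned as the `Err` variant.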
fn err<T>(msg: &str, span: Span) -> Result<T, TokenStream> {
    let s = |mut t: TokenTree| {
        t.set_span(span);
        t
    };
    Err(stream([
        s(Punct::new(':', Joint).into()),
        s(Punct::new(':', Joint).into()),
        s(Ident::new("core", span).into()),
        s(Punct::new(':', Joint).into()),
        s(Punct::new(':', Joint).into()),
        s(Ident::new("compile_error", span).into()),
        s(Punct::new('!', Joint).into()),
        s(Group::new(Delimiter::Brace, stream([
            s(Literal::string(msg).into()),
        ])).into()),
    ]))
}

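/// Extract the contents of a plain or raw string literal from its source text,
/// returning them in escaped form so they can be embedded in a plain `"..."`
/// literal. `span` is used for error reporting.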
fn extract_str(s: &str, span: Span) -> Result<String, TokenStream> {
    if !s.ends_with(['"', '#']) {
        return err("invalid string suffix", span);
    }
    if s.starts_with('"') {
        return Ok(s[1..s.len()-1].to_owned());
    }
    if !s.starts_with('r') {
        return err("invalid string literal", span);
    }
    let mut s = &s[1..];
    while s.starts_with('#') {
        s = &s[1..s.len()-1];
    }
    let escaped = format!("{:?}", &s[1..s.len()-1]);
    debug_assert!(escaped.starts_with('"') && escaped.ends_with('"'), "{escaped}");
    Ok(escaped[1..escaped.len()-1].to_string())
}

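/// Concatenate two string literals into one, keeping the span of the first.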
fn merge_str(a: &Literal, b: &Literal) -> Result<Literal, TokenStream> {
    let (sa, sb) = (a.span(), b.span());
    let (a, b) = (a.to_string(), b.to_string());
    let (a, b) = (extract_str(&a, sa)?, extract_str(&b, sb)?);
    let mut lit = Literal::from_str(&format!("\"{a}{b}\"")).unwrap();
    lit.set_span(sa);
    Ok(lit)
}

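/// Process the tokens following a `#`: `##` collapses to a single `#`,
/// `#[...]` is kept as an attribute (with its contents processed),
/// `#stringify(...)` and `#concat(...)` are expanded, and anything else
/// is passed through unchanged.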
fn do_operation(
    tok: TokenTree,
    iter: &mut impl Iterator<Item = TokenTree>,
) -> Result<TokenStream, TokenStream> {
    let Some(op) = iter.next() else {
        return Ok(stream([tok]));
    };
    // Fallback: emit the `#` and the following token unchanged.
    let degrade = || stream([tok.clone(), op.clone()]);
    Ok(match op {
        TokenTree::Punct(ref punct)
            if punct.as_char() == '#' =>
        {
            stream([op])
        },
        TokenTree::Punct(_) => {
            degrade()
        },
        TokenTree::Group(_) => {
            stream([
                stream([tok]),
                expr_impl(stream([op]))?,
            ])
        },
        TokenTree::Literal(_) => degrade(),
        TokenTree::Ident(ref ident) => {
            let Some(param) = iter.next() else {
                return Ok(degrade());
            };
            let TokenTree::Group(param) = param else {
                // Keep the consumed token instead of silently dropping it.
                return Ok(stream([tok, op, param]));
            };
            let gspan = param.span();

            match &*ident.to_string() {
                "stringify" => {
                    let out_span = param.stream().into_iter().next()
                        .map_or(param.span(), |t| t.span());

                    let s = param.stream().to_string();
                    let mut tt = Literal::string(&s);
                    tt.set_span(out_span);
                    stream([TokenTree::from(tt)])
                },
                "concat" => {
                    let param = expr_impl(param.stream())?;
                    let mut s = Literal::string("");
                    let mut span = None;
                    let mut iter = param.into_iter().peekable();

                    while let Some(tt) = iter.next() {
                        // Skip the comma separating arguments.
                        iter.next_if(|p| matches!(p,
                                TokenTree::Punct(p) if p.as_char() == ','));
                        let TokenTree::Literal(lit) = tt else {
                            return err("expected a literal", tt.span());
                        };
                        span.get_or_insert(lit.span());
                        s = merge_str(&s, &lit)?;
                    }

                    s.set_span(span.unwrap_or(gspan));
                    stream([TokenTree::from(s)])
                },
                // Unknown operation: keep the `#`, the ident and its argument.
                _ => return Ok(stream([tok, op, param.into()])),
            }
        },
    })
}

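/// Recursively walk a token stream, rebuilding groups and dispatching every
/// `#` it finds to `do_operation`.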
fn expr_impl(stream: TokenStream) -> Result<TokenStream, TokenStream> {
    let mut result = TokenStream::new();
    let mut iter = stream.into_iter();

    while let Some(tok) = iter.next() {
        match tok {
            TokenTree::Group(group) => {
                let mut new_group = Group::new(
                    group.delimiter(),
                    expr_impl(group.stream())?,
                );
                new_group.set_span(group.span());
                result.extend([TokenTree::from(new_group)]);
            },
            TokenTree::Punct(ref punct)
                if punct.as_char() == '#' =>
            {
                result.extend(do_operation(tok, &mut iter)?);
            },
            _ => result.extend([tok]),
        }
    }

    Ok(result)
}

/// Run string expressions on expressions
///
/// - `#stringify(...)`: like `stringify!(...)`
/// - `#concat(...)`: like `concat!(...)` (string literals only)
/// - `##`: like `#`
/// - `#[...]`: like `#[...]`
///
/// # Examples
/// ```
/// use stringify_inner::sexpr;
///
/// assert_eq!(sexpr!(#stringify(foo)), "foo");
/// assert_eq!(sexpr!(&#stringify(foo)[1..]), "oo");
/// assert_eq!(sexpr!(#concat(#stringify(foo), "bar")), "foobar");
/// ```
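///
/// Tokens outside these operations pass through unchanged:
/// ```
/// use stringify_inner::sexpr;
///
/// assert_eq!(sexpr!(1 + 2), 3);
/// ```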
#[proc_macro]
pub fn sexpr(stream: TokenStream) -> TokenStream {
    // On error, emit the `compile_error!` invocation in place of the output.
    expr_impl(stream).map_or_else(identity, identity)
}

/// Run string expressions on an attribute
///
/// - `#stringify(...)`: like `stringify!(...)`
/// - `#concat(...)`: like `concat!(...)` (string literals only)
/// - `##`: like `#`
/// - `#[...]`: like `#[...]`
///
/// # Examples
/// ```
/// use stringify_inner::sexpr_attr;
///
/// #[sexpr_attr(doc(alias = #stringify(bar)))]
/// fn foo() {}
/// ```
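///
/// Here `#stringify(bar)` expands to the string literal `"bar"`, so the
/// attribute becomes `#[doc(alias = "bar")]`.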
#[proc_macro_attribute]
pub fn sexpr_attr(attr: TokenStream, item: TokenStream) -> TokenStream {
    stream([
        // Wrap the attribute arguments in `#[...]`, process them, then re-emit the item.
        expr_impl(stream([
            TokenTree::from(Punct::new('#', Joint)),
            Group::new(Delimiter::Bracket, attr).into(),
        ])).map_or_else(identity, identity),
        item,
    ])
}