1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
#![allow(clippy::or_fun_call)]
#![allow(clippy::useless_conversion)]

extern crate proc_macro;

#[cfg(not(feature = "proc_macro2_"))]
use proc_macro as used_proc_macro;

#[cfg(feature = "proc_macro2_")]
use proc_macro2 as used_proc_macro;

use std::iter;

#[allow(unused_imports)]
use used_proc_macro::{
    Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
};

#[cfg(feature = "syn_")]
mod use_syn;

#[cfg(not(feature = "syn_"))]
mod non_syn_parsing;

#[cfg(not(feature = "const_generics"))]
mod nested_tuple_compute;

mod utils;

#[cfg(all(feature = "min_const_generics", not(feature = "const_generics")))]
mod min_const_generics;

#[cfg(all(feature = "min_const_generics", not(feature = "const_generics")))]
use min_const_generics::output_tstr_param;

#[cfg(not(feature = "min_const_generics"))]
mod no_const_generics;

#[cfg(not(feature = "min_const_generics"))]
use no_const_generics::output_tstr_param;

/// Implementation detail of the user-facing `ts!`/`TS!` macros.
///
/// Parses the input as a crate path plus one or more strings, then emits
/// either a single `TStr<...>` type (one string) or a parenthesized,
/// comma-separated tuple of them (zero or several strings). Parse errors
/// become `compile_error!` invocations at the offending span.
#[doc(hidden)]
#[proc_macro]
pub fn __ts_impl(input_tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {
    use crate::utils::{paren, punct_token};

    let tokens = TokenStream::from(input_tokens);

    // The parser implementation is feature-selected; both produce the same
    // `Result<Inputs, _>` shape with an error type exposing `to_compile_error`.
    #[cfg(feature = "syn_")]
    let parse_result = syn::parse2::<Inputs>(tokens);

    #[cfg(not(feature = "syn_"))]
    let parse_result = non_syn_parsing::parse_inputs(tokens);

    let output = match parse_result {
        Err(err) => err.to_compile_error(),
        Ok(Inputs {
            crate_path,
            strings,
        }) => match strings.as_slice() {
            // Exactly one string: emit the bare `TStr<...>` type, no tuple.
            [single] => {
                let mut out = TokenStream::new();
                output_tstr(&crate_path, single, &mut out);
                out
            }
            // Zero or many strings: wrap them all in parentheses as a tuple,
            // each element followed by a trailing comma.
            many => {
                let group = paren(Span::call_site(), |inner| {
                    for tstr in many {
                        output_tstr(&crate_path, tstr, inner);
                        inner.extend(punct_token(',', tstr.span));
                    }
                });
                iter::once(group).collect()
            }
        },
    };
    output.into()
}

/// Appends a `<crate_path>::TStr< ... >` type to `out` for a single parsed
/// string, with every generated token spanned at the string's input span.
///
/// With the `const_generics` feature the parameter is additionally wrapped in
/// `<crate_path>::___< ... >` before the string payload is emitted by
/// `output_tstr_param`.
fn output_tstr(crate_path: &TokenStream, tstr: &TStr, out: &mut TokenStream) {
    use crate::utils::{colon2_token, ident_token, punct_token};

    let sp = tstr.span;

    // Open `<crate_path>::TStr<`.
    out.extend(crate_path.clone());
    out.extend(colon2_token(sp));
    out.extend(ident_token("TStr", sp));
    out.extend(punct_token('<', sp));

    // Open the extra `<crate_path>::___<` wrapper (const_generics only).
    #[cfg(feature = "const_generics")]
    {
        out.extend(crate_path.clone());
        out.extend(colon2_token(sp));
        out.extend(ident_token("___", sp));
        out.extend(punct_token('<', sp));
    }

    // The actual string payload; its encoding is feature-dependent.
    output_tstr_param(crate_path, tstr, out);

    // Close the `___< ... >` wrapper (const_generics only).
    #[cfg(feature = "const_generics")]
    out.extend(punct_token('>', sp));

    // Close `TStr< ... >`.
    out.extend(punct_token('>', sp));
}

/// `const_generics` encoding of the string payload: the string itself as a
/// single `&'static str` literal, used directly as a const-generic argument.
///
/// The crate path is unused in this encoding (the non-const-generics variants
/// need it to name helper types), hence the `_crate_path` underscore.
#[cfg(feature = "const_generics")]
fn output_tstr_param(_crate_path: &TokenStream, tstr: &TStr, out: &mut TokenStream) {
    // `&String` deref-coerces to `&str`; the previous `as_str()` binding plus
    // `&string` produced a needless double borrow (`&&str`).
    let mut lit = Literal::string(&tstr.string);
    // Span the literal at the input string so diagnostics point at it.
    lit.set_span(tstr.span);
    out.extend(iter::once(TokenTree::from(lit)));
}

/// Fully parsed input of `__ts_impl`, produced by either the `syn_`-based or
/// the hand-rolled parser depending on enabled features.
struct Inputs {
    // Leading path tokens (the `tstr` crate path) prepended before `::TStr`
    // and `::___` in every generated type.
    crate_path: TokenStream,
    // One entry per string argument; one `TStr<...>` type is emitted for each.
    strings: Vec<TStr>,
}

/// A single string argument to the macro, paired with the span of the tokens
/// it was parsed from so generated code and errors point back at the input.
struct TStr {
    // The string's contents, re-emitted as the type-level encoding.
    string: String,
    // Span applied to every token generated for this string.
    span: Span,
}