extern crate proc_macro;

use ::proc_macro::{*,
    TokenTree as TT,
};

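// Recursively rewrites a token stream, handling the `@` escape marker:
// an `@` in front of an invisible `Delimiter::None` group strips the
// invisible delimiters away (splicing the group's contents back in), and
// `@@` produces a literal `@`.
//
// This matters because a `macro_rules!` metavariable such as `$e:expr`, when
// forwarded to a proc macro, arrives wrapped in exactly such a
// `None`-delimited group; `@$e` is presumably how callers ask for the bare
// captured tokens instead.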
fn map (input: TokenStream)
  -> TokenStream
{
    let mut tokens = input.into_iter().peekable();
    let mut ret = TokenStream::new();
    while let Some(tt) = tokens.next() {
        ret.extend(Some(match tt {
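            // `@` marker: what happens next depends on the following token.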
            | TT::Punct(ref p)
                if p.as_char() == '@'
            => match tokens.peek() {
                | Some(&TT::Group(ref group))
                    if group.delimiter() == Delimiter::None
                => {
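                    // `@` before an invisible group: drop both the `@` and
                    // the `None` delimiters, splicing in the mapped contents.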
                    ret.extend(map(group.stream()));
                    drop(tokens.next());
                    continue;
                },
                | Some(TT::Punct(ref p))
                    if p.as_char() == '@'
                => {
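                    // `@@`: keep the second `@` as a literal one.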
                    tokens.next().unwrap()
                }
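                // A lone `@` is dropped; whatever follows is handled
                // normally on the next iteration.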
                | _ => continue,
            },
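            // Recurse into ordinary delimited groups so nested `@`s are
            // handled too.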
            | TT::Group(group) => {
                Group::new(group.delimiter(), map(group.stream()))
                    .into()
            },
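            // Any other token is forwarded unchanged.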
            | _ => tt,
        }));
    }
    ret
}

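// Item-position entry point: simply applies `map` to its input. Presumably
// invoked through a caller-side `macro_rules!` wrapper rather than used
// directly, hence the `#[doc(hidden)]`.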
/** Not part of the public API **/ #[doc(hidden)]
#[proc_macro] pub
fn __item__ (input: TokenStream)
  -> TokenStream
{
    map(input)
}

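// Expression-position entry point, written as a derive in the style of the
// classic `proc-macro-hack` technique (presumably so it also works on
// toolchains where function-like proc macros cannot be used in expression
// position): the expression tokens are smuggled in through a `#[derive]` on a
// phony enum, and the processed tokens are handed back inside a
// `macro_rules!` definition for the caller to invoke.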
/** Not part of the public API **/ #[doc(hidden)]
#[proc_macro_derive(__expr_hack__)] pub
fn __expr_hack__ (input: TokenStream)
  -> TokenStream
{
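    // The derive input is expected to be a phony enum of the following
    // shape, with the caller's actual expression tokens in place of
    // `<input>`:
    //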
    // enum
    // EnumName
    // {
    //     VariantName
    //     =
    //     (
    //         stringify
    //         !
    //         (
    //             <input>
    //         )
    // , 0).1,}
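    //
    // Wrapping the tokens in `stringify!(...)` lets any delimiter-balanced
    // token stream get past the parser, and the `( ... , 0).1` tuple index
    // keeps the discriminant a plain constant `0`, so the phony enum itself
    // still compiles.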

    let mut tokens = input.into_iter();
    // `enum EnumName`
    let _ = tokens.by_ref().take(2).for_each(drop);
    // `{ <tokens> }`
    let mut tokens = if let Some(TT::Group(it)) = tokens.next() { it } else {
        panic!()
    }.stream().into_iter();
    // `VariantName =`
    let _ = tokens.by_ref().take(2).for_each(drop);
    // `( <tokens> )`
    let mut tokens = if let Some(TT::Group(it)) = tokens.next() { it } else {
        panic!()
    }.stream().into_iter();
    // `stringify !`
    let _ = tokens.by_ref().take(2).for_each(drop);
    // `( <input> )`
    let input = if let Some(TT::Group(it)) = tokens.next() { it } else {
        panic!()
    }.stream();
    let ret = map(input);
    let span = Span::call_site();
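    // Emit `macro_rules! __defile__Hack__ { () => ( <mapped tokens> ) }`:
    // a derive can only expand to new items, so the processed expression is
    // stashed inside a helper `macro_rules!` definition, presumably for the
    // caller-side macro to invoke in expression position.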
    vec![
        TT::Ident(Ident::new("macro_rules", span)),
        TT::Punct(Punct::new('!', Spacing::Alone)),
        TT::Ident(Ident::new("__defile__Hack__", span)),
        TT::Group(Group::new(
            Delimiter::Brace,
            vec![
                TT::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
                TT::Punct(Punct::new('=', Spacing::Joint)),
                TT::Punct(Punct::new('>', Spacing::Alone)),
                TT::Group(Group::new(
                    Delimiter::Parenthesis,
                    ret,
                )),
            ].into_iter().collect(),
        )),
    ].into_iter().collect()
}