// rio_proc_macros/lib.rs

// https://github.com/alacritty/vte/blob/master/vte_generate_state_changes/Cargo.toml
// By Christian Duerr <contact@christianduerr.com>

#![deny(clippy::all, clippy::if_not_else, clippy::enum_glob_use)]

extern crate proc_macro;

use std::iter::Peekable;

use proc_macro2::TokenTree::{Group, Literal, Punct};
use proc_macro2::{token_stream, TokenStream, TokenTree};
use quote::quote;
14/// Create a `const fn` which will return an array with all state changes.
15#[proc_macro]
16pub fn generate_state_changes(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
17    // Convert from proc_macro -> proc_macro2
18    let item: TokenStream = item.into();
19    let mut iter = item.into_iter().peekable();
20
21    // Determine output function name
22    let fn_name = iter.next().unwrap();
23
24    // Separator between name and body with state changes
25    expect_punct(&mut iter, ',');
26
27    // Create token stream to assign each state change to the array
28    let assignments_stream = states_stream(&mut iter);
29
30    quote!(
31        const fn #fn_name() -> [[u8; 256]; 13] {
32            let mut state_changes = [[0; 256]; 13];
33
34            #assignments_stream
35
36            state_changes
37        }
38    )
39    .into()
40}
41
42/// Generate the array assignment statements for all origin states.
43fn states_stream(iter: &mut impl Iterator<Item = TokenTree>) -> TokenStream {
44    let mut states_stream = next_group(iter).into_iter().peekable();
45
46    // Loop over all origin state entries
47    let mut tokens = quote!();
48    while states_stream.peek().is_some() {
49        // Add all mappings for this state
50        tokens.extend(state_entry_stream(&mut states_stream));
51
52        // Allow trailing comma
53        optional_punct(&mut states_stream, ',');
54    }
55    tokens
56}
57
58/// Generate the array assignment statements for one origin state.
59fn state_entry_stream(iter: &mut Peekable<token_stream::IntoIter>) -> TokenStream {
60    // Origin state name
61    let state = iter.next().unwrap();
62
63    // Token stream with all the byte->target mappings
64    let mut changes_stream = next_group(iter).into_iter().peekable();
65
66    let mut tokens = quote!();
67    while changes_stream.peek().is_some() {
68        // Add next mapping for this state
69        tokens.extend(change_stream(&mut changes_stream, &state));
70
71        // Allow trailing comma
72        optional_punct(&mut changes_stream, ',');
73    }
74    tokens
75}
76
77/// Generate the array assignment statement for a single byte->target mapping
78/// for one state.
79fn change_stream(
80    iter: &mut Peekable<token_stream::IntoIter>,
81    state: &TokenTree,
82) -> TokenStream {
83    // Start of input byte range
84    let start = next_usize(iter);
85
86    // End of input byte range
87    let end = if optional_punct(iter, '.') {
88        // Read inclusive end of range
89        expect_punct(iter, '.');
90        expect_punct(iter, '=');
91        next_usize(iter)
92    } else {
93        // Without range, end is equal to start
94        start
95    };
96
97    // Separator between byte input range and output state
98    expect_punct(iter, '=');
99    expect_punct(iter, '>');
100
101    // Token stream with target state and action
102    let mut target_change_stream = next_group(iter).into_iter().peekable();
103
104    let mut tokens = quote!();
105    while target_change_stream.peek().is_some() {
106        // Target state/action for all bytes in the range
107        let (target_state, target_action) = target_change(&mut target_change_stream);
108
109        // Create a new entry for every byte in the range
110        for byte in start..=end {
111            tokens.extend(quote!(
112                state_changes[State::#state as usize][#byte] =
113                    pack(State::#target_state, Action::#target_action);
114            ));
115        }
116    }
117    tokens
118}
119
120/// Get next target state and action.
121fn target_change(iter: &mut Peekable<token_stream::IntoIter>) -> (TokenTree, TokenTree) {
122    let target_state = iter.next().unwrap();
123
124    // Separator between state and action
125    expect_punct(iter, ',');
126
127    let target_action = iter.next().unwrap();
128
129    (target_state, target_action)
130}
131
132/// Check if next token matches specific punctuation.
133fn optional_punct(iter: &mut Peekable<token_stream::IntoIter>, c: char) -> bool {
134    match iter.peek() {
135        Some(Punct(punct)) if punct.as_char() == c => iter.next().is_some(),
136        _ => false,
137    }
138}
139
140/// Ensure next token matches specific punctuation.
141///
142/// # Panics
143///
144/// Panics if the punctuation does not match.
145fn expect_punct(iter: &mut impl Iterator<Item = TokenTree>, c: char) {
146    match iter.next() {
147        Some(Punct(ref punct)) if punct.as_char() == c => (),
148        token => panic!("Expected punctuation '{c}', but got {token:?}"),
149    }
150}
151
152/// Get next token as [`usize`].
153///
154/// # Panics
155///
156/// Panics if the next token is not a [`usize`] in hex or decimal literal
157/// format.
158fn next_usize(iter: &mut impl Iterator<Item = TokenTree>) -> usize {
159    match iter.next() {
160        Some(Literal(literal)) => {
161            let literal = literal.to_string();
162            if let Some(prefix) = literal.strip_prefix("0x") {
163                usize::from_str_radix(prefix, 16).unwrap()
164            } else {
165                literal.parse::<usize>().unwrap()
166            }
167        }
168        token => panic!("Expected literal, but got {token:?}"),
169    }
170}
171
172/// Get next token as [`Group`].
173///
174/// # Panics
175///
176/// Panics if the next token is not a [`Group`].
177fn next_group(iter: &mut impl Iterator<Item = TokenTree>) -> TokenStream {
178    match iter.next() {
179        Some(Group(group)) => group.stream(),
180        token => panic!("Expected group, but got {token:?}"),
181    }
182}