flexi_parse/proc_macro/mod.rs

use crate::error::Error;
use crate::group::parse_delimiters;
use crate::group::Braces;
use crate::group::Brackets;
use crate::group::Parentheses;
use crate::scanner;
use crate::to_tokens::ToTokens;
use crate::token::Ident;
use crate::token::LitChar;
use crate::token::LitFloat;
use crate::token::LitInt;
use crate::token::LitStrDoubleQuote;
use crate::token::LitStrSingleQuote;
use crate::token::PunctKind;
use crate::token::SingleCharPunct;
use crate::token::Spacing;
use crate::Entry;
use crate::ParseBuffer;
use crate::Result;
use crate::SourceFile;
use crate::Span;
use crate::TokenStream;

use std::sync::Arc;

use proc_macro2::Delimiter;
use proc_macro2::Group;
use proc_macro2::Ident as Ident2;
use proc_macro2::Literal;
use proc_macro2::Punct as Punct2;
use proc_macro2::Spacing as Spacing2;
use proc_macro2::Span as Span2;
use proc_macro2::TokenStream as TokenStream2;
use proc_macro2::TokenTree as TokenTree2;

use quote::TokenStreamExt;

#[cfg(feature = "proc-macro")]
mod proc_macro1;

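// `Spacing` converts losslessly in both directions between the flexi_parse
// and `proc_macro2` representations.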
impl From<Spacing2> for Spacing {
    fn from(value: Spacing2) -> Self {
        match value {
            Spacing2::Alone => Spacing::Alone,
            Spacing2::Joint => Spacing::Joint,
        }
    }
}

impl From<Spacing> for Spacing2 {
    fn from(value: Spacing) -> Self {
        match value {
            Spacing::Alone => Spacing2::Alone,
            Spacing::Joint => Spacing2::Joint,
        }
    }
}

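/// Flattens a single `proc_macro2` token tree into flexi_parse [`Entry`]s.
///
/// `proc_macro2` spans cannot be mapped back to source offsets, so every
/// produced entry carries a placeholder span pointing at an empty
/// [`SourceFile`]; the caller is expected to overwrite it.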
fn tree_to_trees(token: &TokenTree2) -> Vec<Entry> {
    let mut tokens = vec![];
    // Placeholder span; `From<TokenStream2> for TokenStream` overwrites it.
    let span = Span {
        start: 0,
        end: 0,
        source: Arc::new(SourceFile {
            name: String::new(),
            path: None,
            contents: String::new(),
        }),
    };
    match token {
        TokenTree2::Group(group) => {
            let delimiters = match group.delimiter() {
                Delimiter::Parenthesis => Some(('(', ')')),
                Delimiter::Bracket => Some(('[', ']')),
                Delimiter::Brace => Some(('{', '}')),
                Delimiter::None => None,
            };
            // Groups are flattened: explicit delimiter punctuation on either
            // side of the recursively converted contents.
            if let Some((start, _)) = delimiters {
                tokens.push(Entry::Punct(SingleCharPunct {
                    kind: start.try_into().unwrap(),
                    spacing: Spacing::Alone,
                    span: span.clone(),
                }));
            }
            for token in group.stream() {
                tokens.append(&mut tree_to_trees(&token));
            }
            if let Some((_, end)) = delimiters {
                tokens.push(Entry::Punct(SingleCharPunct {
                    kind: end.try_into().unwrap(),
                    spacing: Spacing::Alone,
                    span: span.clone(),
                }));
            }
        }
        TokenTree2::Ident(ident) => {
            let string = ident.to_string();
            tokens.push(Entry::Ident(Ident {
                string,
                span: span.clone(),
            }));
        }
        TokenTree2::Literal(literal) => {
            // `proc_macro2` literals are opaque, so rescan their string form
            // with the flexi_parse scanner to recover structured tokens.
            tokens.extend(
                scanner::scan(
                    Arc::new(SourceFile {
                        name: String::new(),
                        path: None,
                        contents: literal.to_string(),
                    }),
                    0,
                    None,
                )
                .0
                .tokens
                .into_iter()
                .map(|mut token| {
                    token.set_span(span.clone());
                    token
                }),
            );
        }
        TokenTree2::Punct(punct) => {
            let kind = punct.as_char().try_into().unwrap();
            let spacing = punct.spacing().into();
            tokens.push(Entry::Punct(SingleCharPunct {
                kind,
                spacing,
                span: span.clone(),
            }));
        }
    }
    tokens
}

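// Converting from `proc_macro2::TokenStream` cannot fail, but the resulting
// spans all cover the stringified stream rather than real source positions.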
impl From<TokenStream2> for TokenStream {
    fn from(value: TokenStream2) -> Self {
        let mut tokens = vec![];
        let contents = value.to_string();
        let source = Arc::new(SourceFile::new("<TokenStream>".to_string(), contents));
        for token in value {
            tokens.append(&mut tree_to_trees(&token));
        }
        let span = Span {
            start: 0,
            end: source.contents.len(),
            source: Arc::clone(&source),
        };
        for token in &mut tokens {
            token.set_span(span.clone());
        }
        TokenStream::new(tokens, Some(source))
    }
}

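/// Converts a flexi_parse [`TokenStream`] back into a `proc_macro2` stream,
/// re-nesting delimited regions into [`Group`]s and re-assembling literals.
///
/// # Panics
///
/// Panics if the stream contains an [`Entry::Error`] from a failed scan.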
fn token_stream_to_token_stream_2(tokens: &TokenStream) -> Result<TokenStream2> {
    let mut token_trees = vec![];
    let buf = ParseBuffer::from(tokens);
    while let Ok(entry) = buf.current() {
        match entry {
            Entry::Error(_) => {
                panic!("cannot convert a failed scan to a `proc_macro2::TokenStream`")
            }
            Entry::Ident(ident) => {
                // Numeric literals reach here as ident entries, so peek for
                // them before emitting a plain ident.
                if buf.peek(LitFloat) {
                    let float: LitFloat = buf.parse()?;
                    token_trees.push(TokenTree2::Literal(Literal::f64_unsuffixed(float.value())));
                } else if buf.peek(LitInt) {
                    let int: LitInt = buf.parse()?;
                    token_trees.push(TokenTree2::Literal(Literal::u64_unsuffixed(int.value())));
                } else {
                    token_trees.push(TokenTree2::Ident(Ident2::new(
                        &ident.string,
                        Span2::call_site(),
                    )));
                }
            }
            Entry::Punct(punct) => match punct.kind {
                // An opening delimiter begins a flattened group: collect up to
                // the matching closer and convert the contents recursively.
                PunctKind::LeftParen => {
                    let (_, _, tokens) = parse_delimiters::<Parentheses>(&buf)?;
                    token_trees.push(TokenTree2::Group(Group::new(
                        Delimiter::Parenthesis,
                        token_stream_to_token_stream_2(&tokens)?,
                    )));
                }
                PunctKind::LeftBracket => {
                    let (_, _, tokens) = parse_delimiters::<Brackets>(&buf)?;
                    token_trees.push(TokenTree2::Group(Group::new(
                        Delimiter::Bracket,
                        token_stream_to_token_stream_2(&tokens)?,
                    )));
                }
                PunctKind::LeftBrace => {
                    let (_, _, tokens) = parse_delimiters::<Braces>(&buf)?;
                    token_trees.push(TokenTree2::Group(Group::new(
                        Delimiter::Brace,
                        token_stream_to_token_stream_2(&tokens)?,
                    )));
                }
                PunctKind::DoubleQuote => {
                    let str: LitStrDoubleQuote = buf.parse()?;
                    token_trees.push(TokenTree2::Literal(Literal::string(str.string())));
                }
                PunctKind::SingleQuote => {
                    let ch: LitChar = buf.parse()?;
                    token_trees.push(TokenTree2::Literal(Literal::character(ch.ch())));
                }
                kind => token_trees.push(TokenTree2::Punct(Punct2::new(
                    kind.into(),
                    punct.spacing.into(),
                ))),
            },
            // `proc_macro2` has no whitespace tokens.
            Entry::WhiteSpace(_) => {}
            #[cfg(feature = "scan-strings")]
            Entry::LitStrDoubleQuote(str) => {
                token_trees.push(TokenTree2::Literal(Literal::string(str.string())));
            }
            #[cfg(feature = "scan-strings")]
            Entry::LitStrSingleQuote(str) => {
                assert!(
                    str.string().len() == 1,
                    "invalid char literal '{}'",
                    str.string()
                );
                token_trees.push(TokenTree2::Literal(Literal::character(
                    str.string().chars().next().unwrap(),
                )));
            }
        }
        buf.next_raw();
    }
    Ok(token_trees.into_iter().collect())
}

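// Fallible conversions back to `proc_macro2`: errors from re-parsing the
// stream's literals are returned rather than panicking.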
impl<'a> TryFrom<&'a TokenStream> for TokenStream2 {
    type Error = Error;

    fn try_from(value: &'a TokenStream) -> std::result::Result<Self, Self::Error> {
        token_stream_to_token_stream_2(value)
    }
}

impl TryFrom<TokenStream> for TokenStream2 {
    type Error = Error;

    fn try_from(value: TokenStream) -> Result<Self> {
        token_stream_to_token_stream_2(&value)
    }
}

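// The reverse direction is infallible, so `proc_macro2` streams implement the
// flexi_parse `ToTokens` trait directly.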
impl ToTokens for TokenStream2 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(&mut TokenStream::from(self.clone()));
    }

    fn to_token_stream(&self) -> TokenStream {
        TokenStream::from(self.clone())
    }

    fn into_token_stream(self) -> TokenStream {
        TokenStream::from(self)
    }
}

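/// A wrapper that lets any flexi_parse [`ToTokens`] type be used where
/// [`quote::ToTokens`] is expected, e.g. in `quote!` interpolation.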
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ToTokensWrapper<T: ?Sized>(pub T);

impl<T: ToTokens> quote::ToTokens for ToTokensWrapper<T> {
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        let new_tokens = self.0.to_token_stream();
        tokens.append_all(TokenStream2::try_from(new_tokens).unwrap());
    }

    fn to_token_stream(&self) -> TokenStream2 {
        self.0.to_token_stream().try_into().unwrap()
    }

    fn into_token_stream(self) -> TokenStream2 {
        self.0.into_token_stream().try_into().unwrap()
    }
}

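// `TokenStream` itself also implements `quote::ToTokens`; the conversion
// panics if the stream cannot be represented as `proc_macro2` tokens.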
impl quote::ToTokens for TokenStream {
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        tokens.append_all(TokenStream2::try_from(self).unwrap());
    }

    fn to_token_stream(&self) -> TokenStream2 {
        self.try_into().unwrap()
    }

    fn into_token_stream(self) -> TokenStream2 {
        self.try_into().unwrap()
    }
}

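// Implements `quote::ToTokens` for the listed token types by routing through
// their flexi_parse `ToTokens` implementations.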
macro_rules! impl_literals {
    [$($ty:ty),+ $(,)?] => {
        $(
            impl quote::ToTokens for $ty {
                fn to_tokens(&self, tokens: &mut TokenStream2) {
                    tokens.append_all(
                        TokenStream2::try_from(ToTokens::to_token_stream(self)).unwrap()
                    );
                }

                fn to_token_stream(&self) -> TokenStream2 {
                    TokenStream2::try_from(ToTokens::to_token_stream(self)).unwrap()
                }

                fn into_token_stream(self) -> TokenStream2 {
                    TokenStream2::try_from(ToTokens::into_token_stream(self)).unwrap()
                }
            }
        )+
    };
}

impl_literals![
    LitStrDoubleQuote,
    LitStrSingleQuote,
    LitChar,
    LitInt,
    LitFloat,
    Ident,
];