1#![doc(html_root_url = "https://docs.rs/paste/1.0.14")]
141#![allow(
142 clippy::derive_partial_eq_without_eq,
143 clippy::doc_markdown,
144 clippy::match_same_arms,
145 clippy::module_name_repetitions,
146 clippy::needless_doctest_main,
147 clippy::too_many_lines
148)]
149
150extern crate proc_macro;
151
152mod attr;
153mod error;
154mod segment;
155
156use crate::attr::expand_attr;
157use crate::error::{Error, Result};
158use crate::segment::Segment;
159use proc_macro::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
160use std::char;
161use std::collections::hash_map::DefaultHasher;
162use std::hash::{Hash, Hasher};
163use std::iter;
164use std::panic;
165
166use once_cell::sync::Lazy;
167
168#[proc_macro]
169pub fn paste(input: TokenStream) -> TokenStream {
170 let mut contains_paste = false;
171 let flatten_single_interpolation = true;
172 match expand(
173 input.clone(),
174 &mut contains_paste,
175 flatten_single_interpolation,
176 ) {
177 Ok(expanded) => {
178 if contains_paste {
179 expanded
180 } else {
181 input
182 }
183 }
184 Err(err) => err.to_compile_error(),
185 }
186}
187
// Hidden entry point; simply forwards to `paste`.
#[doc(hidden)]
#[proc_macro]
pub fn item(input: TokenStream) -> TokenStream {
    paste(input)
}
193
// Hidden entry point; simply forwards to `paste`.
#[doc(hidden)]
#[proc_macro]
pub fn expr(input: TokenStream) -> TokenStream {
    paste(input)
}
199
/// Core recursive expansion for `paste!`.
///
/// Walks `input`, replacing every `[< ... >]` bracket with its pasted
/// identifier/literal and recursing into nested groups. Sets
/// `*contains_paste` when any paste actually happened so the caller can
/// return the original stream otherwise.
///
/// `flatten_single_interpolation` controls whether a `Delimiter::None`
/// group holding a single interpolation (ident path, literal, or
/// lifetime) is unwrapped into the surrounding stream; it is disabled
/// while recursing inside attribute brackets.
fn expand(
    input: TokenStream,
    contains_paste: &mut bool,
    flatten_single_interpolation: bool,
) -> Result<TokenStream> {
    let mut expanded = TokenStream::new();
    // Shape of the immediately preceding punctuation (`::`, `#`, `#!`).
    let mut lookbehind = Lookbehind::Other;
    // A `Delimiter::None` group whose emission is deferred one token so
    // we can see whether a `::` follows it.
    let mut prev_none_group = None::<Group>;
    let mut tokens = input.into_iter().peekable();
    loop {
        let token = tokens.next();
        if let Some(group) = prev_none_group.take() {
            if match (&token, tokens.peek()) {
                (Some(TokenTree::Punct(fst)), Some(TokenTree::Punct(snd))) => {
                    fst.as_char() == ':' && snd.as_char() == ':' && fst.spacing() == Spacing::Joint
                }
                _ => false,
            } {
                // `::` follows — flatten the invisible group so its tokens
                // join the path.
                expanded.extend(group.stream());
                *contains_paste = true;
            } else {
                expanded.extend(iter::once(TokenTree::Group(group)));
            }
        }
        match token {
            Some(TokenTree::Group(group)) => {
                let delimiter = group.delimiter();
                let content = group.stream();
                let span = group.span();
                if delimiter == Delimiter::Bracket && is_paste_operation(&content) {
                    // `[< ... >]`: perform the actual paste.
                    let segments = parse_bracket_as_segments(content, span)?;
                    let pasted = segment::paste(&segments)?;
                    let tokens = pasted_to_tokens(pasted, span)?;
                    expanded.extend(tokens);
                    *contains_paste = true;
                } else if flatten_single_interpolation
                    && delimiter == Delimiter::None
                    && is_single_interpolation_group(&content)
                {
                    // Unwrap an invisible group around a lone interpolation.
                    expanded.extend(content);
                    *contains_paste = true;
                } else {
                    // Ordinary group: recurse into its contents.
                    let mut group_contains_paste = false;
                    let is_attribute = delimiter == Delimiter::Bracket
                        && (lookbehind == Lookbehind::Pound || lookbehind == Lookbehind::PoundBang);
                    let mut nested = expand(
                        content,
                        &mut group_contains_paste,
                        // No flattening inside `#[...]` / `#![...]`.
                        flatten_single_interpolation && !is_attribute,
                    )?;
                    if is_attribute {
                        nested = expand_attr(nested, span, &mut group_contains_paste)?;
                    }
                    // Rebuild the group only when its contents changed;
                    // otherwise keep the original (preserves exact spans).
                    let group = if group_contains_paste {
                        let mut group = Group::new(delimiter, nested);
                        group.set_span(span);
                        *contains_paste = true;
                        group
                    } else {
                        group.clone()
                    };
                    if delimiter != Delimiter::None {
                        expanded.extend(iter::once(TokenTree::Group(group)));
                    } else if lookbehind == Lookbehind::DoubleColon {
                        // A `::` precedes — flatten the invisible group.
                        expanded.extend(group.stream());
                        *contains_paste = true;
                    } else {
                        // Defer emission until we know whether `::` follows.
                        prev_none_group = Some(group);
                    }
                }
                lookbehind = Lookbehind::Other;
            }
            Some(TokenTree::Punct(punct)) => {
                // Track `:`, `::`, `#`, `#!` for the next iteration.
                lookbehind = match punct.as_char() {
                    ':' if lookbehind == Lookbehind::JointColon => Lookbehind::DoubleColon,
                    ':' if punct.spacing() == Spacing::Joint => Lookbehind::JointColon,
                    '#' => Lookbehind::Pound,
                    '!' if lookbehind == Lookbehind::Pound => Lookbehind::PoundBang,
                    _ => Lookbehind::Other,
                };
                expanded.extend(iter::once(TokenTree::Punct(punct)));
            }
            Some(other) => {
                lookbehind = Lookbehind::Other;
                expanded.extend(iter::once(other));
            }
            None => {
                return Ok(expanded);
            }
        }
    }
}
292
/// Shape of the most recent punctuation seen by `expand`, used to detect
/// `::` path separators and `#[...]` / `#![...]` attribute positions.
#[derive(PartialEq)]
enum Lookbehind {
    JointColon,  // `:` with Joint spacing (first half of `::`)
    DoubleColon, // a complete `::`
    Pound,       // `#`
    PoundBang,   // `#!`
    Other,       // anything else
}
301
302fn is_single_interpolation_group(input: &TokenStream) -> bool {
304 #[derive(PartialEq)]
305 enum State {
306 Init,
307 Ident,
308 Literal,
309 Apostrophe,
310 Lifetime,
311 Colon1,
312 Colon2,
313 }
314
315 let mut state = State::Init;
316 for tt in input.clone() {
317 state = match (state, &tt) {
318 (State::Init, TokenTree::Ident(_)) => State::Ident,
319 (State::Init, TokenTree::Literal(_)) => State::Literal,
320 (State::Init, TokenTree::Punct(punct)) if punct.as_char() == '\'' => State::Apostrophe,
321 (State::Apostrophe, TokenTree::Ident(_)) => State::Lifetime,
322 (State::Ident, TokenTree::Punct(punct))
323 if punct.as_char() == ':' && punct.spacing() == Spacing::Joint =>
324 {
325 State::Colon1
326 }
327 (State::Colon1, TokenTree::Punct(punct))
328 if punct.as_char() == ':' && punct.spacing() == Spacing::Alone =>
329 {
330 State::Colon2
331 }
332 (State::Colon2, TokenTree::Ident(_)) => State::Ident,
333 _ => return false,
334 };
335 }
336
337 state == State::Ident || state == State::Literal || state == State::Lifetime
338}
339
340fn is_paste_operation(input: &TokenStream) -> bool {
341 let mut tokens = input.clone().into_iter();
342
343 match &tokens.next() {
344 Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
345 _ => return false,
346 }
347
348 let mut has_token = false;
349 loop {
350 match &tokens.next() {
351 Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {
352 return has_token && tokens.next().is_none();
353 }
354 Some(_) => has_token = true,
355 None => return false,
356 }
357 }
358}
359
/// Parses the interior of a `[< ... >]` bracket into paste `Segment`s.
///
/// Expects `input` to begin with `<` and end with `>`; errors are
/// reported at the offending token, or at `scope` when tokens are
/// missing. After parsing, string-literal segments are normalized:
/// * `'\u{...}'` char escapes are decoded to the literal character;
/// * literals containing `#`, `\`, `.`, `+`, or with byte / byte-string /
///   raw-byte-string prefixes are rejected as unsupported;
/// * surrounding quotes (`"..."`, `'...'`, `r"..."`) are stripped and
///   `-` is replaced with `_`.
fn parse_bracket_as_segments(input: TokenStream, scope: Span) -> Result<Vec<Segment>> {
    let mut tokens = input.into_iter().peekable();

    // Leading `<`.
    match &tokens.next() {
        Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
        Some(wrong) => return Err(Error::new(wrong.span(), "expected `<`")),
        None => return Err(Error::new(scope, "expected `[< ... >]`")),
    }

    let mut segments = segment::parse(&mut tokens)?;

    // Trailing `>`.
    match &tokens.next() {
        Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {}
        Some(wrong) => return Err(Error::new(wrong.span(), "expected `>`")),
        None => return Err(Error::new(scope, "expected `[< ... >]`")),
    }

    // Nothing may follow the closing `>`.
    if let Some(unexpected) = tokens.next() {
        return Err(Error::new(
            unexpected.span(),
            "unexpected input, expected `[< ... >]`",
        ));
    }

    // Normalize string segments as described in the doc comment.
    for segment in &mut segments {
        if let Segment::String(string) = segment {
            if string.value.starts_with("'\\u{") {
                // Slice out the hex digits between `'\u{` and `}'`.
                let hex = &string.value[4..string.value.len() - 2];
                if let Ok(unsigned) = u32::from_str_radix(hex, 16) {
                    if let Some(ch) = char::from_u32(unsigned) {
                        string.value.clear();
                        string.value.push(ch);
                        continue;
                    }
                }
            }
            if string.value.contains(&['#', '\\', '.', '+'][..])
                || string.value.starts_with("b'")
                || string.value.starts_with("b\"")
                || string.value.starts_with("br\"")
            {
                return Err(Error::new(string.span, "unsupported literal"));
            }
            let mut range = 0..string.value.len();
            if string.value.starts_with("r\"") {
                // Raw string: drop the `r"` prefix and the `"` suffix.
                range.start += 2;
                range.end -= 1;
            } else if string.value.starts_with(&['"', '\''][..]) {
                // Plain string or char literal: drop the quotes.
                range.start += 1;
                range.end -= 1;
            }
            string.value = string.value[range].replace('-', "_");
        }
    }

    Ok(segments)
}
417
418fn pasted_to_tokens(mut pasted: String, span: Span) -> Result<TokenStream> {
419 let mut tokens = TokenStream::new();
420
421 #[cfg(not(no_literal_fromstr))]
422 {
423 use proc_macro::{LexError, Literal};
424 use std::str::FromStr;
425
426 if pasted.starts_with(|ch: char| ch.is_ascii_digit()) {
427 let literal = match panic::catch_unwind(|| Literal::from_str(&pasted)) {
428 Ok(Ok(literal)) => TokenTree::Literal(literal),
429 Ok(Err(LexError { .. })) | Err(_) => {
430 return Err(Error::new(
431 span,
432 &format!("`{:?}` is not a valid literal", pasted),
433 ));
434 }
435 };
436 tokens.extend(iter::once(literal));
437 return Ok(tokens);
438 }
439 }
440
441 if pasted.starts_with('\'') {
442 let mut apostrophe = TokenTree::Punct(Punct::new('\'', Spacing::Joint));
443 apostrophe.set_span(span);
444 tokens.extend(iter::once(apostrophe));
445 pasted.remove(0);
446 }
447
448 let ident = match panic::catch_unwind(|| Ident::new(&pasted, span)) {
449 Ok(ident) => TokenTree::Ident(ident),
450 Err(_) => {
451 return Err(Error::new(
452 span,
453 &format!("`{:?}` is not a valid identifier", pasted),
454 ));
455 }
456 };
457
458 tokens.extend(iter::once(ident));
459 Ok(tokens)
460}
461
462#[proc_macro]
463pub fn unique_paste(input: TokenStream) -> TokenStream {
464 let mut contains_paste = false;
465 let flatten_single_interpolation = true;
466 match unique_expand(
467 input.clone(),
468 &mut contains_paste,
469 flatten_single_interpolation,
470 false,
471 ) {
472 Ok(expanded) => {
473 if contains_paste {
474 expanded
475 } else {
476 input
477 }
478 }
479 Err(err) => err.to_compile_error(),
480 }
481}
482
483fn unique_expand(
484 input: TokenStream,
485 contains_paste: &mut bool,
486 flatten_single_interpolation: bool,
487 recurse: bool,
488) -> Result<TokenStream> {
489 let mut expanded = TokenStream::new();
490 let mut lookbehind = Lookbehind::Other;
491 let mut prev_none_group = None::<Group>;
492 let mut tokens = input.into_iter().peekable();
493 loop {
494 let token = tokens.next();
495 if let Some(group) = prev_none_group.take() {
496 if match (&token, tokens.peek()) {
497 (Some(TokenTree::Punct(fst)), Some(TokenTree::Punct(snd))) => {
498 fst.as_char() == ':' && snd.as_char() == ':' && fst.spacing() == Spacing::Joint
499 }
500 _ => false,
501 } {
502 expanded.extend(group.stream());
503 *contains_paste = true;
504 } else {
505 expanded.extend(iter::once(TokenTree::Group(group)));
506 }
507 }
508 match token {
509 Some(TokenTree::Group(group)) => {
510 let delimiter = group.delimiter();
511 let content = group.stream();
512 let span = group.span();
513 if delimiter == Delimiter::Bracket && is_paste_operation(&content) {
514 let segments = parse_bracket_as_segments(content, span)?;
515 let mut pasted = segment::paste(&segments)?;
516 unsafe {
517 pasted.push_str(&(calculate_hash(&*NONCE)).to_string());
518 }
519 let tokens = pasted_to_tokens(pasted, span)?;
520 expanded.extend(tokens);
521 *contains_paste = true;
522 } else if flatten_single_interpolation
523 && delimiter == Delimiter::None
524 && is_single_interpolation_group(&content)
525 {
526 expanded.extend(content);
527 *contains_paste = true;
528 } else {
529 let mut group_contains_paste = false;
530 let is_attribute = delimiter == Delimiter::Bracket
531 && (lookbehind == Lookbehind::Pound || lookbehind == Lookbehind::PoundBang);
532 let mut nested = unique_expand(
533 content,
534 &mut group_contains_paste,
535 flatten_single_interpolation && !is_attribute,
536 true,
537 )?;
538 if is_attribute {
539 nested = expand_attr(nested, span, &mut group_contains_paste)?;
540 }
541 let group = if group_contains_paste {
542 let mut group = Group::new(delimiter, nested);
543 group.set_span(span);
544 *contains_paste = true;
545 group
546 } else {
547 group.clone()
548 };
549 if delimiter != Delimiter::None {
550 expanded.extend(iter::once(TokenTree::Group(group)));
551 } else if lookbehind == Lookbehind::DoubleColon {
552 expanded.extend(group.stream());
553 *contains_paste = true;
554 } else {
555 prev_none_group = Some(group);
556 }
557 }
558 lookbehind = Lookbehind::Other;
559 }
560 Some(TokenTree::Punct(punct)) => {
561 lookbehind = match punct.as_char() {
562 ':' if lookbehind == Lookbehind::JointColon => Lookbehind::DoubleColon,
563 ':' if punct.spacing() == Spacing::Joint => Lookbehind::JointColon,
564 '#' => Lookbehind::Pound,
565 '!' if lookbehind == Lookbehind::Pound => Lookbehind::PoundBang,
566 _ => Lookbehind::Other,
567 };
568 expanded.extend(iter::once(TokenTree::Punct(punct)));
569 }
570 Some(other) => {
571 lookbehind = Lookbehind::Other;
572 expanded.extend(iter::once(other));
573 }
574 None => {
575 if !recurse {
576 increase_nonce();
577 }
578 return Ok(expanded);
579 }
580 }
581 }
582}
583
// Per-process counter distinguishing `unique_paste!` invocations; its hash
// is appended to every pasted identifier (see `unique_expand`).
// NOTE(review): `static mut` is unsound under concurrent access, and the
// `Lazy` wrapper is unnecessary for a plain `i32` — an `AtomicI32` would be
// the safe equivalent. Kept as-is because other code dereferences it.
static mut NONCE: Lazy<i32> = Lazy::new(|| 0);
585
/// Bumps the global nonce; invoked once per top-level `unique_paste!`
/// expansion (see the `None` arm of `unique_expand`).
fn increase_nonce() {
    // SAFETY-assumption: proc-macro expansion is single-threaded, so no
    // concurrent access to NONCE occurs — TODO confirm; an atomic would
    // remove the need for `unsafe` here.
    unsafe {
        *NONCE += 1;
    }
}
591
/// Hashes `t` with the standard library's `DefaultHasher` and returns the
/// 64-bit digest. Deterministic within a process for equal inputs.
fn calculate_hash<T: Hash>(t: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    Hash::hash(t, &mut hasher);
    hasher.finish()
}