// reifydb_macro_impl/generate.rs
// SPDX-License-Identifier: Apache-2.0
// Copyright (c) 2025 ReifyDB

//! Token generation helpers for building TokenStreams without external dependencies.

use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
7
8/// Creates an identifier token.
9pub fn ident(name: &str) -> TokenTree {
10	TokenTree::Ident(Ident::new(name, Span::call_site()))
11}
12
13/// Creates an underscore token for type inference placeholders.
14/// This is needed because Ident::new("_") panics in proc_macro v1.
15pub fn underscore() -> TokenTree {
16	"_".parse::<TokenStream>().unwrap().into_iter().next().unwrap()
17}
18
19/// Creates an identifier token, handling raw identifiers (r#keyword).
20#[allow(dead_code)]
21pub fn ident_raw(name: &str) -> TokenTree {
22	if let Some(stripped) = name.strip_prefix("r#") {
23		TokenTree::Ident(Ident::new_raw(stripped, Span::call_site()))
24	} else {
25		TokenTree::Ident(Ident::new(name, Span::call_site()))
26	}
27}
28
29/// Creates a punctuation token with Alone spacing.
30pub fn punct(ch: char) -> TokenTree {
31	TokenTree::Punct(Punct::new(ch, Spacing::Alone))
32}
33
34/// Creates a punctuation token with Joint spacing (for multi-char punctuation like ::).
35pub fn punct_joint(ch: char) -> TokenTree {
36	TokenTree::Punct(Punct::new(ch, Spacing::Joint))
37}
38
39/// Creates a string literal token.
40pub fn literal_str(s: &str) -> TokenTree {
41	TokenTree::Literal(Literal::string(s))
42}
43
44/// Creates an integer literal token.
45pub fn literal_usize(n: usize) -> TokenTree {
46	TokenTree::Literal(Literal::usize_unsuffixed(n))
47}
48
49/// Creates a group (delimited tokens).
50pub fn group(delimiter: Delimiter, tokens: impl IntoIterator<Item = TokenTree>) -> TokenTree {
51	TokenTree::Group(Group::new(delimiter, tokens.into_iter().collect()))
52}
53
54/// Creates parentheses group: (tokens)
55pub fn parens(tokens: impl IntoIterator<Item = TokenTree>) -> TokenTree {
56	group(Delimiter::Parenthesis, tokens)
57}
58
59/// Creates brace group: {tokens}
60pub fn braces(tokens: impl IntoIterator<Item = TokenTree>) -> TokenTree {
61	group(Delimiter::Brace, tokens)
62}
63
64/// Creates bracket group: [tokens]
65pub fn brackets(tokens: impl IntoIterator<Item = TokenTree>) -> TokenTree {
66	group(Delimiter::Bracket, tokens)
67}
68
69/// Emits `::` (path separator).
70pub fn path_sep() -> impl Iterator<Item = TokenTree> {
71	[punct_joint(':'), punct(':')].into_iter()
72}
73
74/// Emits a path like `::reifydb_type::FromFrame`.
75/// Pass an empty first segment for a global path (::foo::bar).
76pub fn path(segments: &[&str]) -> Vec<TokenTree> {
77	let mut tokens = Vec::new();
78	let mut prev_was_empty = false;
79
80	for (i, seg) in segments.iter().enumerate() {
81		// Determine if we need to emit ::
82		// - First segment: emit :: only if segment is empty (global path marker)
83		// - Later segments: emit :: only if previous segment was NOT empty
84		let need_sep = if i == 0 {
85			seg.is_empty() // Leading :: for global paths
86		} else {
87			!prev_was_empty // Add :: only if prev was not the empty global marker
88		};
89
90		if need_sep {
91			tokens.extend(path_sep());
92		}
93
94		if !seg.is_empty() {
95			tokens.push(ident(seg));
96		}
97
98		prev_was_empty = seg.is_empty();
99	}
100	tokens
101}
102
103/// Emits `->` (return type arrow).
104pub fn arrow() -> impl Iterator<Item = TokenTree> {
105	[punct_joint('-'), punct('>')].into_iter()
106}
107
108/// Emits `=>` (match arm arrow).
109pub fn fat_arrow() -> impl Iterator<Item = TokenTree> {
110	[punct_joint('='), punct('>')].into_iter()
111}
112
113/// Emits `<` and `>` around tokens for generics.
114#[allow(dead_code)]
115pub fn generics(inner: impl IntoIterator<Item = TokenTree>) -> Vec<TokenTree> {
116	let mut tokens = vec![punct('<')];
117	tokens.extend(inner);
118	tokens.push(punct('>'));
119	tokens
120}
121
122/// Convenience: extends a Vec with an iterator.
123#[allow(dead_code)]
124pub fn extend(tokens: &mut Vec<TokenTree>, iter: impl IntoIterator<Item = TokenTree>) {
125	tokens.extend(iter);
126}
127
128/// Creates a compile_error!("message") invocation.
129pub fn compile_error(message: &str) -> TokenStream {
130	let tokens = vec![ident("compile_error"), punct('!'), parens([literal_str(message)])];
131	tokens.into_iter().collect()
132}