#![doc = include_str!("./README.md")]

pub use self_rust_tokenize_derive::SelfRustTokenize;

pub mod helpers {
    pub use proc_macro2::{self, TokenStream};
    pub use quote::{quote, ToTokens as QuoteToTokens, TokenStreamExt};
}

use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::{ToTokens as QuoteToTokens, TokenStreamExt};
use std::ops::Deref;
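/// A value that can be turned back into the Rust tokens which construct it.
///
/// A minimal usage sketch relying on the `Vec<T>` and integer implementations in this
/// module (the spacing of the emitted tokens is illustrative, not guaranteed):
///
/// ```
/// use self_rust_tokenize::SelfRustTokenize;
///
/// let tokens = vec![1u8, 2, 3].to_tokens();
/// // `tokens` now spells out roughly `::std::vec![1u8, 2u8, 3u8]`
/// println!("{tokens}");
/// ```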
pub trait SelfRustTokenize {
    /// Returns a `TokenStream` of tokens that reconstruct `self`.
    fn to_tokens(&self) -> TokenStream {
        let mut ts = TokenStream::new();
        Self::append_to_token_stream(self, &mut ts);
        ts
    }

    /// Appends the tokens that reconstruct `self` to an existing `TokenStream`.
    fn append_to_token_stream(&self, token_stream: &mut TokenStream);
}

macro_rules! implement_using_quote_to_tokens {
    ($($T:ty),*) => {
        $(
            impl SelfRustTokenize for $T {
                fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
                    QuoteToTokens::to_tokens(self, token_stream)
                }
            }
        )*
    };
}

implement_using_quote_to_tokens!(
    u8,
    u16,
    u32,
    u64,
    u128,
    i8,
    i16,
    i32,
    i64,
    i128,
    f32,
    f64,
    char,
    bool,
    &'static str
);
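/// Appends a path such as `::std::boxed::Box::new` to the stream, one `Ident` per
/// segment with `::` puncts between (and, when `leading_colons` is set, before) them.
/// For example, `append_path(&["std", "vec"], ts, true)` appends the tokens `::std::vec`.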
fn append_path(segments: &[&'static str], token_stream: &mut TokenStream, leading_colons: bool) {
    for (idx, segment) in segments.iter().enumerate() {
        if leading_colons || idx != 0 {
            token_stream.append(Punct::new(':', Spacing::Joint));
            token_stream.append(Punct::new(':', Spacing::Alone));
        }
        token_stream.append(Ident::new(segment, Span::call_site()))
    }
}

impl<T: SelfRustTokenize> SelfRustTokenize for Box<T> {
    fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
        append_path(&["std", "boxed", "Box", "new"], token_stream, true);
        token_stream.append(Group::new(
            Delimiter::Parenthesis,
            Deref::deref(self).to_tokens(),
        ));
    }
}

impl<T> SelfRustTokenize for std::marker::PhantomData<T> {
    fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
        append_path(
            &["std", "marker", "PhantomData", "default"],
            token_stream,
            true,
        );
        token_stream.append(Group::new(Delimiter::Parenthesis, Default::default()));
    }
}

impl<T: SelfRustTokenize> SelfRustTokenize for Vec<T> {
    fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
        append_path(&["std", "vec"], token_stream, true);
        token_stream.append(Punct::new('!', Spacing::Alone));
        let mut inner_token_stream = TokenStream::default();
        for (idx, inner) in self.iter().enumerate() {
            inner.append_to_token_stream(&mut inner_token_stream);
            if idx != self.len() - 1 {
                inner_token_stream.append(Punct::new(',', Spacing::Alone));
            }
        }
        token_stream.append(Group::new(Delimiter::Bracket, inner_token_stream))
    }
}

impl<T: SelfRustTokenize> SelfRustTokenize for Option<T> {
    fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
        match self {
            Some(value) => {
                append_path(&["std", "option", "Option", "Some"], token_stream, true);
                token_stream.append(Group::new(
                    Delimiter::Parenthesis,
                    SelfRustTokenize::to_tokens(value),
                ))
            }
            None => {
                append_path(&["std", "option", "Option", "None"], token_stream, true);
            }
        }
    }
}

impl SelfRustTokenize for String {
    fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
        append_path(&["std", "string", "String", "from"], token_stream, true);
        let stream = TokenStream::from(TokenTree::from(Literal::string(self.as_str())));
        token_stream.append(Group::new(Delimiter::Parenthesis, stream))
    }
}

impl<T: SelfRustTokenize, const N: usize> SelfRustTokenize for [T; N] {
    fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
        let mut inner_token_stream = TokenStream::new();
        for (idx, inner) in self.iter().enumerate() {
            inner.append_to_token_stream(&mut inner_token_stream);
            if idx != self.len() - 1 {
                inner_token_stream.append(Punct::new(',', Spacing::Alone));
            }
        }
        token_stream.append(Group::new(Delimiter::Bracket, inner_token_stream));
    }
}

impl<T: SelfRustTokenize> SelfRustTokenize for [T] {
    fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
        token_stream.append(Punct::new('&', Spacing::Alone));
        let mut inner_token_stream = TokenStream::new();
        for (idx, inner) in self.iter().enumerate() {
            inner.append_to_token_stream(&mut inner_token_stream);
            if idx != self.len() - 1 {
                inner_token_stream.append(Punct::new(',', Spacing::Alone));
            }
        }
        token_stream.append(Group::new(Delimiter::Bracket, inner_token_stream));
    }
}

impl SelfRustTokenize for () {
    fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
        token_stream.append(Group::new(Delimiter::Parenthesis, Default::default()));
    }
}
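// Tuples are emitted with a trailing comma after every element (e.g. `(1u8,)` for a
// one-element tuple), which keeps single-element tuples from collapsing into a
// parenthesised expression.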
macro_rules! tuple_impls {
    ( $( $name:ident )+ ) => {
        impl<$($name: SelfRustTokenize),+> SelfRustTokenize for ($($name,)+)
        {
            fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
                #[allow(non_snake_case)]
                let ($($name,)+) = self;
                let mut inner_token_stream = TokenStream::new();
                $(
                    SelfRustTokenize::append_to_token_stream($name, &mut inner_token_stream);
                    inner_token_stream.append(Punct::new(',', Spacing::Alone));
                )*
                token_stream.append(Group::new(Delimiter::Parenthesis, inner_token_stream));
            }
        }
    };
}

tuple_impls! { A }
tuple_impls! { A B }
tuple_impls! { A B C }
tuple_impls! { A B C D }
tuple_impls! { A B C D E }
tuple_impls! { A B C D E F }
tuple_impls! { A B C D E F G }
tuple_impls! { A B C D E F G H }
tuple_impls! { A B C D E F G H I }
tuple_impls! { A B C D E F G H I J }
tuple_impls! { A B C D E F G H I J K }
tuple_impls! { A B C D E F G H I J K L }
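// With the `references` feature enabled, shared and mutable references tokenize as a
// leading `&` / `&mut` followed by the pointee's tokens, so for example `&1u8`
// round-trips as the expression `&1u8`.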
#[cfg(feature = "references")]
mod references {
    use super::{SelfRustTokenize, TokenStream};
    use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span};
    use quote::TokenStreamExt;

    impl<'a, T: SelfRustTokenize> SelfRustTokenize for &'a T {
        fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
            token_stream.append(Punct::new('&', Spacing::Alone));
            (*self).append_to_token_stream(token_stream);
        }
    }

    impl<'a, T: SelfRustTokenize> SelfRustTokenize for &'a mut T {
        fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
            token_stream.append(Punct::new('&', Spacing::Alone));
            token_stream.append(Ident::new("mut", Span::call_site()));
            (**self).append_to_token_stream(token_stream);
        }
    }

    impl<'a, T: SelfRustTokenize> SelfRustTokenize for &'a [T] {
        fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
            token_stream.append(Punct::new('&', Spacing::Alone));
            let mut inner_token_stream = TokenStream::new();
            for (idx, inner) in self.iter().enumerate() {
                inner.append_to_token_stream(&mut inner_token_stream);
                if idx != self.len() - 1 {
                    inner_token_stream.append(Punct::new(',', Spacing::Alone));
                }
            }
            token_stream.append(Group::new(Delimiter::Bracket, inner_token_stream));
        }
    }

    impl<'a, T: SelfRustTokenize> SelfRustTokenize for &'a mut [T] {
        fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
            token_stream.append(Punct::new('&', Spacing::Alone));
            token_stream.append(Ident::new("mut", Span::call_site()));
            let mut inner_token_stream = TokenStream::new();
            for (idx, inner) in self.iter().enumerate() {
                inner.append_to_token_stream(&mut inner_token_stream);
                if idx != self.len() - 1 {
                    inner_token_stream.append(Punct::new(',', Spacing::Alone));
                }
            }
            token_stream.append(Group::new(Delimiter::Bracket, inner_token_stream));
        }
    }
}
#[cfg(feature = "smallvec")]
impl<T: smallvec::Array> SelfRustTokenize for smallvec::SmallVec<T>
where
    T::Item: SelfRustTokenize,
{
    fn append_to_token_stream(&self, token_stream: &mut TokenStream) {
        append_path(&["smallvec", "smallvec"], token_stream, true);
        token_stream.append(Punct::new('!', Spacing::Alone));
        let mut inner_token_stream = TokenStream::new();
        for (idx, inner) in self.iter().enumerate() {
            inner.append_to_token_stream(&mut inner_token_stream);
            if idx != self.len() - 1 {
                inner_token_stream.append(Punct::new(',', Spacing::Alone));
            }
        }
        token_stream.append(Group::new(Delimiter::Bracket, inner_token_stream));
    }
}
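// Helpers for constructor bodies, intended for the code that `#[derive(SelfRustTokenize)]`
// generates. For a named struct they produce tokens shaped like `Path { field: <tokens>, ... }`,
// for tuple structs `Path(<tokens>, ...)`, and for unit structs just `Path`.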
#[doc(hidden)]
pub mod _private {
    use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream};
    use quote::TokenStreamExt;

    use crate::append_path;

    /// Appends `segments { name: item, ... }` construction tokens to `ts`.
    pub fn add_named_constructor_body(
        ts: &mut proc_macro2::TokenStream,
        segments: &[&'static str],
        items: Vec<(&'static str, TokenStream)>,
    ) {
        append_path(segments, ts, false);

        let mut arguments = TokenStream::new();
        for (name, item) in items.into_iter() {
            arguments.append(Ident::new(name, Span::call_site()));
            arguments.append(Punct::new(':', Spacing::Alone));
            arguments.extend(item);
            arguments.append(Punct::new(',', Spacing::Alone));
        }
        ts.append(Group::new(Delimiter::Brace, arguments));
    }

    /// Appends `segments(item, ...)` construction tokens to `ts`.
    pub fn add_unnamed_constructor_body(
        ts: &mut proc_macro2::TokenStream,
        segments: &[&'static str],
        items: Vec<TokenStream>,
    ) {
        append_path(segments, ts, false);

        let mut arguments = TokenStream::new();
        for item in items.into_iter() {
            arguments.extend(item);
            arguments.append(Punct::new(',', Spacing::Alone));
        }
        ts.append(Group::new(Delimiter::Parenthesis, arguments));
    }

    /// Appends just the path `segments` (a unit struct or unit variant) to `ts`.
    pub fn add_unit_constructor_body(ts: &mut proc_macro2::TokenStream, segments: &[&'static str]) {
        append_path(segments, ts, false);
    }
}