1#![deny(missing_docs)]
2
3use proc_macro::{
31 TokenStream,
32 TokenTree,
33 Span,
34 Delimiter,
35 Spacing,
36 Group,
37 Ident,
38 Literal,
39 Punct,
40};
41
42#[proc_macro_attribute]
64pub fn capture(_attr: TokenStream, item: TokenStream) -> TokenStream {
65 let with_capture = try_capture(item.clone());
68 match with_capture {
72 Ok(item) => item,
73 Err(e) => TokenStream::from_iter(e.to_compile_error().into_iter().chain(item)),
74 }
75}
76
77fn try_capture(item: TokenStream) -> Result<TokenStream, Error> {
78 let mut tokens: Vec<TokenTree> = item.into_iter().collect();
79 let mut tokens_it = tokens.iter();
80
81 for t in tokens_it.by_ref() {
82 if let TokenTree::Ident(i) = t {
83 match i.to_string().as_str() {
84 "const" => return Err(Error::new("Const functions can't be a capture scope.", t.span())),
85 "async" => return Err(Error::new("Async functions can't be a capture scope, yet.", t.span())),
88 "fn" => break,
89 _ => continue,
90 }
91 }
92 }
93
94 let Some(TokenTree::Ident(i)) = tokens_it.next() else {
98 let span = tokens.first().unwrap().span();
99 return Err(Error::new("Only functions can be a capture scope.", span));
100 };
101
102 let name = i.to_string();
103 let name = name.strip_prefix("r#").unwrap_or(&name);
106
107 let body = match tokens.pop() {
109 Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace => g,
110 Some(t) => return Err(Error::new("Function without a body can't be a capture scope.", t.span())),
111 _ => unreachable!(),
112 };
113
114 let augmented_body = vec![
115 make_start_capture(),
116 make_zone(name),
119 body.stream(),
120 ]
121 .into_iter()
122 .collect();
123 tokens.push(TokenTree::Group(Group::new(Delimiter::Brace, augmented_body)));
124
125 Ok(TokenStream::from_iter(tokens))
126}
127
128#[proc_macro_attribute]
179pub fn instrument(attr: TokenStream, item: TokenStream) -> TokenStream {
180 let instrumented = try_instrument(attr, item.clone());
183 match instrumented {
187 Ok(item) => item,
188 Err(e) => TokenStream::from_iter(e.to_compile_error().into_iter().chain(item)),
189 }
190}
191
192fn try_instrument(attr: TokenStream, item: TokenStream) -> Result<TokenStream, Error> {
193 let prefix = if let Some(TokenTree::Literal(s)) = attr.into_iter().next() {
199 Some(s.to_string())
200 } else {
201 None
202 };
203 let prefix = prefix.as_ref().and_then(|p| try_parse_str_literal(p));
204
205 let mut tokens: Vec<TokenTree> = item.into_iter().collect();
206 let mut tokens_it = tokens.iter();
207
208 for t in tokens_it.by_ref() {
209 if let TokenTree::Ident(i) = t {
210 match i.to_string().as_str() {
211 "const" => return Err(Error::new("Const functions can't be instrumented.", t.span())),
212 "async" => return Err(Error::new("Async functions can't be instrumented, yet.", t.span())),
215 "fn" => break,
216 _ => continue,
217 }
218 }
219 }
220
221 let Some(TokenTree::Ident(i)) = tokens_it.next() else {
225 let span = tokens.first().unwrap().span();
226 return Err(Error::new("Only functions can be instrumented.", span));
227 };
228
229 let name = i.to_string();
230 let name = name.strip_prefix("r#").unwrap_or(&name);
233
234 let prefixed_name = prefix.map(|p| format!("{p}::{name}"));
235 let name = if let Some(ref name) = prefixed_name {
236 name
237 } else {
238 name
239 };
240
241 let body = match tokens.pop() {
243 Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace => g,
244 Some(t) => return Err(Error::new("Function without a body can't be instrumented.", t.span())),
245 _ => unreachable!(),
246 };
247
248 let instrumented_body = vec![make_zone(name), body.stream()]
249 .into_iter()
250 .collect();
251 tokens.push(TokenTree::Group(Group::new(Delimiter::Brace, instrumented_body)));
252
253 Ok(TokenStream::from_iter(tokens))
254}
255
/// Extracts the contents of a plain `"..."` string literal token.
///
/// Returns `None` for anything not wrapped in double quotes on *both*
/// ends, including an unterminated leading quote. Raw strings (`r"..."`)
/// and suffixed literals are not recognized — their token text doesn't
/// start with `"`. Escape sequences in the returned slice are left
/// verbatim, exactly as they appear in the source.
fn try_parse_str_literal(s: &str) -> Option<&str> {
    // `strip_prefix`/`strip_suffix` can't slice through a UTF-8 boundary
    // (unlike the previous manual byte indexing + from_utf8_unchecked),
    // and they reject a missing closing quote instead of silently eating
    // the last character.
    s.strip_prefix('"')?.strip_suffix('"')
}
265
266fn make_start_capture() -> TokenStream {
268 TokenStream::from_iter([
269 TokenTree::Ident(Ident::new("let", Span::call_site())),
270 TokenTree::Ident(Ident::new("_tracy", Span::mixed_site())),
271 TokenTree::Punct(Punct::new('=', Spacing::Alone)),
272 TokenTree::Punct(Punct::new(':', Spacing::Joint)),
273 TokenTree::Punct(Punct::new(':', Spacing::Alone)),
274 TokenTree::Ident(Ident::new("tracy_gizmos", Span::call_site())),
275 TokenTree::Punct(Punct::new(':', Spacing::Joint)),
276 TokenTree::Punct(Punct::new(':', Spacing::Alone)),
277 TokenTree::Ident(Ident::new("start_capture", Span::call_site())),
278 TokenTree::Group(
279 Group::new(
280 Delimiter::Parenthesis,
281 TokenStream::new(),
282 )
283 ),
284 TokenTree::Punct(Punct::new(';', Spacing::Alone)),
285 ])
286}
287
288fn make_zone(name: &str) -> TokenStream {
290 TokenStream::from_iter([
291 TokenTree::Punct(Punct::new(':', Spacing::Joint)),
292 TokenTree::Punct(Punct::new(':', Spacing::Alone)),
293 TokenTree::Ident(Ident::new("tracy_gizmos", Span::call_site())),
294 TokenTree::Punct(Punct::new(':', Spacing::Joint)),
295 TokenTree::Punct(Punct::new(':', Spacing::Alone)),
296 TokenTree::Ident(Ident::new("zone", Span::call_site())),
297 TokenTree::Punct(Punct::new('!', Spacing::Alone)),
298 TokenTree::Group(
299 Group::new(
300 Delimiter::Parenthesis,
301 TokenStream::from_iter([
302 TokenTree::Literal(Literal::string(name)),
303 ])
304 )
305 ),
306 TokenTree::Punct(Punct::new(';', Spacing::Alone)),
307 ])
308}
309
/// A macro-expansion failure: a message plus the source span(s) the
/// resulting `compile_error!` should be attributed to.
struct Error {
    text: &'static str,
    start: Span, // span used for the leading path tokens of the report
    end: Span,   // span used for the message group of the report
}
315
impl Error {
    /// Creates an error whose `start` and `end` point at the same span.
    fn new(text: &'static str, s: Span) -> Self {
        Self { text, start: s, end: s }
    }

    /// Renders this error as a `::core::compile_error! { "..." }`
    /// invocation. The path/bang tokens carry `start` and the message
    /// group carries `end`, so rustc underlines the offending source.
    /// The brace-delimited form needs no trailing `;`, so it is valid
    /// in item position.
    fn to_compile_error(&self) -> TokenStream {
        // Helper: a punctuation token with an explicit span attached.
        fn punct(c: char, s: Spacing, span: Span) -> TokenTree {
            TokenTree::Punct({
                let mut p = Punct::new(c, s);
                p.set_span(span);
                p
            })
        }

        TokenStream::from_iter([
            punct(':', Spacing::Joint, self.start),
            punct(':', Spacing::Alone, self.start),
            TokenTree::Ident(Ident::new("core", self.start)),
            punct(':', Spacing::Joint, self.start),
            punct(':', Spacing::Alone, self.start),
            TokenTree::Ident(Ident::new("compile_error", self.start)),
            punct('!', Spacing::Alone, self.start),
            TokenTree::Group({
                let mut g = Group::new(
                    Delimiter::Brace,
                    TokenStream::from_iter([
                        TokenTree::Literal({
                            let mut s = Literal::string(self.text);
                            s.set_span(self.end);
                            s
                        })
                    ])
                );
                g.set_span(self.end);
                g
            }),
        ])
    }
}