tracy_gizmos_attributes/
lib.rs

#![deny(missing_docs)]

//! A procedural macro attribute for instrumenting functions with
//! [`tracy-gizmos`] zones.
//!
//! ## Usage
//!
//! In your `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! tracy-gizmos-attributes = "0.0.1"
//! ```
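//!
//! Note that the generated code refers to [`tracy-gizmos`] macros
//! directly, so `tracy-gizmos` itself must also be a dependency of the
//! instrumented crate.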
//!
//! The [`#[instrument]`][instrument] attribute can now be added to a
//! function to automatically create and enter a `tracy-gizmos` [zone]
//! when that function is called. For example:
//!
//! ```no_run
//! #[tracy_gizmos_attributes::instrument]
//! fn work() {
//!     // do stuff
//! }
//! ```
//!
//! [`tracy-gizmos`]: https://crates.io/crates/tracy-gizmos
//! [zone]: https://docs.rs/tracy-gizmos/latest/tracy_gizmos/struct.Zone.html
//! [instrument]: macro@self::instrument

use proc_macro::{
	TokenStream,
	TokenTree,
	Span,
	Delimiter,
	Spacing,
	Group,
	Ident,
	Literal,
	Punct,
};

/// Instruments a function to create and start a profiling capture
/// session.
///
/// The session ends automatically at the end of the function's scope.
///
/// *Note*: This will also [`macro@instrument`] the function automatically.
///
/// ## Examples
///
/// ```
/// # use tracy_gizmos_attributes::{capture, instrument};
/// #[capture]
/// fn main() {
///     work();
/// }
///
/// #[instrument]
/// fn work() {
///     // do stuff
/// }
/// ```
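///
/// ## Expansion
///
/// Roughly (a sketch, not the exact token-for-token output), the
/// attribute rewrites the function body to start the capture and open
/// a zone before the original code runs:
///
/// ```ignore
/// fn main() {
///     let _tracy = ::tracy_gizmos::start_capture();
///     ::tracy_gizmos::zone!("main");
///     work();
/// }
/// ```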
#[proc_macro_attribute]
pub fn capture(_attr: TokenStream, item: TokenStream) -> TokenStream {
	// Cloning a `TokenStream` is cheap since it's reference counted
	// internally.
	let with_capture = try_capture(item.clone());
	// We chain the error with the original item, so that only our
	// compilation error is reported and we avoid the cascade of
	// errors that would be caused by the original item being skipped.
	match with_capture {
		Ok(item) => item,
		Err(e)   => TokenStream::from_iter(e.to_compile_error().into_iter().chain(item)),
	}
}

fn try_capture(item: TokenStream) -> Result<TokenStream, Error> {
	let mut tokens: Vec<TokenTree> = item.into_iter().collect();
	let mut tokens_it              = tokens.iter();

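	// Walk over everything that precedes `fn` (visibility, `unsafe`,
	// ABI, etc.) and reject the qualifiers we can't support.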
	for t in tokens_it.by_ref() {
		if let TokenTree::Ident(i) = t {
			match i.to_string().as_str() {
				"const" => return Err(Error::new("Const functions can't be a capture scope.", t.span())),
				// Could be supported when fibers are implemented. Then, we can
				// just generate a fiber-zone or whatever.
				"async" => return Err(Error::new("Async functions can't be a capture scope, yet.", t.span())),
				"fn"    => break,
				_       => continue,
			}
		}
	}

	// Here, either the iterator is empty or we've just consumed the
	// `fn` and are ready to get the function name.

	let Some(TokenTree::Ident(i)) = tokens_it.next() else {
		let span = tokens.first().unwrap().span();
		return Err(Error::new("Only functions can be a capture scope.", span));
	};

	let name = i.to_string();
	// The r# prefix only matters to rustc; the Tracy zone name can be
	// anything.
	let name = name.strip_prefix("r#").unwrap_or(&name);

	// The function body should be the last token tree.
	let body = match tokens.pop() {
		Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace => g,
		Some(t) => return Err(Error::new("Function without a body can't be a capture scope.", t.span())),
		// `tokens` is never empty here: it contains at least `fn` and the name.
		_ => unreachable!(),
	};

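	// The rewritten body is, effectively:
	// {
	//     let _tracy = ::tracy_gizmos::start_capture();
	//     ::tracy_gizmos::zone!("<function name>");
	//     <original body>
	// }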
	let augmented_body = vec![
			make_start_capture(),
			// This must go strictly *after* the capture start; the
			// behaviour is undefined otherwise.
			make_zone(name),
			body.stream(),
		]
		.into_iter()
		.collect();
	tokens.push(TokenTree::Group(Group::new(Delimiter::Brace, augmented_body)));

	Ok(TokenStream::from_iter(tokens))
}

/// Instruments a function to create and enter a zone every time the
/// function is called.
///
/// The generated zone's name will be the name of the function.
///
/// ## Examples
///
/// ```
/// # use tracy_gizmos_attributes::instrument;
/// #[instrument]
/// fn work() {
///     // do stuff
/// }
/// ```
///
/// ### Zone customization
///
/// The generated zone's name can be prefixed:
///
/// ```
/// # use tracy_gizmos_attributes::instrument;
/// #[instrument("Heavy")]
/// fn work() {
///     // will contain a zone named "Heavy::work"
/// }
/// ```
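///
/// The prefix must be given as a plain string literal; any other
/// attribute input is ignored.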
///
/// ### Unsupported cases
///
/// `const fn` cannot be instrumented, and will result in a compilation
/// failure:
///
/// ```compile_fail
/// # use tracy_gizmos_attributes::instrument;
/// #[instrument]
/// const fn work() {
///     // do stuff
/// }
/// ```
///
/// `async fn` cannot be instrumented, *yet*, and will result in a
/// compilation failure:
///
/// ```compile_fail
/// # use tracy_gizmos_attributes::instrument;
/// #[instrument]
/// async fn work() {
///     // do stuff
/// }
/// ```
#[proc_macro_attribute]
pub fn instrument(attr: TokenStream, item: TokenStream) -> TokenStream {
	// Cloning a `TokenStream` is cheap since it's reference counted
	// internally.
	let instrumented = try_instrument(attr, item.clone());
	// We chain the error with the original item, so that only our
	// compilation error is reported and we avoid the cascade of
	// errors that would be caused by the original item being skipped.
	match instrumented {
		Ok(item) => item,
		Err(e)   => TokenStream::from_iter(e.to_compile_error().into_iter().chain(item)),
	}
}

fn try_instrument(attr: TokenStream, item: TokenStream) -> Result<TokenStream, Error> {
	// Function item's grammar:
	// https://doc.rust-lang.org/reference/items/functions.html
	// Put simply, it boils down to:
	// ... const? async? fn $name:ident ... {}?
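	//
	// For example, `#[instrument("Heavy")] fn work() { .. }` is
	// rewritten as `fn work() { ::tracy_gizmos::zone!("Heavy::work"); .. }`.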

	// An optional zone name prefix can be passed as a string literal,
	// e.g. `#[instrument("Heavy")]`.
	let prefix = if let Some(TokenTree::Literal(s)) = attr.into_iter().next() {
		Some(s.to_string())
	} else {
		None
	};
	let prefix = prefix.as_ref().and_then(|p| try_parse_str_literal(p));

	let mut tokens: Vec<TokenTree> = item.into_iter().collect();
	let mut tokens_it              = tokens.iter();

	for t in tokens_it.by_ref() {
		if let TokenTree::Ident(i) = t {
			match i.to_string().as_str() {
				"const" => return Err(Error::new("Const functions can't be instrumented.", t.span())),
				// Could be supported when fibers are implemented. Then, we can
				// just generate a fiber-zone or whatever.
				"async" => return Err(Error::new("Async functions can't be instrumented, yet.", t.span())),
				"fn"    => break,
				_       => continue,
			}
		}
	}

	// Here, either the iterator is empty or we've just consumed the
	// `fn` and are ready to get the function name.

	let Some(TokenTree::Ident(i)) = tokens_it.next() else {
		let span = tokens.first().unwrap().span();
		return Err(Error::new("Only functions can be instrumented.", span));
	};

	let name = i.to_string();
	// The r# prefix only matters to rustc; the Tracy zone name can be
	// anything.
	let name = name.strip_prefix("r#").unwrap_or(&name);

	let prefixed_name = prefix.map(|p| format!("{p}::{name}"));
	let name = prefixed_name.as_deref().unwrap_or(name);

	// The function body should be the last token tree.
	let body = match tokens.pop() {
		Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace => g,
		Some(t) => return Err(Error::new("Function without a body can't be instrumented.", t.span())),
		// `tokens` is never empty here: it contains at least `fn` and the name.
		_ => unreachable!(),
	};

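	// The rewritten body is `{ ::tracy_gizmos::zone!("<name>"); <original body> }`.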
	let instrumented_body = vec![make_zone(name), body.stream()]
		.into_iter()
		.collect();
	tokens.push(TokenTree::Group(Group::new(Delimiter::Brace, instrumented_body)));

	Ok(TokenStream::from_iter(tokens))
}

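// Strips the surrounding quotes from a plain `"..."` string literal's
// token text. Returns `None` for anything else (e.g. raw strings or
// non-string literals), in which case no prefix is applied.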
fn try_parse_str_literal(s: &str) -> Option<&str> {
	let s = s.as_bytes();
	if s.len() >= 2 && s[0] == b'"' {
		// SAFETY: The bytes come from a valid UTF-8 string and we only
		// strip the ASCII quotes, so the slice is still valid UTF-8.
		Some(unsafe { std::str::from_utf8_unchecked(&s[1..s.len() - 1]) })
	} else {
		None
	}
}

// let _tracy = ::tracy_gizmos::start_capture();
fn make_start_capture() -> TokenStream {
	TokenStream::from_iter([
		TokenTree::Ident(Ident::new("let",    Span::call_site())),
		TokenTree::Ident(Ident::new("_tracy", Span::mixed_site())),
		TokenTree::Punct(Punct::new('=', Spacing::Alone)),
		TokenTree::Punct(Punct::new(':', Spacing::Joint)),
		TokenTree::Punct(Punct::new(':', Spacing::Alone)),
		TokenTree::Ident(Ident::new("tracy_gizmos", Span::call_site())),
		TokenTree::Punct(Punct::new(':', Spacing::Joint)),
		TokenTree::Punct(Punct::new(':', Spacing::Alone)),
		TokenTree::Ident(Ident::new("start_capture", Span::call_site())),
		TokenTree::Group(
			Group::new(
				Delimiter::Parenthesis,
				TokenStream::new(),
			)
		),
		TokenTree::Punct(Punct::new(';', Spacing::Alone)),
	])
}

// ::tracy_gizmos::zone!($text);
fn make_zone(name: &str) -> TokenStream {
	TokenStream::from_iter([
		TokenTree::Punct(Punct::new(':', Spacing::Joint)),
		TokenTree::Punct(Punct::new(':', Spacing::Alone)),
		TokenTree::Ident(Ident::new("tracy_gizmos", Span::call_site())),
		TokenTree::Punct(Punct::new(':', Spacing::Joint)),
		TokenTree::Punct(Punct::new(':', Spacing::Alone)),
		TokenTree::Ident(Ident::new("zone", Span::call_site())),
		TokenTree::Punct(Punct::new('!', Spacing::Alone)),
		TokenTree::Group(
			Group::new(
				Delimiter::Parenthesis,
				TokenStream::from_iter([
					TokenTree::Literal(Literal::string(name)),
				])
			)
		),
		TokenTree::Punct(Punct::new(';', Spacing::Alone)),
	])
}

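// A minimal error type: a message plus the spans used to point the
// generated `compile_error!` at the offending tokens.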
struct Error {
	text:  &'static str,
	start: Span,
	end:   Span,
}

impl Error {
	fn new(text: &'static str, s: Span) -> Self {
		Self { text, start: s, end: s }
	}

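	// Expands to `::core::compile_error! { "<text>" }`, with the path
	// spanned at `start` and the message group at `end`, so the
	// diagnostic points at the offending code.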
	fn to_compile_error(&self) -> TokenStream {
		fn punct(c: char, s: Spacing, span: Span) -> TokenTree {
			TokenTree::Punct({
				let mut p = Punct::new(c, s);
				p.set_span(span);
				p
			})
		}

		TokenStream::from_iter([
			punct(':', Spacing::Joint, self.start),
			punct(':', Spacing::Alone, self.start),
			TokenTree::Ident(Ident::new("core", self.start)),
			punct(':', Spacing::Joint, self.start),
			punct(':', Spacing::Alone, self.start),
			TokenTree::Ident(Ident::new("compile_error", self.start)),
			punct('!', Spacing::Alone, self.start),
			TokenTree::Group({
				let mut g = Group::new(
					Delimiter::Brace,
					TokenStream::from_iter([
						TokenTree::Literal({
							let mut s = Literal::string(self.text);
							s.set_span(self.end);
							s
						})
					])
				);
				g.set_span(self.end);
				g
			}),
		])
	}
}