tagged_dispatch/lib.rs
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]

#[cfg(not(feature = "std"))]
extern crate alloc;

use core::marker::PhantomData;

#[cfg(not(feature = "std"))]
use alloc::boxed::Box;
#[cfg(feature = "std")]
use std::boxed::Box;

// Re-export the macro
pub use tagged_dispatch_macros::tagged_dispatch;

// Re-export allocator crates when their features are enabled
#[cfg(feature = "allocator-bumpalo")]
pub use bumpalo;

#[cfg(feature = "allocator-typed-arena")]
pub use typed_arena;

/// The core tagged pointer type used internally.
///
/// Uses the top 7 bits of a 64-bit pointer for type tagging,
/// supporting up to 128 different types while maintaining an 8-byte size.
///
/// # Platform Optimizations
///
/// On Apple Silicon (macOS ARM64), this implementation leverages the hardware's
/// Top Byte Ignore (TBI) feature. TBI allows the processor to automatically
/// ignore the top byte of pointers during memory access, eliminating the need
/// for manual masking operations. This provides a measurable performance
/// improvement by reducing instructions on the critical path of every trait
/// method dispatch.
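///
/// # Example
///
/// A minimal round-trip sketch; it assumes a pointer whose top seven bits
/// are clear, which holds for ordinary user-space stack and heap addresses:
///
/// ```
/// use tagged_dispatch::TaggedPtr;
///
/// let mut value = 42u32;
/// let tagged = TaggedPtr::new(&mut value as *mut u32, 3);
/// assert_eq!(tagged.tag(), 3);
/// assert_eq!(unsafe { *tagged.as_ref() }, 42);
/// ```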
#[repr(transparent)]
pub struct TaggedPtr<T> {
    ptr: usize,
    _phantom: PhantomData<T>,
}

impl<T> TaggedPtr<T> {
    const TAG_BITS: usize = 7;
    const TAG_SHIFT: usize = 64 - Self::TAG_BITS;
    const TAG_MASK: usize = ((1 << Self::TAG_BITS) - 1) << Self::TAG_SHIFT;
    #[cfg(not(all(target_os = "macos", target_arch = "aarch64")))]
    const PTR_MASK: usize = !Self::TAG_MASK;

    /// Maximum number of variants supported (2^7 = 128)
    pub const MAX_VARIANTS: usize = 1 << Self::TAG_BITS;

    /// Create a new tagged pointer
    #[inline(always)]
    pub fn new(ptr: *mut T, tag: u8) -> Self {
        debug_assert!(
            tag < Self::MAX_VARIANTS as u8,
            "Tag must be less than 128 (7 bits)"
        );

        let addr = ptr as usize;
        debug_assert_eq!(
            addr & Self::TAG_MASK,
            0,
            "Pointer already has high bits set!"
        );

        Self {
            ptr: addr | ((tag as usize) << Self::TAG_SHIFT),
            _phantom: PhantomData,
        }
    }

    /// Get the tag value
    #[inline(always)]
    pub fn tag(&self) -> u8 {
        ((self.ptr & Self::TAG_MASK) >> Self::TAG_SHIFT) as u8
    }

    /// Get the untagged pointer.
    ///
    /// # Safety
    /// The returned pointer is only valid if the original pointer passed to `new` is still valid.
    ///
    /// # Platform Optimization
    /// On macOS ARM64 (Apple Silicon), this method leverages the hardware's Top Byte Ignore (TBI)
    /// feature, which automatically masks the top byte during memory access. This eliminates the
    /// need for software masking, providing a performance improvement.
    #[cfg(all(target_os = "macos", target_arch = "aarch64"))]
    #[inline(always)]
    pub fn ptr(&self) -> *mut T {
        self.ptr as *mut T
    }

    /// Get the untagged pointer (standard implementation).
    ///
    /// # Safety
    /// The returned pointer is only valid if the original pointer passed to `new` is still valid.
    #[cfg(not(all(target_os = "macos", target_arch = "aarch64")))]
    #[inline(always)]
    pub fn ptr(&self) -> *mut T {
        // Standard implementation: manually mask off the tag bits
        (self.ptr & Self::PTR_MASK) as *mut T
    }

    /// Get the untagged pointer for deallocation.
    ///
    /// This always masks off the tag bits, even on platforms with TBI support,
    /// because memory allocators require the original untagged pointer.
    ///
    /// # Safety
    /// The returned pointer is only valid if the original pointer passed to `new` is still valid.
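    ///
    /// # Example
    ///
    /// A sketch of the dealloc path this method exists for (the method is
    /// `#[doc(hidden)]` and intended mainly for macro-generated code):
    ///
    /// ```
    /// use tagged_dispatch::TaggedPtr;
    ///
    /// let ptr = Box::into_raw(Box::new(7u32));
    /// let tagged = TaggedPtr::new(ptr, 5);
    /// // Deallocate through the *untagged* pointer, never the tagged one.
    /// unsafe { drop(Box::from_raw(tagged.untagged_ptr())); }
    /// ```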
    #[doc(hidden)]
    #[inline(always)]
    pub fn untagged_ptr(&self) -> *mut T {
        // Always mask off the tag, even on TBI platforms: allocators need the
        // original address, not the tagged one.
        (self.ptr & !Self::TAG_MASK) as *mut T
    }

    /// Get a reference to the pointed value.
    ///
    /// # Safety
    /// The caller must ensure that:
    /// - The pointer is valid and points to a properly initialized `T`
    /// - The pointed-to value is not being concurrently mutated
    #[inline(always)]
    pub unsafe fn as_ref(&self) -> &T {
        unsafe { &*self.ptr() }
    }

    /// Get a mutable reference to the pointed value.
    ///
    /// # Safety
    /// The caller must ensure that:
    /// - The pointer is valid and points to a properly initialized `T`
    /// - No other references to the pointed-to value exist
    #[inline(always)]
    pub unsafe fn as_mut(&mut self) -> &mut T {
        unsafe { &mut *self.ptr() }
    }

    /// Check if the pointer is null (ignoring the tag)
    #[inline(always)]
    pub fn is_null(&self) -> bool {
        // Mask explicitly rather than using `ptr()`: on TBI platforms `ptr()`
        // keeps the tag bits, so a null pointer with a nonzero tag would
        // otherwise not compare equal to 0.
        self.untagged_ptr() as usize == 0
    }
}

// Safety: TaggedPtr is Send/Sync if T is Send/Sync
unsafe impl<T: Send> Send for TaggedPtr<T> {}
unsafe impl<T: Sync> Sync for TaggedPtr<T> {}

impl<T> Clone for TaggedPtr<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for TaggedPtr<T> {}

impl<T> core::fmt::Debug for TaggedPtr<T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("TaggedPtr")
            .field("tag", &self.tag())
            .field("ptr", &format_args!("{:p}", self.ptr()))
            .finish()
    }
}

impl<T> core::cmp::PartialEq for TaggedPtr<T> {
    fn eq(&self, other: &Self) -> bool {
        // Compare the raw pointer values (tag + address)
        self.ptr == other.ptr
    }
}

impl<T> core::cmp::Eq for TaggedPtr<T> {}

impl<T> core::cmp::PartialOrd for TaggedPtr<T> {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl<T> core::cmp::Ord for TaggedPtr<T> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        // Compare the raw pointer values (tag is in high bits, so this
        // naturally orders by tag first, then by address)
        self.ptr.cmp(&other.ptr)
    }
}

/// Allocator trait for arena-allocated tagged pointers.
///
/// This trait should be implemented by arena allocators to enable
/// the arena version of tagged dispatch.
///
/// # Example
///
/// ```ignore
/// // Requires the `allocator-bumpalo` feature, which implements
/// // `TaggedAllocator` for `bumpalo::Bump`.
/// use tagged_dispatch::TaggedAllocator;
/// use bumpalo::Bump;
///
/// let arena = Bump::new();
/// // Call through the trait to get a raw pointer suitable for tagging
/// // (Bump's inherent `alloc` returns `&mut T`, not `*mut T`).
/// let ptr: *mut i32 = TaggedAllocator::alloc(&arena, 42);
/// ```
pub trait TaggedAllocator {
    /// Allocate space for a value and return a pointer to it.
    ///
    /// The allocated memory should have the same lifetime as the allocator.
    fn alloc<T>(&self, value: T) -> *mut T;
}

// Implement TaggedAllocator for common arena allocators when their features are enabled

#[cfg(feature = "allocator-bumpalo")]
impl TaggedAllocator for bumpalo::Bump {
    #[inline]
    fn alloc<T>(&self, value: T) -> *mut T {
        // `Bump::alloc` returns `&mut T`; cast to the raw pointer the trait promises.
        bumpalo::Bump::alloc(self, value) as *mut T
    }
}

// Note: typed_arena doesn't implement TaggedAllocator directly
// because it can only allocate values of a single type T.
// Instead, the arena builder pattern generates separate arenas
// for each variant type when typed_arena is enabled.
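// For illustration only, a sketch of that generated shape (the names are
// hypothetical, not items in this crate):
//
//     struct ShapeArenas {
//         circles: typed_arena::Arena<Circle>,
//         squares: typed_arena::Arena<Square>,
//     }
//
// where each variant type gets its own single-type arena.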

/// Statistics for arena memory usage.
#[derive(Debug, Clone, Copy, Default)]
pub struct ArenaStats {
    /// Total bytes currently allocated
    pub allocated_bytes: usize,
    /// Total capacity of all chunks
    pub chunk_capacity: usize,
}

/// Trait for arena builders generated by the macro.
///
/// Provides memory management capabilities for arena-allocated
/// tagged dispatch types.
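///
/// # Example
///
/// A hedged sketch against a hypothetical macro-generated builder
/// (`ShapeArena` is illustrative, not an item in this crate):
///
/// ```ignore
/// use tagged_dispatch::ArenaBuilder;
///
/// let mut arena = ShapeArena::new();
/// // ... allocate tagged values from `arena` ...
/// let stats = arena.stats();
/// assert!(stats.allocated_bytes <= stats.chunk_capacity);
/// arena.reset(); // every reference handed out so far is now invalid
/// ```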
pub trait ArenaBuilder<'a>: Sized {
    /// Create a new builder with default settings.
    ///
    /// When both allocators are available, this prefers bumpalo,
    /// since a single `Bump` can hold values of every variant type.
    fn new() -> Self;

    /// Reset all allocations, invalidating existing references.
    ///
    /// # Safety
    ///
    /// This invalidates all references previously allocated from this builder.
    /// Using any such references after reset is undefined behavior.
    fn reset(&mut self);

    /// Clear allocations and attempt to reclaim memory.
    ///
    /// More aggressive than `reset`, this tries to return memory to the OS.
    fn clear(&mut self);

    /// Get current memory usage statistics.
    fn stats(&self) -> ArenaStats;
}

/// A simple box allocator for owned tagged pointers.
///
/// This is used internally by the owned version of tagged dispatch.
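///
/// # Example
///
/// A minimal sketch; the returned raw pointer must eventually be reclaimed
/// with `Box::from_raw` or the allocation leaks:
///
/// ```
/// use tagged_dispatch::{BoxAllocator, TaggedAllocator};
///
/// let ptr = BoxAllocator.alloc(7u32);
/// unsafe {
///     assert_eq!(*ptr, 7);
///     drop(Box::from_raw(ptr)); // reclaim the allocation
/// }
/// ```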
pub struct BoxAllocator;

impl TaggedAllocator for BoxAllocator {
    #[inline]
    fn alloc<T>(&self, value: T) -> *mut T {
        Box::into_raw(Box::new(value))
    }
}

// Module with helper utilities
#[doc(hidden)]
pub mod __private {
    pub use core::marker::PhantomData;
    pub use core::mem;
    pub use core::ptr;
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_tag_extraction() {
        let ptr = core::ptr::null_mut::<u32>();
        let tagged = TaggedPtr::new(ptr, 127);
        assert_eq!(tagged.tag(), 127);

        // On macOS ARM64 with TBI, the pointer retains the tag bits
        // because the hardware ignores them automatically
        #[cfg(all(target_os = "macos", target_arch = "aarch64"))]
        {
            // The returned pointer should have the tag in the high byte
            let returned_ptr = tagged.ptr() as usize;
            let expected = ptr as usize | (127usize << TaggedPtr::<u32>::TAG_SHIFT);
            assert_eq!(returned_ptr, expected);
        }

        #[cfg(not(all(target_os = "macos", target_arch = "aarch64")))]
        {
            assert_eq!(tagged.ptr(), ptr);
        }
    }

    #[test]
    fn test_tag_preservation() {
        let value = Box::new(42u32);
        let ptr = Box::into_raw(value);

        for tag in 0..128u8 {
            let tagged = TaggedPtr::new(ptr, tag);
            assert_eq!(tagged.tag(), tag);

            // On macOS ARM64 with TBI, the pointer retains the tag bits
            #[cfg(all(target_os = "macos", target_arch = "aarch64"))]
            {
                let returned_ptr = tagged.ptr() as usize;
                let expected = ptr as usize | ((tag as usize) << TaggedPtr::<u32>::TAG_SHIFT);
                assert_eq!(returned_ptr, expected);
            }

            #[cfg(not(all(target_os = "macos", target_arch = "aarch64")))]
            {
                assert_eq!(tagged.ptr(), ptr);
            }
        }

        // Clean up - need to use the original untagged pointer for deallocation
        unsafe { let _ = Box::from_raw(ptr); }
    }

    #[test]
    fn test_size() {
        assert_eq!(core::mem::size_of::<TaggedPtr<()>>(), 8);
    }

    // The tag bound is enforced with `debug_assert!`, so it only panics in
    // debug builds; gate the test accordingly.
    #[test]
    #[cfg(debug_assertions)]
    #[should_panic(expected = "Tag must be less than 128")]
    fn test_tag_overflow() {
        let ptr = core::ptr::null_mut::<u32>();
        let _tagged = TaggedPtr::new(ptr, 128);
    }
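
    // A small check of the ordering documented on the `Ord` impl: the tag
    // lives in the high bits, so ordering is by tag first, then by address.
    #[test]
    fn test_ordering_by_tag_then_address() {
        let ptr = core::ptr::null_mut::<u32>();
        let low = TaggedPtr::new(ptr, 1);
        let high = TaggedPtr::new(ptr, 2);
        assert!(low < high);
        assert_eq!(low, TaggedPtr::new(ptr, 1));
    }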

    #[cfg(feature = "allocator-bumpalo")]
    #[test]
    fn test_bumpalo_allocator() {
        use bumpalo::Bump;

        let arena = Bump::new();
        let value = 42u32;
        let ptr = arena.alloc(value);

        // Should be able to create a tagged pointer with arena allocation
        let tagged = TaggedPtr::new(ptr, 5);
        assert_eq!(tagged.tag(), 5);
        unsafe {
            assert_eq!(*tagged.as_ref(), 42);
        }
    }
}
368}