manually_static/ptr.rs
use std::ops::Deref;
#[cfg(debug_assertions)]
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
#[cfg(debug_assertions)]
use std::sync::Arc;

/// `ManuallyStaticPtr<T>` allocates a value `T` on the heap and provides
/// a raw pointer to it. It requires manual deallocation
/// via the [`free`](ManuallyStaticPtr::free) method.
///
/// In debug builds, it tracks whether the pointer has already been freed,
/// panicking if [`free`](ManuallyStaticPtr::free) is called more than once or if the pointer
/// is dereferenced after being freed.
///
/// # Example
///
/// ```rust
/// use manually_static::ManuallyStaticPtr;
/// use std::sync::Mutex;
/// use std::array;
///
/// const N: usize = 10280;
/// const PAR: usize = 16;
///
/// #[allow(dead_code, reason = "It is an example.")]
/// struct Pool(Mutex<([Vec<u8>; N], usize)>);
///
/// fn main() {
///     let pool = ManuallyStaticPtr::new(Pool(Mutex::new((array::from_fn(|_| Vec::new()), 0))));
///     let mut joins = Vec::with_capacity(PAR);
///
///     for _ in 0..PAR {
///         #[allow(unused_variables, reason = "It is an example.")]
///         let pool = pool.clone();
///
///         joins.push(std::thread::spawn(move || {
///             /* ... do some work ... */
///         }));
///     }
///
///     for join in joins {
///         join.join().unwrap();
///     }
///
///     unsafe { pool.free(); }
/// }
/// ```
pub struct ManuallyStaticPtr<T> {
    ptr: *mut T,
    /// This flag is only present in debug builds (`cfg(debug_assertions)`).
    /// It is set to `true` when the `ManuallyStaticPtr` instance is freed.
    #[cfg(debug_assertions)]
    is_freed: Arc<AtomicBool>,
    /// Debug-only count of live handles to the allocation, used by the `Drop`
    /// impl below to detect when the last handle is dropped without `free`.
    #[cfg(debug_assertions)]
    ref_count: Arc<AtomicUsize>,
}

impl<T> ManuallyStaticPtr<T> {
    /// Allocates a new `ManuallyStaticPtr` instance by moving `value` to the heap.
    ///
    /// # Examples
    ///
    /// ```
    /// use manually_static::ManuallyStaticPtr;
    ///
    /// let my_ptr = ManuallyStaticPtr::new(42);
    ///
    /// assert_eq!(*my_ptr, 42);
    ///
    /// // Don't forget to call `free` when done!
    /// unsafe { my_ptr.free(); }
    /// ```
    pub fn new(value: T) -> Self {
        Self {
            ptr: Box::into_raw(Box::new(value)),
            #[cfg(debug_assertions)]
            is_freed: Arc::new(AtomicBool::new(false)),
            #[cfg(debug_assertions)]
            ref_count: Arc::new(AtomicUsize::new(1)),
        }
    }

    /// Deallocates the memory associated with this `ManuallyStaticPtr`.
    ///
    /// # Safety
    ///
    /// This function is `unsafe` because:
    /// - It must be called exactly once per allocation, no matter how many
    ///   clones of the `ManuallyStaticPtr` exist. Calling it more than once
    ///   results in a double free, leading to undefined behavior. In debug
    ///   builds, this will panic instead.
    /// - Not calling `free` will result in a memory leak.
    /// - The raw pointer must not be aliased or used, through any clone,
    ///   after `free` is called.
    ///
    /// # Panics
    ///
    /// In debug builds, this function will panic if the pointer
    /// has already been freed.
    ///
    /// # Examples
    ///
    /// ```
    /// use manually_static::ManuallyStaticPtr;
    ///
    /// let my_ptr = ManuallyStaticPtr::new(vec![1, 2, 3]);
    ///
    /// // ... use my_ptr ...
    ///
    /// unsafe { my_ptr.free(); } // Explicitly free the memory
    ///
    /// // my_ptr is now consumed and cannot be used
    /// ```
    pub unsafe fn free(self) {
        #[cfg(debug_assertions)]
        {
            assert!(
                !self.is_freed.swap(true, Ordering::AcqRel),
                "Attempted to double free ManuallyStaticPtr!"
            );
        }

        drop(Box::from_raw(self.ptr));
    }
}

impl<T> Deref for ManuallyStaticPtr<T> {
    type Target = T;

    fn deref(&self) -> &T {
        #[cfg(debug_assertions)]
        {
            assert!(
                !self.is_freed.load(Ordering::Acquire),
                "ManuallyStaticPtr: Attempted to dereference a freed pointer!"
            );
        }

        unsafe { &*self.ptr }
    }
}

impl<T> Clone for ManuallyStaticPtr<T> {
    /// Creates another handle to the same allocation. Clones share the raw
    /// pointer and, in debug builds, the freed flag and handle count.
    fn clone(&self) -> Self {
        #[cfg(debug_assertions)]
        {
            self.ref_count.fetch_add(1, Ordering::AcqRel);
        }

        Self {
            ptr: self.ptr,
            #[cfg(debug_assertions)]
            is_freed: self.is_freed.clone(),
            #[cfg(debug_assertions)]
            ref_count: self.ref_count.clone(),
        }
    }
}

// As with `Arc<T>`, clones of a `ManuallyStaticPtr<T>` may live on different
// threads and hand out `&T` concurrently, and `free` may drop the value on a
// different thread than the one that created it. Both impls therefore
// require `T: Send + Sync`.
unsafe impl<T: Send + Sync> Send for ManuallyStaticPtr<T> {}
unsafe impl<T: Send + Sync> Sync for ManuallyStaticPtr<T> {}

// Debug-only leak detector: when the last handle is dropped, the allocation
// must already have been freed.
#[cfg(debug_assertions)]
impl<T> Drop for ManuallyStaticPtr<T> {
    fn drop(&mut self) {
        let prev = self.ref_count.fetch_sub(1, Ordering::AcqRel);

        if prev == 1 {
            assert!(
                self.is_freed.load(Ordering::Acquire),
                "Attempted to drop the last ManuallyStaticPtr instance before it was freed!"
            );
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_manually_static_ptr_creation_and_deref() {
        let ptr = ManuallyStaticPtr::new(42);

        assert_eq!(*ptr, 42);

        unsafe {
            ptr.free();
        }
    }

    #[test]
    #[cfg(debug_assertions)]
    #[should_panic(expected = "Attempted to double free ManuallyStaticPtr!")]
    fn test_manually_static_ptr_double_free_panics() {
        let ptr = ManuallyStaticPtr::new(1);
        let ptr2 = ptr.clone();

        unsafe {
            ptr.free();
        }

        // This second call should panic in debug mode
        unsafe {
            ptr2.free();
        }
    }

    #[test]
    #[cfg(debug_assertions)]
    #[should_panic(expected = "ManuallyStaticPtr: Attempted to dereference a freed pointer!")]
    fn test_manually_static_ptr_deref_after_free_panics() {
        let ptr = ManuallyStaticPtr::new(2);
        let ptr2 = ptr.clone();

        unsafe {
            ptr.free();
        }

        let _ = *ptr2;
    }

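    // A minimal sketch (not part of the original test suite) of the
    // cross-thread usage shown in the type-level docs: a clone is moved into
    // a spawned thread, which only reads through it, while the main thread
    // keeps the original handle and frees the allocation after joining. The
    // test name and value are illustrative.
    #[test]
    fn test_manually_static_ptr_shared_across_threads() {
        let ptr = ManuallyStaticPtr::new(7);
        let ptr2 = ptr.clone();

        let handle = std::thread::spawn(move || {
            // The spawned thread dereferences its own clone; the value is
            // still live because `free` has not been called yet.
            assert_eq!(*ptr2, 7);
        });

        handle.join().unwrap();

        // Exactly one `free` for the allocation, after all other handles are gone.
        unsafe {
            ptr.free();
        }
    }
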
    #[test]
    #[cfg(debug_assertions)]
    #[should_panic(
        expected = "Attempted to drop the last ManuallyStaticPtr instance before it was freed!"
    )]
    fn test_manually_static_ptr_drop_without_free_panics() {
        // Create a ManuallyStaticPtr, but don't call `free()`
        let _ptr = ManuallyStaticPtr::new(3);
    }
}