tinypointers/lib.rs

#![doc = include_str!("../README.md")]
use std::{marker::PhantomData, ptr::NonNull};

#[cfg(all(feature="1byteid", feature="2byteid"))]
compile_error!("Cannot enable both 1byteid and 2byteid features");

use parking_lot::{RwLock, Mutex};

#[cfg(feature="2byteid")]
type RawId = std::num::NonZeroU16;
#[cfg(feature="1byteid")]
type RawId = std::num::NonZeroU8;

mod boxed;
mod sync;

pub use boxed::TinyBox;
pub use sync::{TinyArc, TinyWeak};
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
/// A tiny pointer to a mutable value of type `T`. As with every pointer type
/// in this crate, the value itself is allocated on the heap; the pointer
/// stores only a small non-zero id.
/// ```rust
/// use tinypointers::TinyPtr;
///
/// let x = TinyPtr::new(42);
/// println!("{}", unsafe { *x.get() }); // prints 42
/// ```
pub struct TinyPtr<T>(RawId, PhantomData<*mut T>);

impl<T> Clone for TinyPtr<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for TinyPtr<T> {}

impl<T> TinyPtr<T> {
    /// Allocates `value` on the heap and returns a tiny pointer to it.
    ///
    /// # Panics
    /// Panics if every id of the configured size is already in use.
    pub fn new(value: T) -> Self {
        MEMORY.insert_value(Value::from(Box::new(value)))
    }

    /// Returns a raw pointer to the value.
    pub fn as_ptr(&self) -> *const T {
        unsafe { MEMORY.access(self) }
    }
    /// Returns a raw mutable pointer to the value.
    pub fn as_mut_ptr(&mut self) -> *mut T {
        unsafe { MEMORY.access(self) }
    }
    /// Returns a reference to the value, with a caller-chosen lifetime.
    ///
    /// # Safety
    /// The value must not have been freed with [`TinyPtr::take`], and the
    /// returned reference must not outlive the allocation.
    pub unsafe fn get<'a>(&self) -> &'a T {
        &*MEMORY.access(self)
    }
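    /// Returns a mutable reference to the value, with a caller-chosen lifetime.
    ///
    /// # Safety
    /// The same rules as [`TinyPtr::get`] apply, plus the usual exclusivity
    /// rules for mutable references. A sketch of the intended use:
    /// ```rust
    /// use tinypointers::TinyPtr;
    ///
    /// let mut x = TinyPtr::new(1);
    /// unsafe { *x.get_mut() = 2 };
    /// assert_eq!(unsafe { *x.get() }, 2);
    /// x.take(); // free the slot when done
    /// ```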
    pub unsafe fn get_mut<'a>(&mut self) -> &'a mut T {
        &mut *MEMORY.access(self)
    }
    /// Takes ownership of the value and returns it.
    ///
    /// The underlying memory is freed.
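    /// For example (this mirrors `drop_single_test` below):
    /// ```rust
    /// use tinypointers::TinyPtr;
    ///
    /// let x = TinyPtr::new(42);
    /// assert_eq!(x.take(), 42); // the id may be handed out again after this
    /// ```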
    pub fn take(self) -> T {
        unsafe { MEMORY.take(self) }
    }

    /// Returns the internal id of the pointer.
    ///
    /// This is used for debugging purposes.
    pub fn id(&self) -> RawId {
        self.0
    }
}

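/// Converts an existing `Box<T>` into a `TinyPtr<T>`. The box's heap
/// allocation is reused as-is; only a fresh id is allocated.
/// ```rust
/// use tinypointers::TinyPtr;
///
/// let ptr: TinyPtr<String> = Box::new(String::from("hi")).into();
/// assert_eq!(ptr.take(), "hi");
/// ```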
impl<T> From<Box<T>> for TinyPtr<T> {
    fn from(value: Box<T>) -> Self {
        MEMORY.insert_value(Value::from(value))
    }
}

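/// A type-erased, owned heap allocation. `Value` stores only the raw pointer;
/// the `T` is recovered from the `TinyPtr<T>` that holds the matching id.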
struct Value {
    val: NonNull<()>,
}

// SAFETY: the global `Memory` must be able to hold values created on any
// thread. Per-`T` thread-safety is still enforced at the edges, since
// `TinyPtr<T>` contains `PhantomData<*mut T>` and is neither `Send` nor `Sync`.
unsafe impl Send for Value {}
unsafe impl Sync for Value {}

impl<T> From<Box<T>> for Value {
    fn from(value: Box<T>) -> Self {
        Self {
            val: NonNull::from(Box::leak(value)).cast(),
        }
    }
}

impl Value {
    unsafe fn get<T>(&self) -> *mut T {
        self.val.cast::<T>().as_ptr()
    }
    unsafe fn into_box<T>(self) -> Box<T> {
        Box::from_raw(self.val.cast::<T>().as_ptr())
    }
}

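/// Process-wide table of live allocations, indexed by pointer id.
///
/// An id is the 1-based index of its slot in `map`; because ids are non-zero,
/// `Option<TinyPtr<T>>` is the same size as `TinyPtr<T>` itself (checked by
/// `assert_optimization_test`). Ids of freed slots are recycled through the
/// `available` free list.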
#[derive(Default)]
struct Memory {
    available: Mutex<Vec<RawId>>,
    map: RwLock<Vec<Option<Value>>>,
}

impl Memory {
    pub const fn new() -> Self {
        Self { available: Mutex::new(Vec::new()), map: RwLock::new(Vec::new()) }
    }
    fn insert_value<T>(&self, value: Value) -> TinyPtr<T> {
        if self.remaining_slots() == 0 {
            panic!("No more slots available. Consider increasing the id size.")
        }
        let mut map = self.map.write();
        let idx = match self.available.lock().pop() {
            None => {
                // Re-check under the write lock: another thread may have
                // claimed the last slot since `remaining_slots` was sampled.
                assert!(
                    map.len() < RawId::MAX.get() as usize,
                    "No more slots available. Consider increasing the id size."
                );
                map.push(Some(value));
                // A fresh id is the 1-based length of the table.
                RawId::new(map.len() as _).unwrap()
            },
            Some(idx) => {
                // Reuse a previously freed slot.
                map[idx.get() as usize - 1] = Some(value);
                idx
            },
        };
        TinyPtr(idx, PhantomData)
    }
    fn remaining_slots(&self) -> usize {
        // Read the two lengths in separate statements so the `available` guard
        // is dropped before `map` is locked; `insert_value` takes the locks in
        // the opposite order, which could otherwise deadlock.
        let recycled = self.available.lock().len();
        recycled + (RawId::MAX.get() as usize - self.map.read().len())
    }
    unsafe fn access<T>(&self, idx: &TinyPtr<T>) -> *mut T {
        let map = self.map.read();
        map.get(idx.0.get() as usize - 1)
            .expect("Index out of bounds")
            .as_ref()
            .expect("Pointer already freed")
            .get()
    }
    unsafe fn take<T>(&self, idx: TinyPtr<T>) -> T {
        let mut map = self.map.write();
        let value = map
            .get_mut(idx.0.get() as usize - 1)
            .expect("Index out of bounds")
            .take()
            .expect("Pointer already freed");
        // Return the id to the free list so the slot can be reused;
        // otherwise freed ids would leak and never be handed out again.
        self.available.lock().push(idx.0);
        *value.into_box()
    }
}

static MEMORY: Memory = Memory::new();

#[cfg(test)]
pub(crate) mod tests {

    use std::{fmt::Debug, ops::{Deref, DerefMut}};
    pub use std::sync::atomic::AtomicBool;

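    /// Wraps a value and raises the flag when dropped, so tests can verify
    /// that a pointer's payload was actually freed.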
    pub(crate) struct DropIndicator<T>(pub &'static AtomicBool, pub T);

    impl<T: Debug> Debug for DropIndicator<T> {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.1.fmt(f)
        }
    }

    impl<T: PartialEq> PartialEq<T> for DropIndicator<T> {
        fn eq(&self, other: &T) -> bool {
            self.1 == *other
        }
    }

    impl<T> Drop for DropIndicator<T> {
        fn drop(&mut self) {
            self.0.store(true, std::sync::atomic::Ordering::Relaxed);
        }
    }

    impl<T> Deref for DropIndicator<T> {
        type Target = T;
        fn deref(&self) -> &T {
            &self.1
        }
    }

    impl<T> DerefMut for DropIndicator<T> {
        fn deref_mut(&mut self) -> &mut T {
            &mut self.1
        }
    }

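    /// Binds `$ind` to a leaked, `'static` drop flag and `$b` to a
    /// `DropIndicator` wrapping `$val`. The leak is deliberate: the flag must
    /// outlive the wrapped value, and `assert_dropped!` reclaims it afterwards.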
    macro_rules! make_drop_indicator {
        ($ind:ident, $b:ident, $val:expr) => {
            let $ind = &*Box::leak(Box::new(AtomicBool::new(false)));
            let $b = DropIndicator($ind, $val);
        };
    }

    /// Asserts that the flag created by `make_drop_indicator!` was raised,
    /// freeing the leaked flag in the process.
    macro_rules! assert_dropped {
        ($ind:ident) => {
            assert_dropped!($ind, "Value was not dropped")
        };
        ($ind:ident, $msg:expr $(, $arg:expr)*) => {{
            // Reclaim the flag leaked by `make_drop_indicator!`.
            let __ind = unsafe { Box::from_raw($ind as *const AtomicBool as *mut AtomicBool) };
            assert!(__ind.load(std::sync::atomic::Ordering::Relaxed), $msg $(, $arg)*);
        }};
    }

    pub(crate) use make_drop_indicator;
    pub(crate) use assert_dropped;

    use super::*;

    #[test]
    fn access_raw_test() {
        let ptr = TinyPtr::new(42);
        assert_eq!(unsafe { *ptr.get() }, 42);
    }

    #[test]
    fn access_raw_string_test() {
        let ptr = TinyPtr::new(String::from("Hello, World!"));
        assert_eq!(unsafe { ptr.get() }, "Hello, World!");
    }

    #[test]
    #[cfg_attr(feature="1byteid", ignore="leaks too much memory")]
    fn access_after_multiple_test() {
        let ptrs = (0..100).map(|i| TinyPtr::new(i)).collect::<Vec<_>>();
        assert!(ptrs.iter().enumerate().all(|(i, ptr)| unsafe { *ptr.get() } == i));
    }

    #[test]
    fn drop_single_test() {
        let ptr = TinyPtr::new(42);
        assert_eq!(unsafe { *ptr.get() }, 42);
        assert_eq!(ptr.take(), 42);
    }

    #[test]
    fn multiple_thread_access() {
        let t1 = std::thread::spawn(|| {
            let ptr = TinyPtr::new(42);
            assert_eq!(unsafe { *ptr.get() }, 42);
            ptr.take();
        });
        let t2 = std::thread::spawn(|| {
            let ptr = TinyPtr::new(30);
            assert_eq!(unsafe { *ptr.get() }, 30);
            ptr.take();
        });
        t1.join().unwrap();
        t2.join().unwrap();
    }

    #[test]
    fn drop_multiple_test() {
        let ptrs = (0..100).map(|i| TinyPtr::new(i)).collect::<Vec<_>>();
        assert!(ptrs.iter().enumerate().all(|(i, ptr)| unsafe { *ptr.get() } == i));
        assert!(ptrs.into_iter().enumerate().all(|(i, ptr)| ptr.take() == i));
    }

    #[test]
    fn assert_optimization_test() {
        assert_eq!(std::mem::size_of::<TinyPtr<u8>>(), std::mem::size_of::<RawId>());
        assert_eq!(std::mem::size_of::<Option<TinyPtr<u8>>>(), std::mem::size_of::<TinyPtr<u8>>());
    }
}