// arena_alloc/lib.rs
1#![no_std]
2//! # Rust arena allocator
3//!
4//! ## Description
5//! A small, thread-safe, no-std, arena allocator with a static backing store and ability to allocate arbitrary types.
6//!
7//! ## Examples
8//! ### Simple Types
9//!
10//! ```
11//! use arena_alloc::Arena;
12//! static ARENA: Arena<1000> = Arena::new();
13//!
14//! fn main() {
15//! let two = ARENA.acquire(2).unwrap();
16//! let zero = ARENA.acquire_default::<usize>().unwrap();
17//!
18//! assert_eq!(*two, 2);
19//! assert_eq!(*zero, 0);
20//! }
21//! ```
22//!
23//! ### Self Referential Types
24//!
25//! ```
26//! use arena_alloc::{Arena, Init};
27//! use std::cell::Cell;
28//! use std::ptr;
29//! use std::mem::MaybeUninit;
30//!
31//! static ARENA: Arena<1000> = Arena::new();
32//!
33//! struct CllNode<'b, T> {
34//! data: T,
35//! next: Cell<&'b Self>,
36//! }
37//!
38//! impl<'b, T> CllNode<'b, T> {
39//! fn cons(&'b self, other: &'b CllNode<'b, T>) {
40//! self.next.set(other);
41//! }
42//! }
43//!
44//! impl<'b, T> Init for CllNode<'b, T> {
45//! type InitArg = T;
46//! fn init(me: &mut MaybeUninit<Self>, arg: T) {
47//! unsafe {
48//! me.write(CllNode {
49//! data: arg,
50//! next: Cell::new(ptr::from_ref(me).cast::<Self>().as_ref().unwrap()),
51//! });
52//! }
53//! }
54//! }
55//!
56//! fn main() {
57//! let n0 = ARENA.acquire_init_default::<CllNode<usize>>().unwrap();
58//! let n1 = ARENA.acquire_init::<CllNode<_>>(1).unwrap();
59//! let n2 = ARENA.acquire_init::<CllNode<_>>(2).unwrap();
60//!
61//! n0.cons(n1);
62//! n1.cons(n2);
63//! n2.cons(n0);
64//! assert_eq!(n0.next.get().data, 1);
65//! assert_eq!(n1.next.get().data, 2);
66//! assert_eq!(n2.next.get().data, 0);
67//! }
68//! ```
69//!
70
71use core::{cell::UnsafeCell, mem::MaybeUninit, ptr, sync::atomic::{AtomicUsize, Ordering}, usize};
72pub use init::Init;
73
74mod init;
75
/// Raw byte buffer used as the arena's backing storage.
type MemSlice<const SIZE: usize> = [u8; SIZE];
77
/// A deferred destructor: remembers where a value lives in the backing store
/// and how to drop it in place when the arena itself is dropped.
#[derive(Clone, Copy)]
struct Dropper<const SIZE: usize> {
    // Byte offset of the value inside the backing store.
    place: usize,
    // Monomorphized function that runs the value's destructor; it receives a
    // pointer already offset to `place` (see the `Drop` impl for `Arena`).
    drop_func: fn(*mut MemSlice<SIZE>),
}
83
/// A fixed size arena that can be used to allocate memory for arbitrary types.
pub struct Arena<const SIZE: usize> {
    // Raw storage that values of arbitrary types are placed into.
    backing_store: UnsafeCell<MemSlice<SIZE>>,
    // Byte offset of the next unused spot in `backing_store`.
    next_free_store_spot: AtomicUsize,
    // Pending destructors, filled densely from index 0 and run in order on drop.
    // Sized SIZE because at most SIZE one-byte values can fit in the store.
    drop_queue: UnsafeCell<[Option<Dropper<SIZE>>; SIZE]>,
    // Index of the next unused slot in `drop_queue`.
    next_free_drop_spot: AtomicUsize,
}
91
// SAFETY: access to the interior-mutable fields is coordinated through the two
// atomic cursors, and callers only ever receive shared references to distinct,
// fully initialized values.
// NOTE(review): `add_to_drop_queue` creates a `&mut` to the whole queue array
// from multiple threads — looks unsound under strict aliasing rules; confirm
// with `cargo miri test` before relying on cross-thread use.
unsafe impl<const SIZE: usize> Sync for Arena<SIZE> {}
unsafe impl<const SIZE: usize> Send for Arena<SIZE> {}
94
95impl<const SIZE: usize> Default for Arena<SIZE> {
96 fn default() -> Self {
97 Self::new()
98 }
99}
100
101impl<'a, const SIZE: usize> Arena<SIZE> {
102 /// Create a new arena with a fixed size buffer of SIZE bytes.
103 #[must_use]
104 pub const fn new() -> Self {
105 Arena {
106 backing_store: UnsafeCell::new([0; SIZE]),
107 next_free_store_spot: AtomicUsize::new(0),
108 drop_queue: UnsafeCell::new([None; SIZE]),
109 next_free_drop_spot: AtomicUsize::new(0),
110 }
111 }
112
113 /// Get a pointer to a place in the backing store where a value of type T can be placed.
114 fn get_ptr_place<T>(&'a self) -> Option<(usize, &mut MaybeUninit<T>)> {
115 let place = self.next_free_store_spot.fetch_add(
116 core::mem::size_of::<T>(),
117 Ordering::Release,
118 );
119 if place + core::mem::size_of::<T>() > SIZE {
120 return None;
121 }
122
123 let ptr = unsafe {
124 self.backing_store
125 .get()
126 .byte_add(place)
127 .cast::<MaybeUninit<T>>()
128 .as_mut()
129 .unwrap()
130 };
131
132 Some((place, ptr))
133 }
134
135 /// Add a dropper function for type T at the given place to the drop queue.
136 fn add_to_drop_queue<T>(&'a self, place: usize) {
137 let dq = unsafe { self.drop_queue.get().as_mut() }.unwrap();
138 dq[self
139 .next_free_drop_spot
140 .fetch_add(1, Ordering::Relaxed)] = Some(Dropper {
141 place,
142 drop_func: |ptr: *mut MemSlice<SIZE>| unsafe {
143 ptr.cast::<T>().drop_in_place();
144 },
145 });
146 }
147
148 /// acquire a reference to a value of type T that can be initialized with
149 /// the Init trait, using the default value of the InitArg.
150 /// This is useful for types that require initialization and the init arg is Default.
151 pub fn acquire_init_default<T: Init>(&'a self) -> Option<&'a T>
152 where
153 T::InitArg: Default,
154 {
155 let (place, ptr) = self.get_ptr_place::<T>()?;
156
157 T::init(ptr, T::InitArg::default());
158
159 self.add_to_drop_queue::<T>(place);
160
161 Some(unsafe {
162 ptr::from_ref(ptr)
163 .cast::<T>()
164 .as_ref()
165 .unwrap_unchecked()
166 })
167 }
168
169 /// acquire a reference to a value of type T that can be initialized with
170 /// the Init trait, using a given InitArg.
171 /// This is useful for types that require initialization.
172 pub fn acquire_init<T: Init>(&'a self, arg: T::InitArg) -> Option<&'a T> {
173 let (place, ptr) = self.get_ptr_place::<T>()?;
174
175 T::init(ptr, arg);
176
177 self.add_to_drop_queue::<T>(place);
178
179 Some(unsafe {
180 ptr::from_ref(ptr)
181 .cast::<T>()
182 .as_ref()
183 .unwrap_unchecked()
184 })
185 }
186
187 /// acquire a reference to a value of type T that is initialized with it's default value.
188 /// This is useful for types that do not require initialization.
189 pub fn acquire_default<T: Default>(&'a self) -> Option<&'a T> {
190 let (place, ptr) = self.get_ptr_place::<T>()?;
191
192 ptr.write(T::default());
193
194 self.add_to_drop_queue::<T>(place);
195
196 Some(unsafe {
197 ptr::from_ref(ptr)
198 .cast::<T>()
199 .as_ref()
200 .unwrap_unchecked()
201 })
202 }
203
204 /// acquire a reference to a value of type T that is initialized with the given value.
205 /// This is useful for types that do not require initialization.
206 pub fn acquire<T>(&'a self, val: T) -> Option<&'a T> {
207 let (place, ptr) = self.get_ptr_place::<T>()?;
208
209 ptr.write(val);
210
211 self.add_to_drop_queue::<T>(place);
212
213 Some(unsafe {
214 ptr::from_ref(ptr)
215 .cast::<T>()
216 .as_ref()
217 .unwrap_unchecked()
218 })
219 }
220}
221
222impl<const SIZE: usize> Drop for Arena<SIZE> {
223 fn drop(&mut self) {
224 for pair in self.drop_queue.get_mut() {
225 let Some(Dropper { place, drop_func }) = pair else {
226 break;
227 };
228 let ptr = unsafe { self.backing_store.get().byte_add(*place) };
229 drop_func(ptr);
230 }
231 }
232}
233
234#[cfg(test)]
235mod test;