#![cfg_attr(not(feature = "std"), no_std)]
#![warn(missing_docs)]
#![doc = include_str!("../README.md")]
#![cfg_attr(nightly, feature(allocator_api))]
use core::cell::UnsafeCell;
use core::mem::MaybeUninit;
use core::ptr::NonNull;
use core::sync::atomic::AtomicUsize;
use core::sync::atomic::Ordering;

extern crate alloc;

#[cfg(not(nightly))]
use allocator_api2::alloc::{AllocError, Allocator, Layout};
#[cfg(nightly)]
use core::alloc::{AllocError, Allocator, Layout};

/// A fixed-size, thread-safe bump ("stack") allocator backed by an `N`-byte
/// buffer stored inline in the allocator itself.
///
/// Allocation bumps an atomic offset into the buffer; memory is only
/// reclaimed when the most recent allocation is deallocated or when the
/// whole allocator is [`reset`](StackAllocator::reset).
pub struct StackAllocator<const N: usize> {
    /// The backing storage for all allocations.
    buf: UnsafeCell<MaybeUninit<[u8; N]>>,
    /// Offset of the first free byte in `buf`.
    offset: AtomicUsize,
}

impl<const N: usize> Default for StackAllocator<N> {
    fn default() -> Self {
        Self::new()
    }
}

// SAFETY: the buffer behind the `UnsafeCell` is only handed out in disjoint
// regions reserved through an atomic compare-exchange, so concurrent access
// through a shared reference never aliases.
unsafe impl<const N: usize> Send for StackAllocator<N> {}
unsafe impl<const N: usize> Sync for StackAllocator<N> {}

impl<const N: usize> StackAllocator<N> {
    /// Creates an empty allocator. The backing buffer is left uninitialized.
    pub const fn new() -> Self {
        Self {
            buf: UnsafeCell::new(MaybeUninit::uninit()),
            offset: AtomicUsize::new(0),
        }
    }

    /// Resets the allocator, making the whole buffer available again.
    ///
    /// # Safety
    ///
    /// All allocations previously handed out by this allocator are
    /// invalidated; the caller must ensure none of them are used afterwards.
    pub unsafe fn reset(&mut self) {
        self.offset.store(0, Ordering::Release);
    }

    /// Rounds `addr` up to the next multiple of `align` (a power of two).
    #[inline]
    const fn align_up(addr: usize, align: usize) -> usize {
        (addr + align - 1) & !(align - 1)
    }

    /// Returns the number of bytes of the buffer currently in use.
    pub fn current_offset(&self) -> usize {
        self.offset.load(Ordering::Acquire)
    }
}

unsafe impl<const N: usize> Allocator for StackAllocator<N> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let base = self.buf.get() as usize;
        // Reserve an aligned region by bumping `offset` with a CAS loop.
        let mut current = self.offset.load(Ordering::Acquire);
        let mut start;
        loop {
            let current_ptr = base + current;
            // Round the free pointer up to the requested alignment.
            let aligned_ptr = Self::align_up(current_ptr, layout.align());
            start = aligned_ptr - base;
            let end = start.checked_add(layout.size()).ok_or(AllocError)?;

            // Fail if the request does not fit in the remaining buffer.
            if end > N {
                return Err(AllocError);
            }

            if self
                .offset
                .compare_exchange(current, end, Ordering::Release, Ordering::Relaxed)
                .is_ok()
            {
                break;
            }
            // Another thread won the race; retry with the updated offset.
            current = self.offset.load(Ordering::Acquire);
        }
        let ptr = unsafe { self.buf.get().cast::<u8>().add(start) };
        Ok(NonNull::slice_from_raw_parts(
            NonNull::new(ptr).unwrap(),
            layout.size(),
        ))
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        let base = self.buf.get() as usize;
        // Only the most recent allocation can be reclaimed: if `offset` still
        // points at the end of this block, roll it back to the block's start.
        // Otherwise the bytes stay reserved until `reset` is called.
        let start = ptr.as_ptr() as usize - base;
        let end = start + layout.size();

        let _ = self
            .offset
            .compare_exchange(end, start, Ordering::Release, Ordering::Relaxed);
    }

    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let base = self.buf.get() as usize;
        let old_start = ptr.as_ptr() as usize - base;

        // Growing in place is only attempted for the most recent allocation.
        let expected_offset = old_start + old_layout.size();
        let current_offset = self.offset.load(Ordering::Acquire);
        if current_offset != expected_offset {
            return Err(AllocError);
        }

        // `grow` must never be called with a smaller layout.
        if new_layout.size() < old_layout.size() {
            return Err(AllocError);
        }

        // The block can only be reused in place if it already satisfies the
        // (possibly stricter) alignment of the new layout.
        if (ptr.as_ptr() as usize) % new_layout.align() != 0 {
            return Err(AllocError);
        }

        let new_end = old_start.checked_add(new_layout.size()).ok_or(AllocError)?;
        if new_end > N {
            return Err(AllocError);
        }

        if self
            .offset
            .compare_exchange(
                expected_offset,
                new_end,
                Ordering::Release,
                Ordering::Relaxed,
            )
            .is_err()
        {
            // Another allocation raced us past `expected_offset`. Fall back to
            // a fresh allocation and preserve the old contents, as the `grow`
            // contract requires.
            let new_ptr = self.allocate(new_layout)?;
            core::ptr::copy_nonoverlapping(
                ptr.as_ptr(),
                new_ptr.cast::<u8>().as_ptr(),
                old_layout.size(),
            );
            return Ok(new_ptr);
        }
        Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
    }

    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let base = self.buf.get() as usize;
        let old_start = ptr.as_ptr() as usize - base;

        // Shrinking in place is only attempted for the most recent allocation.
        let expected_offset = old_start + old_layout.size();
        let current_offset = self.offset.load(Ordering::Acquire);
        if current_offset != expected_offset {
            return Err(AllocError);
        }

        // `shrink` must never be called with a larger layout.
        if new_layout.size() > old_layout.size() {
            return Err(AllocError);
        }

        // The block can only be reused in place if it already satisfies the
        // (possibly stricter) alignment of the new layout.
        if (ptr.as_ptr() as usize) % new_layout.align() != 0 {
            return Err(AllocError);
        }

        let new_end = old_start + new_layout.size();

        // Give the freed tail back to the buffer; if another allocation has
        // moved the offset in the meantime, the tail simply stays reserved.
        let _ = self.offset.compare_exchange(
            expected_offset,
            new_end,
            Ordering::Release,
            Ordering::Relaxed,
        );

        Ok(NonNull::slice_from_raw_parts(ptr, new_layout.size()))
    }

    fn by_ref(&self) -> &Self
    where
        Self: Sized,
    {
        self
    }
}
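
// Illustrative usage of `StackAllocator` through the `Allocator` trait. This
// is a sketch added for documentation purposes; the module and test names are
// not part of the crate's API.
#[cfg(test)]
mod stack_allocator_example {
    use super::*;

    #[test]
    fn bump_allocate_and_release_last_block() {
        let allocator = StackAllocator::<64>::new();
        // Alignment 1 keeps the arithmetic exact: the backing `[u8; N]`
        // buffer itself only guarantees byte alignment.
        let layout = Layout::from_size_align(16, 1).unwrap();

        // Allocating reserves 16 bytes and bumps the offset.
        let block = allocator.allocate(layout).expect("fits in the buffer");
        assert_eq!(allocator.current_offset(), 16);

        // Deallocating the most recent block rolls the offset back to zero.
        unsafe { allocator.deallocate(block.cast::<u8>(), layout) };
        assert_eq!(allocator.current_offset(), 0);

        // A request larger than the whole buffer is rejected.
        let too_big = Layout::from_size_align(128, 1).unwrap();
        assert!(allocator.allocate(too_big).is_err());
    }
}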

/// An allocator that serves requests from an inline [`StackAllocator`] of
/// `N` bytes and falls back to the allocator `F` once the stack buffer
/// cannot satisfy a request.
pub struct HybridAllocator<const N: usize, F: Allocator> {
    /// The fixed-size bump allocator tried first.
    stack_alloc: StackAllocator<N>,
    /// The allocator used when the stack buffer cannot serve a request.
    fallback: F,
}

#[cfg(feature = "alloc")]
impl<const N: usize> Default for HybridAllocator<N, alloc::alloc::Global> {
    fn default() -> Self {
        Self::new(alloc::alloc::Global)
    }
}

impl<const N: usize, F: Allocator> HybridAllocator<N, F> {
    /// Creates a new hybrid allocator with an empty stack buffer and the
    /// given fallback allocator.
    pub const fn new(fallback: F) -> Self {
        Self {
            stack_alloc: StackAllocator::new(),
            fallback,
        }
    }

    /// Resets the inline stack allocator, making its whole buffer available
    /// again. Allocations served by the fallback are unaffected.
    ///
    /// # Safety
    ///
    /// All allocations previously served from the stack buffer are
    /// invalidated; the caller must ensure none of them are used afterwards.
    pub unsafe fn reset(&mut self) {
        self.stack_alloc.reset();
    }

    /// Returns the number of bytes of the stack buffer currently in use.
    pub fn current_offset(&self) -> usize {
        self.stack_alloc.current_offset()
    }

    /// Returns a reference to the fallback allocator.
    pub fn fallback(&self) -> &F {
        &self.fallback
    }

    /// Returns `true` if the stack buffer is full, meaning new allocations
    /// will be served by the fallback allocator.
    pub fn is_stack_exausted(&self) -> bool {
        self.current_offset() >= N
    }
}

unsafe impl<const N: usize, F: Allocator> Allocator for HybridAllocator<N, F> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // Try the stack buffer first and spill into the fallback on failure.
        match self.stack_alloc.allocate(layout) {
            ok @ Ok(_) => ok,
            Err(_) => self.fallback.allocate(layout),
        }
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Route the pointer back to whichever allocator owns it.
        let base = self.stack_alloc.buf.get() as usize;
        let end = base + N;
        let addr = ptr.as_ptr() as usize;

        if (base..end).contains(&addr) {
            self.stack_alloc.deallocate(ptr, layout);
        } else {
            self.fallback.deallocate(ptr, layout);
        }
    }

    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let base = self.stack_alloc.buf.get() as usize;
        let addr = ptr.as_ptr() as usize;

        if (base..base + N).contains(&addr) {
            // Try to grow within the stack buffer; otherwise move the block
            // into the fallback allocator, preserving its contents.
            if let Ok(res) = self.stack_alloc.grow(ptr, old_layout, new_layout) {
                return Ok(res);
            } else {
                let new_ptr = self.fallback.allocate(new_layout)?;
                core::ptr::copy_nonoverlapping(
                    ptr.as_ptr(),
                    new_ptr.cast::<u8>().as_ptr(),
                    old_layout.size(),
                );
                self.stack_alloc.deallocate(ptr, old_layout);
                return Ok(new_ptr);
            }
        }
        self.fallback.grow(ptr, old_layout, new_layout)
    }

    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let base = self.stack_alloc.buf.get() as usize;
        let addr = ptr.as_ptr() as usize;

        if (base..base + N).contains(&addr) {
            if let Ok(res) = self.stack_alloc.shrink(ptr, old_layout, new_layout) {
                return Ok(res);
            }
            // The stack allocator could not shrink in place. Move the data
            // into a fresh allocation instead of handing a stack-owned
            // pointer to the fallback allocator.
            let new_ptr = self.allocate(new_layout)?;
            core::ptr::copy_nonoverlapping(
                ptr.as_ptr(),
                new_ptr.cast::<u8>().as_ptr(),
                new_layout.size(),
            );
            self.stack_alloc.deallocate(ptr, old_layout);
            return Ok(new_ptr);
        }
        self.fallback.shrink(ptr, old_layout, new_layout)
    }

    fn by_ref(&self) -> &Self
    where
        Self: Sized,
    {
        self
    }
}
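
// Illustrative usage of `HybridAllocator`: small requests are served from the
// inline stack buffer and larger ones spill into the fallback. The module and
// test names are illustrative only; a second `StackAllocator` is used as the
// fallback so the example stays self-contained.
#[cfg(test)]
mod hybrid_allocator_example {
    use super::*;

    #[test]
    fn spills_to_fallback_when_stack_is_full() {
        // 16 bytes inline, with a 64-byte stack allocator as the fallback.
        let allocator =
            HybridAllocator::<16, StackAllocator<64>>::new(StackAllocator::new());
        let layout = Layout::from_size_align(8, 1).unwrap();

        // The first two 8-byte blocks fit in the 16-byte stack buffer.
        let a = allocator.allocate(layout).expect("fits on the stack");
        let b = allocator.allocate(layout).expect("fits on the stack");
        assert_eq!(allocator.current_offset(), 16);
        assert!(allocator.is_stack_exausted());

        // The stack buffer is now full, so the next request goes to the fallback.
        let c = allocator.allocate(layout).expect("served by the fallback");
        assert_eq!(allocator.fallback().current_offset(), 8);

        // Deallocation routes each pointer back to the allocator that owns it.
        unsafe {
            allocator.deallocate(c.cast::<u8>(), layout);
            allocator.deallocate(b.cast::<u8>(), layout);
            allocator.deallocate(a.cast::<u8>(), layout);
        }
        assert_eq!(allocator.current_offset(), 0);
    }
}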