1#![allow(clippy::mut_from_ref)]
5
6use std::alloc::{AllocError, Allocator, Layout};
7use std::cell::Cell;
8use std::hint::cold_path;
9use std::mem::MaybeUninit;
10use std::ptr::{self, NonNull};
11use std::{mem, slice};
12
13use crate::helpers::*;
14use crate::{apperr, sys};
15
16const ALLOC_CHUNK_SIZE: usize = 64 * KIBI;
17
/// A virtual-memory-backed bump allocator.
///
/// `new` reserves a contiguous address range up front; pages are committed
/// lazily in `ALLOC_CHUNK_SIZE` steps as the bump offset grows past `commit`.
/// Individual deallocation is a no-op — memory is reclaimed wholesale via
/// `reset` or when the arena is dropped.
pub struct Arena {
    /// Start of the reserved address range (dangling for `Arena::empty()`).
    base: NonNull<u8>,
    /// Total reserved size in bytes (a multiple of `ALLOC_CHUNK_SIZE`);
    /// not necessarily committed yet.
    capacity: usize,
    /// Number of bytes committed so far; only grows, in chunk-sized steps.
    commit: Cell<usize>,
    /// Current bump offset; the next allocation starts at or after this.
    offset: Cell<usize>,

    #[cfg(debug_assertions)]
    /// NOTE(review): presumably a debug-only count of outstanding scoped
    /// borrows of this arena — confirm against the code in `super`.
    pub(super) borrows: Cell<usize>,
}
53
54impl Arena {
55 pub const fn empty() -> Self {
56 Self {
57 base: NonNull::dangling(),
58 capacity: 0,
59 commit: Cell::new(0),
60 offset: Cell::new(0),
61
62 #[cfg(debug_assertions)]
63 borrows: Cell::new(0),
64 }
65 }
66
67 pub fn new(capacity: usize) -> apperr::Result<Self> {
68 let capacity = (capacity.max(1) + ALLOC_CHUNK_SIZE - 1) & !(ALLOC_CHUNK_SIZE - 1);
69 let base = unsafe { sys::virtual_reserve(capacity)? };
70
71 Ok(Self {
72 base,
73 capacity,
74 commit: Cell::new(0),
75 offset: Cell::new(0),
76
77 #[cfg(debug_assertions)]
78 borrows: Cell::new(0),
79 })
80 }
81
82 pub fn offset(&self) -> usize {
83 self.offset.get()
84 }
85
86 pub unsafe fn reset(&self, to: usize) {
93 if cfg!(debug_assertions) && self.offset.get() > to {
95 let commit = self.commit.get();
96 let len = (self.offset.get() + 128).min(commit) - to;
97 unsafe { slice::from_raw_parts_mut(self.base.add(to).as_ptr(), len).fill(0xDD) };
98 }
99
100 self.offset.replace(to);
101 }
102
103 #[inline]
104 pub(super) fn alloc_raw(
105 &self,
106 bytes: usize,
107 alignment: usize,
108 ) -> Result<NonNull<[u8]>, AllocError> {
109 let commit = self.commit.get();
110 let offset = self.offset.get();
111
112 let beg = (offset + alignment - 1) & !(alignment - 1);
113 let end = beg + bytes;
114
115 if end > commit {
116 return self.alloc_raw_bump(beg, end);
117 }
118
119 if cfg!(debug_assertions) {
120 let ptr = unsafe { self.base.add(offset) };
121 let len = (end + 128).min(self.commit.get()) - offset;
122 unsafe { slice::from_raw_parts_mut(ptr.as_ptr(), len).fill(0xCD) };
123 }
124
125 self.offset.replace(end);
126 Ok(unsafe { NonNull::slice_from_raw_parts(self.base.add(beg), bytes) })
127 }
128
129 #[cold]
131 fn alloc_raw_bump(&self, beg: usize, end: usize) -> Result<NonNull<[u8]>, AllocError> {
132 let offset = self.offset.get();
133 let commit_old = self.commit.get();
134 let commit_new = (end + ALLOC_CHUNK_SIZE - 1) & !(ALLOC_CHUNK_SIZE - 1);
135
136 if commit_new > self.capacity
137 || unsafe {
138 sys::virtual_commit(self.base.add(commit_old), commit_new - commit_old).is_err()
139 }
140 {
141 return Err(AllocError);
142 }
143
144 if cfg!(debug_assertions) {
145 let ptr = unsafe { self.base.add(offset) };
146 let len = (end + 128).min(self.commit.get()) - offset;
147 unsafe { slice::from_raw_parts_mut(ptr.as_ptr(), len).fill(0xCD) };
148 }
149
150 self.commit.replace(commit_new);
151 self.offset.replace(end);
152 Ok(unsafe { NonNull::slice_from_raw_parts(self.base.add(beg), end - beg) })
153 }
154
155 #[allow(clippy::mut_from_ref)]
156 pub fn alloc_uninit<T>(&self) -> &mut MaybeUninit<T> {
157 let bytes = mem::size_of::<T>();
158 let alignment = mem::align_of::<T>();
159 let ptr = self.alloc_raw(bytes, alignment).unwrap();
160 unsafe { ptr.cast().as_mut() }
161 }
162
163 #[allow(clippy::mut_from_ref)]
164 pub fn alloc_uninit_slice<T>(&self, count: usize) -> &mut [MaybeUninit<T>] {
165 let bytes = mem::size_of::<T>() * count;
166 let alignment = mem::align_of::<T>();
167 let ptr = self.alloc_raw(bytes, alignment).unwrap();
168 unsafe { slice::from_raw_parts_mut(ptr.cast().as_ptr(), count) }
169 }
170}
171
172impl Drop for Arena {
173 fn drop(&mut self) {
174 if self.base != NonNull::dangling() {
175 unsafe { sys::virtual_release(self.base, self.capacity) };
176 }
177 }
178}
179
180impl Default for Arena {
181 fn default() -> Self {
182 Self::empty()
183 }
184}
185
// `Allocator` implementation so the arena can back `Vec`, `Box`, etc.
// Note: `grow`/`shrink` below are stricter than the trait contract — they
// assume the new alignment never exceeds the old one (debug-asserted only).
unsafe impl Allocator for Arena {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_raw(layout.size(), layout.align())
    }

    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let p = self.alloc_raw(layout.size(), layout.align())?;
        // Zero the full returned slice (`p.len()`), which may exceed
        // `layout.size()` — this also undoes the debug-build 0xCD poison.
        unsafe { p.cast::<u8>().as_ptr().write_bytes(0, p.len()) }
        Ok(p)
    }

    // Individual deallocation is a no-op; arena memory is only reclaimed
    // wholesale via `Arena::reset` or `Drop`.
    unsafe fn deallocate(&self, _: NonNull<u8>, _: Layout) {}

    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(new_layout.size() >= old_layout.size());
        // Stricter than the `Allocator` contract: a larger new alignment is
        // not supported (the in-place path below cannot realign `ptr`).
        debug_assert!(new_layout.align() <= old_layout.align());

        let new_ptr;

        // If `ptr` is the most recent allocation — its end coincides with the
        // current bump offset — extend it in place by bump-allocating the size
        // delta (alignment 1) directly behind it.
        if unsafe { ptr.add(old_layout.size()) == self.base.add(self.offset.get()) } {
            new_ptr = ptr;
            let delta = new_layout.size() - old_layout.size();
            self.alloc_raw(delta, 1)?;
        } else {
            cold_path();

            // Otherwise: allocate fresh storage and copy the old contents.
            // The old block is "freed" via the no-op `deallocate`.
            new_ptr = self.allocate(new_layout)?.cast();

            unsafe {
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), old_layout.size());
                self.deallocate(ptr, old_layout);
            }
        }

        Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
    }

    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        unsafe {
            let ptr = self.grow(ptr, old_layout, new_layout)?;

            // Only the newly grown tail needs zeroing; the first
            // `old_layout.size()` bytes keep their existing contents.
            ptr.cast::<u8>()
                .add(old_layout.size())
                .write_bytes(0, new_layout.size() - old_layout.size());

            Ok(ptr)
        }
    }

    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(new_layout.size() <= old_layout.size());
        debug_assert!(new_layout.align() <= old_layout.align());

        let mut len = old_layout.size();

        // Only the most recent allocation can actually be shrunk: if `ptr`'s
        // end is the current bump offset, rewind the offset by the size
        // difference. Anything else is a usage error (debug-asserted); in
        // release it degrades to a no-op that reports the old length.
        if unsafe { ptr.add(len) == self.base.add(self.offset.get()) } {
            self.offset.set(self.offset.get() - len + new_layout.size());
            len = new_layout.size();
        } else {
            debug_assert!(
                false,
                "Did you call shrink_to_fit()? Only the last allocation can be shrunk!"
            );
        }

        Ok(NonNull::slice_from_raw_parts(ptr, len))
    }
}