#![no_std]
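
//! A `no_std`, fixed-capacity arena allocator backed by an inline `[u8; N]`
//! buffer. Chunks are tracked by an intrusive doubly linked list of headers
//! stored in the buffer itself, and allocations are addressed through a
//! stable [`Index`] handle rather than a pointer.
//!
//! A minimal usage sketch (`this_crate` is a placeholder for the real crate
//! name):
//!
//! ```ignore
//! use this_crate::Arena; // placeholder path
//!
//! let mut arena = Arena::new([0u8; 4096]);
//! let index = arena.alloc(16).unwrap().index();
//! arena.get_mut(index).unwrap().fill(42);
//! assert_eq!(arena.get(index).unwrap().as_slice()[0], 42);
//! arena.dealloc(index);
//! ```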

#[cfg(test)]
extern crate std;

use core::fmt::{self, Display, Formatter};
use core::ops::{Deref, DerefMut};

const USIZE_LEN: usize = core::mem::size_of::<usize>();

#[derive(Clone, Debug)]
pub struct Arena<const N: usize> {
    /// Backing storage; chunk headers and their data live inline here.
    storage: [u8; N],
    /// First chunk header in the list, or `None` if the arena is empty.
    head: Option<NonMaxUsize>,
}

impl<const N: usize> Arena<N> {
    /// Creates an empty arena that owns `storage`.
    #[inline]
    pub const fn new(storage: [u8; N]) -> Self {
        Self {
            storage,
            head: None,
        }
    }

    /// Allocates `size` bytes and returns a mutable view into the storage,
    /// or an [`AllocError`] if no gap in the chunk list is large enough.
    pub fn alloc(&mut self, size: usize) -> Result<BufMut<'_>, AllocError> {
        // Zero-sized allocations never touch the chunk list; they are
        // represented by the sentinel index `usize::MAX`.
        if size == 0 {
            return Ok(BufMut {
                index: Index(usize::MAX),
                slice: &mut [],
            });
        }

        // No request larger than the storage minus one header can ever
        // succeed. `saturating_sub` keeps this sound for arenas smaller
        // than a single header.
        if size > self.storage.len().saturating_sub(Header::SIZE) {
            return Err(AllocError(size));
        }

        // First allocation: place the chunk at the start of the storage.
        if self.head.is_none() {
            let header = Header {
                next: None,
                prev: None,
                size,
            };

            self.head = NonMaxUsize::new(0);
            header.write_to_slice(&mut self.storage[..Header::SIZE]);
            return Ok(BufMut {
                index: Index(0),
                slice: &mut self.storage[Header::SIZE..Header::SIZE + size],
            });
        }

        // First fit: walk the chunk list and split off the first gap
        // (between a chunk's data and the next header, or the end of the
        // storage) that can hold a new header plus `size` bytes.
        let mut next = self.head.map(|v| v.get());

        while let Some(next_chunk) = next {
            let mut header =
                Header::from_slice(&self.storage[next_chunk..next_chunk + Header::SIZE]);

            let chunk_start = next_chunk + Header::SIZE;
            let chunk_end = header.next.unwrap_or(NonMaxUsize::new(N).unwrap()).get();
            let chunk_len = chunk_end - chunk_start;
            let free_len = chunk_len - header.size;

            if free_len >= size + Header::SIZE {
                let new_chunk_start = chunk_start + header.size;
                let new_header = Header {
                    next: header.next,
                    prev: NonMaxUsize::new(next_chunk),
                    size,
                };

                // Fix up the successor's back link before splicing in the
                // new chunk.
                if let Some(next) = &mut header.next {
                    let mut next_header = self.read_header(next.get());
                    next_header.prev = Some(NonMaxUsize::new(new_chunk_start).unwrap());
                    self.write_header(next.get(), next_header);
                }

                header.next = Some(NonMaxUsize::new(new_chunk_start).unwrap());

                header.write_to_slice(&mut self.storage[next_chunk..next_chunk + Header::SIZE]);

                new_header.write_to_slice(
                    &mut self.storage[new_chunk_start..new_chunk_start + Header::SIZE],
                );

                let index = Index(new_chunk_start);
                let slice = &mut self.storage
                    [new_chunk_start + Header::SIZE..new_chunk_start + Header::SIZE + size];

                return Ok(BufMut { index, slice });
            }

            next = header.next.map(|v| v.get());
        }

        Err(AllocError(size))
    }

    /// Like [`Arena::alloc`], but the returned buffer is zero-filled.
    pub fn alloc_zeroed(&mut self, size: usize) -> Result<BufMut<'_>, AllocError> {
        let mut buf = self.alloc(size)?;
        buf.fill(0);
        Ok(buf)
    }

    /// Deallocates the chunk at `index` by unlinking it from the chunk
    /// list. Indices that cannot name a valid chunk (including the
    /// zero-size sentinel) are ignored.
    pub fn dealloc(&mut self, index: Index) {
        let index = index.0;

        // Reject indices whose header could not lie inside the storage.
        if index.saturating_add(Header::SIZE) >= self.storage.len() {
            return;
        }

        let header = self.read_header(index);

        if cfg!(feature = "zero_headers") {
            self.storage[index..index + Header::SIZE].fill(0);
        }

        if let Some(next) = header.next {
            let mut next_header = self.read_header(next.get());
            next_header.prev = header.prev;
            self.write_header(next.get(), next_header);
        }

        if let Some(prev) = header.prev {
            let mut prev_header = self.read_header(prev.get());
            prev_header.next = header.next;
            self.write_header(prev.get(), prev_header);
        } else {
            // The freed chunk was the head; its successor becomes the new
            // head.
            self.head = header.next;
        }
    }

    /// Returns the buffer at `index`, or `None` if `index` does not name a
    /// chunk inside the storage.
    pub fn get(&self, index: Index) -> Option<Buf<'_>> {
        let index = index.0;
        if index == usize::MAX {
            return Some(Buf {
                index: Index(index),
                slice: &[],
            });
        }

        if index.saturating_add(Header::SIZE) >= self.storage.len() {
            return None;
        }

        let header = Header::from_slice(self.storage.get(index..index + Header::SIZE)?);
        self.storage
            .get(index + Header::SIZE..index + Header::SIZE + header.size)
            .map(|slice| Buf {
                index: Index(index),
                slice,
            })
    }

    /// Returns the buffer at `index` without any bounds checks.
    ///
    /// # Safety
    ///
    /// `index` must come from a live, non-zero-sized allocation in this
    /// arena that has not been deallocated.
    pub unsafe fn get_unchecked(&self, index: Index) -> Buf<'_> {
        let index = index.0;

        // SAFETY: the caller guarantees that `index` points at a live
        // header, so both ranges are in bounds.
        unsafe {
            let header =
                Header::from_slice(self.storage.get_unchecked(index..index + Header::SIZE));
            let slice = self
                .storage
                .get_unchecked(index + Header::SIZE..index + Header::SIZE + header.size);
            Buf {
                index: Index(index),
                slice,
            }
        }
    }

    /// Mutable counterpart of [`Arena::get`].
    pub fn get_mut(&mut self, index: Index) -> Option<BufMut<'_>> {
        let index = index.0;

        if index == usize::MAX {
            return Some(BufMut {
                index: Index(index),
                slice: &mut [],
            });
        }

        if index.saturating_add(Header::SIZE) >= self.storage.len() {
            return None;
        }

        let header = Header::from_slice(self.storage.get(index..index + Header::SIZE)?);
        self.storage
            .get_mut(index + Header::SIZE..index + Header::SIZE + header.size)
            .map(|slice| BufMut {
                index: Index(index),
                slice,
            })
    }

    /// Mutable counterpart of [`Arena::get_unchecked`].
    ///
    /// # Safety
    ///
    /// Same contract as [`Arena::get_unchecked`].
    pub unsafe fn get_unchecked_mut(&mut self, index: Index) -> BufMut<'_> {
        let index = index.0;

        // SAFETY: the caller guarantees that `index` points at a live
        // header, so both ranges are in bounds.
        unsafe {
            let header =
                Header::from_slice(self.storage.get_unchecked(index..index + Header::SIZE));
            let slice = self
                .storage
                .get_unchecked_mut(index + Header::SIZE..index + Header::SIZE + header.size);
            BufMut {
                index: Index(index),
                slice,
            }
        }
    }

    /// Deallocates every chunk at once by resetting the list head.
    pub fn clear(&mut self) {
        self.head = None;

        if cfg!(feature = "zero_headers") {
            self.storage.fill(0);
        }
    }

    fn read_header(&self, index: usize) -> Header {
        Header::from_slice(&self.storage[index..index + Header::SIZE])
    }

    fn write_header(&mut self, index: usize, header: Header) {
        header.write_to_slice(&mut self.storage[index..index + Header::SIZE]);
    }
}

impl<const N: usize> Default for Arena<N> {
    #[inline]
    fn default() -> Self {
        Self::new([0; N])
    }
}

/// Error returned when an allocation request cannot be satisfied; carries
/// the requested size in bytes.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct AllocError(usize);

impl Display for AllocError {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "allocation of {} bytes failed", self.0)
    }
}

/// Stable, opaque handle to an allocation, valid until the allocation is
/// freed or the arena is cleared.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct Index(usize);

/// Chunk metadata stored inline in front of each allocation as three
/// native-endian `usize`s.
#[derive(Copy, Clone, Debug)]
struct Header {
    next: Option<NonMaxUsize>,
    prev: Option<NonMaxUsize>,
    size: usize,
}

/// Shared view of an allocation.
#[derive(Copy, Clone, Debug)]
pub struct Buf<'a> {
    index: Index,
    slice: &'a [u8],
}

impl<'a> Buf<'a> {
    #[inline]
    pub fn as_slice(&self) -> &'a [u8] {
        self.slice
    }

    #[inline]
    pub fn index(&self) -> Index {
        self.index
    }
}

impl<'a> Deref for Buf<'a> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.slice
    }
}

/// Mutable view of an allocation.
#[derive(Debug)]
pub struct BufMut<'a> {
    index: Index,
    slice: &'a mut [u8],
}

impl<'a> BufMut<'a> {
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        self.slice
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.slice
    }

    #[inline]
    pub fn index(&self) -> Index {
        self.index
    }
}

impl<'a> Deref for BufMut<'a> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.slice
    }
}

impl<'a> DerefMut for BufMut<'a> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.slice
    }
}

impl Header {
    const SIZE: usize = USIZE_LEN * 3;

    /// Decodes a header from `slice`. A stored `usize::MAX` encodes a
    /// `None` link.
    fn from_slice(slice: &[u8]) -> Self {
        let next = usize::from_ne_bytes(slice[0..USIZE_LEN].try_into().unwrap());
        let prev = usize::from_ne_bytes(slice[USIZE_LEN..USIZE_LEN * 2].try_into().unwrap());
        let size = usize::from_ne_bytes(slice[USIZE_LEN * 2..USIZE_LEN * 3].try_into().unwrap());

        Self {
            next: NonMaxUsize::new(next),
            prev: NonMaxUsize::new(prev),
            size,
        }
    }

    /// Encodes the header into `buf`, writing `usize::MAX` for `None`
    /// links.
    fn write_to_slice(&self, buf: &mut [u8]) {
        buf[0..USIZE_LEN].copy_from_slice(
            &self
                .next
                .map(|v| v.get())
                .unwrap_or(usize::MAX)
                .to_ne_bytes(),
        );
        buf[USIZE_LEN..USIZE_LEN * 2].copy_from_slice(
            &self
                .prev
                .map(|v| v.get())
                .unwrap_or(usize::MAX)
                .to_ne_bytes(),
        );
        buf[USIZE_LEN * 2..USIZE_LEN * 3].copy_from_slice(&self.size.to_ne_bytes());
    }
}

/// A `usize` that is never `usize::MAX`, so `usize::MAX` can serve as the
/// in-storage encoding of `None`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct NonMaxUsize(usize);

impl NonMaxUsize {
    pub fn new(value: usize) -> Option<Self> {
        if value == usize::MAX {
            None
        } else {
            Some(Self(value))
        }
    }

    pub fn get(self) -> usize {
        self.0
    }
}

#[cfg(test)]
mod tests {
    use std::vec::Vec;

    use super::{Arena, Header, NonMaxUsize};

    #[test]
    fn arena_alloc() {
        let mut arena = Arena::new([0; 8192]);

        {
            arena.alloc(1024).unwrap();
            assert_eq!(arena.head, Some(NonMaxUsize::new(0).unwrap()));
            let header = arena.read_header(arena.head.unwrap().get());
            assert_eq!(header.next, None);
            assert_eq!(header.prev, None);
            assert_eq!(header.size, 1024);
        }

        {
            arena.alloc(1024).unwrap();
            assert_eq!(arena.head, Some(NonMaxUsize::new(0).unwrap()));
            let header0 = arena.read_header(arena.head.unwrap().get());
            assert_eq!(
                header0.next,
                Some(NonMaxUsize::new(1024 + Header::SIZE).unwrap())
            );
            assert_eq!(header0.prev, None);
            assert_eq!(header0.size, 1024);

            let header1 = arena.read_header(1024 + Header::SIZE);
            assert_eq!(header1.next, None);
            assert_eq!(header1.prev, Some(NonMaxUsize::new(0).unwrap()));
            assert_eq!(header1.size, 1024);
        }

        {
            arena.alloc(1024).unwrap();
            assert_eq!(arena.head, Some(NonMaxUsize::new(0).unwrap()));
            let header0 = arena.read_header(arena.head.unwrap().get());
            assert_eq!(
                header0.next,
                Some(NonMaxUsize::new(1024 + Header::SIZE).unwrap())
            );
            assert_eq!(header0.prev, None);
            assert_eq!(header0.size, 1024);

            let header1 = arena.read_header(1024 + Header::SIZE);
            assert_eq!(
                header1.next,
                Some(NonMaxUsize::new((1024 + Header::SIZE) * 2).unwrap())
            );
            assert_eq!(header1.prev, Some(NonMaxUsize::new(0).unwrap()));
            assert_eq!(header1.size, 1024);

            let header2 = arena.read_header((1024 + Header::SIZE) * 2);
            assert_eq!(header2.next, None);
            assert_eq!(
                header2.prev,
                Some(NonMaxUsize::new(1024 + Header::SIZE).unwrap())
            );
            assert_eq!(header2.size, 1024);
        }
    }
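
    // A sketch of the intended read/write flow: write through the `BufMut`
    // returned by `alloc`, then read the same bytes back through the stable
    // `Index` with `get`/`get_mut`.
    #[test]
    fn arena_get_roundtrip() {
        let mut arena = Arena::new([0; 256]);

        let index = {
            let mut buf = arena.alloc(4).unwrap();
            buf.copy_from_slice(&[1, 2, 3, 4]);
            buf.index()
        };

        assert_eq!(arena.get(index).unwrap().as_slice(), &[1, 2, 3, 4][..]);

        arena.get_mut(index).unwrap()[0] = 9;
        assert_eq!(arena.get(index).unwrap().as_slice(), &[9, 2, 3, 4][..]);
    }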

    #[test]
    fn arena_alloc_free() {
        let mut arena = Arena::new([0; 8192]);

        for _ in 0..1024 {
            let index = arena.alloc(8000).unwrap().index();
            arena.dealloc(index);
        }
    }
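
    // Zero-sized allocations are handled by the sentinel `Index(usize::MAX)`
    // rather than a real chunk; this exercises that path end to end.
    #[test]
    fn arena_alloc_zero_size() {
        let mut arena = Arena::new([0; 64]);

        let index = arena.alloc(0).unwrap().index();
        assert!(arena.get(index).unwrap().is_empty());
        assert!(arena.get_mut(index).unwrap().is_empty());

        // Deallocating the sentinel is a no-op: no chunk was ever linked in.
        arena.dealloc(index);
        assert_eq!(arena.head, None);
    }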

    #[test]
    fn arena_alloc_free_with_growth() {
        let mut arena = Arena::new([0; 8192]);

        let keys: Vec<_> = (0..128).map(|_| arena.alloc(8).unwrap().index()).collect();

        for key in keys {
            arena.dealloc(key);
        }
    }
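
    // Freeing a chunk in the middle of the list makes its space reusable:
    // the first-fit scan in `alloc` finds the gap between the surviving
    // neighbours and hands back the same index.
    #[test]
    fn arena_dealloc_reuses_gap() {
        let mut arena = Arena::new([0; 8192]);

        let _first = arena.alloc(1024).unwrap().index();
        let middle = arena.alloc(1024).unwrap().index();
        let _last = arena.alloc(1024).unwrap().index();

        arena.dealloc(middle);
        assert_eq!(arena.alloc(1024).unwrap().index(), middle);
    }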

    #[test]
    fn fuzz1() {
        let mut arena = Arena::new([0; 8192]);
        let mut keys = Vec::new();

        keys.push(arena.alloc(16).unwrap().index());
        keys.push(arena.alloc(16).unwrap().index());
        arena.dealloc(keys.remove(1));
        arena.dealloc(keys.remove(0));
        assert_eq!(arena.head, None);
        keys.push(arena.alloc(64).unwrap().index());
    }
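
    // `alloc_zeroed` is `alloc` followed by `fill(0)`, so a buffer carved
    // out of previously dirty storage still comes back zeroed.
    #[test]
    fn arena_alloc_zeroed() {
        let mut arena = Arena::new([0xAA; 128]);

        let buf = arena.alloc_zeroed(16).unwrap();
        assert!(buf.iter().all(|&b| b == 0));

        // A request that cannot fit alongside its header is rejected.
        assert!(arena.alloc(usize::MAX).is_err());
    }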
}