use core::{
  fmt,
  mem::{self, MaybeUninit},
  ptr::{self, NonNull},
  slice,
};

use super::{common::*, sealed::Sealed, *};

#[allow(unused_imports)]
use std::boxed::Box;

#[cfg(feature = "std")]
type Memory =
  super::memory::Memory<UnsafeCell<usize>, std::rc::Rc<std::path::PathBuf>, sealed::Header>;

#[cfg(not(feature = "std"))]
type Memory = super::memory::Memory<UnsafeCell<usize>, std::rc::Rc<()>, sealed::Header>;

const SEGMENT_NODE_SIZE: usize = mem::size_of::<SegmentNode>();

mod sealed {
  use super::*;

  #[derive(Debug)]
  #[repr(C, align(8))]
  pub struct Header {
    pub(super) sentinel: SegmentNode,
    pub(super) allocated: u32,
    pub(super) min_segment_size: u32,
    pub(super) discarded: u32,
  }

  impl super::super::sealed::Header for Header {
    #[inline]
    fn new(size: u32, min_segment_size: u32) -> Self {
      Self {
        allocated: size,
        sentinel: SegmentNode::sentinel(),
        min_segment_size,
        discarded: 0,
      }
    }

    #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
    #[inline]
    fn load_allocated(&self) -> u32 {
      self.allocated
    }

    #[inline]
    fn load_min_segment_size(&self) -> u32 {
      self.min_segment_size
    }
  }
}

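/// A freelist node stored inline in the ARENA. The `u64` appears to pack two
/// 32-bit halves via `encode_segment_node` / `decode_segment_node` (not shown
/// here): the segment's data size and the offset of the next node. The
/// sentinel stores `SENTINEL_SEGMENT_NODE_OFFSET` in both halves.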
#[repr(transparent)]
struct SegmentNode(UnsafeCell<u64>);

impl core::fmt::Debug for SegmentNode {
  fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
    let (offset, next) = decode_segment_node(*self.0.as_inner_ref());
    f.debug_struct("SegmentNode")
      .field("offset", &offset)
      .field("next", &next)
      .finish()
  }
}

impl core::ops::Deref for SegmentNode {
  type Target = UnsafeCell<u64>;

  #[inline]
  fn deref(&self) -> &Self::Target {
    &self.0
  }
}

impl SegmentNode {
  #[inline]
  fn sentinel() -> Self {
    Self(UnsafeCell::new(encode_segment_node(
      SENTINEL_SEGMENT_NODE_OFFSET,
      SENTINEL_SEGMENT_NODE_OFFSET,
    )))
  }

  #[allow(clippy::mut_from_ref)]
  #[inline]
  fn as_inner_mut(&self) -> &mut u64 {
    unsafe { &mut *self.0.as_inner_mut() }
  }
}

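/// A decoded view of a freelist segment: `ptr_offset` points at the in-ARENA
/// `SegmentNode`, `data_offset` is `ptr_offset + SEGMENT_NODE_SIZE`, and
/// `data_size` is the number of reusable bytes that follow the node.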
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct Segment {
  ptr: *mut u8,
  ptr_offset: u32,
  data_offset: u32,
  data_size: u32,
}

impl Segment {
  #[inline]
  unsafe fn from_offset(arena: &Arena, offset: u32, data_size: u32) -> Self {
    Self {
      ptr: arena.ptr,
      ptr_offset: offset,
      data_offset: offset + SEGMENT_NODE_SIZE as u32,
      data_size,
    }
  }

  #[inline]
  fn as_mut(&mut self) -> &mut SegmentNode {
    unsafe {
      let ptr = self.ptr.add(self.ptr_offset as usize);
      &mut *ptr.cast::<SegmentNode>()
    }
  }

  #[inline]
  fn update_next_node(&mut self, next: u32) {
    *self.as_mut().as_inner_mut() = encode_segment_node(self.data_size, next);
  }
}

pub struct Arena {
  ptr: *mut u8,
  cap: u32,
  inner: NonNull<Memory>,

  reserved: usize,
  data_offset: u32,
  flag: MemoryFlags,
  max_retries: u8,
  unify: bool,
  magic_version: u16,
  version: u16,
  ro: bool,
  freelist: Freelist,
  page_size: u32,
}

impl fmt::Debug for Arena {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let header = self.header();
    let allocated = header.allocated;

    f.debug_struct("Arena")
      .field("cap", &self.cap)
      .field("data_offset", &self.data_offset)
      .field("allocated", &allocated)
      .field("discarded", &header.discarded)
      .field("freelist", &self.freelist)
      .field("page_size", &self.page_size)
      .field("reserved", &self.reserved)
      .field("magic_version", &self.magic_version)
      .field("version", &self.version)
      .field("read_only", &self.ro)
      .field("unify", &self.unify)
      .finish()
  }
}

impl Clone for Arena {
  fn clone(&self) -> Self {
    unsafe {
      use super::sealed::RefCounter;

      let memory = self.inner.as_ref();

      let old_size = memory.refs().fetch_add(1, Ordering::Release);
      if old_size > usize::MAX >> 1 {
        dbutils::abort();
      }

      Self {
        max_retries: self.max_retries,
        reserved: self.reserved,
        flag: self.flag,
        magic_version: self.magic_version,
        version: self.version,
        ptr: self.ptr,
        data_offset: self.data_offset,
        ro: self.ro,
        inner: self.inner,
        unify: self.unify,
        cap: self.cap,
        freelist: self.freelist,
        page_size: self.page_size,
      }
    }
  }
}

impl From<Memory> for Arena {
  fn from(memory: Memory) -> Self {
    let ptr = memory.as_mut_ptr();

    Self {
      reserved: memory.reserved(),
      freelist: memory.freelist(),
      cap: memory.cap(),
      flag: memory.flag(),
      unify: memory.unify(),
      magic_version: memory.magic_version(),
      version: memory.version(),
      ptr,
      ro: memory.read_only(),
      max_retries: memory.maximum_retries(),
      data_offset: memory.data_offset() as u32,
      inner: unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(memory)) as _) },
      page_size: *PAGE_SIZE,
    }
  }
}

impl Sealed for Arena {
  #[cfg(feature = "std")]
  type PathRefCounter = std::rc::Rc<std::path::PathBuf>;

  #[cfg(not(feature = "std"))]
  type PathRefCounter = std::rc::Rc<()>;

  type RefCounter = UnsafeCell<usize>;

  type Header = sealed::Header;
}

impl AsRef<Memory> for Arena {
  #[inline]
  fn as_ref(&self) -> &Memory {
    unsafe { self.inner.as_ref() }
  }
}

impl Allocator for Arena {
  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))]
  type Path = std::rc::Rc<std::path::PathBuf>;

  #[inline]
  fn reserved_bytes(&self) -> usize {
    self.reserved
  }

  #[inline]
  fn reserved_slice(&self) -> &[u8] {
    if self.reserved == 0 {
      return &[];
    }

    unsafe { slice::from_raw_parts(self.ptr, self.reserved) }
  }

  #[inline]
  unsafe fn reserved_slice_mut(&self) -> &mut [u8] {
    unsafe {
      if self.reserved == 0 {
        return &mut [];
      }

      if self.ro {
        panic!("ARENA is read-only");
      }

      slice::from_raw_parts_mut(self.ptr, self.reserved)
    }
  }

  #[inline]
  unsafe fn alloc<T>(&self) -> Result<RefMut<'_, T, Self>, Error> {
    if mem::size_of::<T>() == 0 {
      return Ok(RefMut::new_zst(self));
    }

    let allocated = self
      .alloc_in::<T>()?
      .expect("allocated size is not zero, but got None");
    let ptr = unsafe { self.get_aligned_pointer_mut::<T>(allocated.memory_offset as usize) };
    if mem::needs_drop::<T>() {
      unsafe {
        let ptr: *mut MaybeUninit<T> = ptr.as_ptr().cast();
        ptr::write(ptr, MaybeUninit::uninit());

        Ok(RefMut::new(ptr::read(ptr), allocated, self))
      }
    } else {
      Ok(RefMut::new_inline(ptr, allocated, self))
    }
  }

  #[inline]
  fn alloc_aligned_bytes<T>(&self, size: u32) -> Result<BytesRefMut<'_, Self>, Error> {
    self.alloc_aligned_bytes_in::<T>(size).map(|a| match a {
      None => BytesRefMut::null(self),
      Some(allocated) => unsafe { BytesRefMut::new(self, allocated) },
    })
  }

  #[inline]
  fn alloc_bytes(&self, size: u32) -> Result<BytesRefMut<'_, Self>, Error> {
    self.alloc_bytes_in(size).map(|a| match a {
      None => BytesRefMut::null(self),
      Some(allocated) => unsafe { BytesRefMut::new(self, allocated) },
    })
  }

  #[inline]
  fn allocated(&self) -> usize {
    self.header().allocated as usize
  }

  #[inline]
  fn raw_mut_ptr(&self) -> *mut u8 {
    self.ptr
  }

  #[inline]
  fn raw_ptr(&self) -> *const u8 {
    self.ptr
  }

  unsafe fn clear(&self) -> Result<(), Error> {
    unsafe {
      if self.ro {
        return Err(Error::ReadOnly);
      }

      let memory = &mut *self.inner.as_ptr();
      memory.clear();

      Ok(())
    }
  }

  #[inline]
  fn data_offset(&self) -> usize {
    self.data_offset as usize
  }

  #[inline]
  unsafe fn dealloc(&self, offset: u32, size: u32) -> bool {
    let header = self.header_mut();
    if header.allocated == offset + size {
      header.allocated = offset;
      return true;
    }

    match self.freelist {
      Freelist::None => {
        self.increase_discarded(size);
        true
      }
      Freelist::Optimistic => self.optimistic_dealloc(offset, size),
      Freelist::Pessimistic => self.pessimistic_dealloc(offset, size),
    }
  }

  #[inline]
  fn discard_freelist(&self) -> Result<u32, Error> {
    if self.ro {
      return Err(Error::ReadOnly);
    }

    Ok(match self.freelist {
      Freelist::None => 0,
      _ => self.discard_freelist_in(),
    })
  }

  #[inline]
  fn discarded(&self) -> u32 {
    self.header().discarded
  }

  #[inline]
  fn increase_discarded(&self, size: u32) {
    #[cfg(feature = "tracing")]
    tracing::debug!("discard {size} bytes");

    self.header_mut().discarded += size;
  }

  #[inline]
  fn magic_version(&self) -> u16 {
    self.magic_version
  }

  #[inline]
  fn minimum_segment_size(&self) -> u32 {
    self.header().min_segment_size
  }

  #[inline]
  fn set_minimum_segment_size(&self, size: u32) {
    self.header_mut().min_segment_size = size;
  }

  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))]
  #[inline]
  fn path(&self) -> Option<&Self::Path> {
    unsafe { self.inner.as_ref().path() }
  }

  #[inline]
  unsafe fn offset(&self, ptr: *const u8) -> usize {
    unsafe {
      let offset = ptr.offset_from(self.ptr);
      offset as usize
    }
  }

  #[inline]
  fn page_size(&self) -> usize {
    self.page_size as usize
  }

  #[inline]
  fn refs(&self) -> usize {
    unsafe { *self.inner.as_ref().refs().as_inner_ref() }
  }

  #[inline]
  fn remaining(&self) -> usize {
    (self.cap as usize).saturating_sub(self.allocated())
  }

  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))]
  #[inline]
  fn remove_on_drop(&self, remove_on_drop: bool) {
    unsafe { (*self.inner.as_ptr()).set_remove_on_drop(remove_on_drop) }
  }

  unsafe fn rewind(&self, pos: ArenaPosition) {
    let data_offset = self.data_offset;
    let cap = self.cap;
    let header = self.header_mut();
    let allocated = header.allocated;
    let final_offset = match pos {
      ArenaPosition::Start(offset) => offset.max(data_offset).min(cap),
      ArenaPosition::Current(offset) => {
        let offset = allocated as i64 + offset;
        #[allow(clippy::comparison_chain)]
        if offset > 0 {
          if offset >= (cap as i64) {
            cap
          } else {
            let offset = offset as u32;
            offset.max(data_offset).min(cap)
          }
        } else if offset < 0 {
          data_offset
        } else {
          return;
        }
      }
      ArenaPosition::End(offset) => match cap.checked_sub(offset) {
        Some(val) => val.max(data_offset),
        None => data_offset,
      },
    };

    header.allocated = final_offset;
  }

  #[inline]
  fn version(&self) -> u16 {
    self.version
  }
}

impl Arena {
  #[cfg(all(feature = "memmap", not(target_family = "wasm")))]
  pub fn truncate(&mut self, mut size: usize) -> std::io::Result<()> {
    if self.ro {
      return Err(std::io::Error::new(
        std::io::ErrorKind::PermissionDenied,
        "ARENA is read-only",
      ));
    }

    let allocated = self.allocated();
    if allocated >= size {
      size = allocated;
    }

    unsafe {
      let memory = self.inner.as_mut();
      memory.truncate(allocated, size)?;
      self.ptr = memory.as_mut_ptr();
      self.cap = memory.cap();
    }
    Ok(())
  }

  #[cfg(not(all(feature = "memmap", not(target_family = "wasm"))))]
  pub fn truncate(&mut self, mut size: usize) {
    let allocated = self.allocated();
    if allocated >= size {
      size = allocated;
    }

    unsafe {
      let memory = self.inner.as_mut();
      memory.truncate(allocated, size);
      self.ptr = memory.as_mut_ptr();
      self.cap = memory.cap();
    }
  }

  #[inline]
  fn header(&self) -> &sealed::Header {
    unsafe { (*self.inner.as_ptr()).header() }
  }

  #[allow(clippy::mut_from_ref)]
  #[inline]
  fn header_mut(&self) -> &mut sealed::Header {
    unsafe { (*self.inner.as_ptr()).header_mut() }
  }

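  /// Walks the freelist from the sentinel and returns the predecessor of the
  /// first node for which `check(val, node_size)` holds (or the tail if there
  /// is none), paired with that predecessor's raw encoded value.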
  fn find_position(&self, val: u32, check: impl Fn(u32, u32) -> bool) -> (u64, &UnsafeCell<u64>) {
    let header = self.header_mut();
    let mut current: &UnsafeCell<u64> = &header.sentinel;
    let mut current_node = current.as_inner_ref();
    let (mut current_node_size, mut next_offset) = decode_segment_node(*current_node);

    loop {
      // the list is empty
      if current_node_size == SENTINEL_SEGMENT_NODE_SIZE
        && next_offset == SENTINEL_SEGMENT_NODE_OFFSET
      {
        return (*current_node, current);
      }

      // the current node is the tail
      if next_offset == SENTINEL_SEGMENT_NODE_OFFSET {
        return (*current_node, current);
      }

      let next = self.get_segment_node(next_offset);
      let next_node = next.as_inner_ref();
      let (next_node_size, next_next_offset) = decode_segment_node(*next_node);

      if check(val, next_node_size) {
        return (*current_node, current);
      }

      current = next;
      current_node = next_node;
      current_node_size = next_node_size;
      next_offset = next_next_offset;
    }
  }

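  /// Like `find_position`, but returns both the predecessor and the first node
  /// satisfying `check` (each paired with its raw encoded value), or `None`
  /// when the freelist is empty or no node matches.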
  #[allow(clippy::type_complexity)]
  fn find_prev_and_next(
    &self,
    val: u32,
    check: impl Fn(u32, u32) -> bool,
  ) -> Option<((u64, &UnsafeCell<u64>), (u64, &UnsafeCell<u64>))> {
    let header = self.header();
    let mut current: &UnsafeCell<u64> = &header.sentinel;
    let mut current_node = current.as_inner_ref();
    let (mut current_node_size, mut next_offset) = decode_segment_node(*current_node);

    loop {
      // the list is empty
      if current_node_size == SENTINEL_SEGMENT_NODE_SIZE
        && next_offset == SENTINEL_SEGMENT_NODE_OFFSET
      {
        return None;
      }

      // the current node is the tail
      if next_offset == SENTINEL_SEGMENT_NODE_OFFSET {
        return None;
      }

      let next = self.get_segment_node(next_offset);
      let next_node = next.as_inner_ref();
      let (next_node_size, next_next_offset) = decode_segment_node(*next_node);

      if check(val, next_node_size) {
        return Some(((*current_node, current), (*next_node, next)));
      }

      current = self.get_segment_node(next_offset);
      current_node = next_node;
      current_node_size = next_node_size;
      next_offset = next_next_offset;
    }
  }

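  /// Returns the freed range to the freelist used by `Freelist::Optimistic`,
  /// which keeps nodes sorted by segment size in descending order so that
  /// allocation can simply pop the head (the largest segment).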
  fn optimistic_dealloc(&self, offset: u32, size: u32) -> bool {
    let Some(mut segment_node) = self.try_new_segment(offset, size) else {
      return false;
    };

    loop {
      let (current_node_size_and_next_node_offset, current) = self
        .find_position(segment_node.data_size, |val, next_node_size| {
          val >= next_node_size
        });
      let (node_size, next_node_offset) =
        decode_segment_node(current_node_size_and_next_node_offset);

      if segment_node.ptr_offset == next_node_offset {
        continue;
      }

      segment_node.update_next_node(next_node_offset);

      *current.as_inner_ref_mut() = encode_segment_node(node_size, segment_node.ptr_offset);

      #[cfg(feature = "tracing")]
      tracing::debug!(
        "create segment node ({} bytes) at {}, next segment {next_node_offset}",
        segment_node.data_size,
        segment_node.ptr_offset
      );

      self.increase_discarded(segment_node.data_offset - segment_node.ptr_offset);
      return true;
    }
  }

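  /// Returns the freed range to the freelist used by `Freelist::Pessimistic`,
  /// which keeps nodes sorted by segment size in ascending order so that
  /// allocation can search for the smallest segment that fits (best fit).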
  fn pessimistic_dealloc(&self, offset: u32, size: u32) -> bool {
    let Some(mut segment_node) = self.try_new_segment(offset, size) else {
      return false;
    };

    let (current_node_size_and_next_node_offset, current) = self
      .find_position(segment_node.data_size, |val, next_node_size| {
        val <= next_node_size
      });
    let (node_size, next_node_offset) = decode_segment_node(current_node_size_and_next_node_offset);

    segment_node.update_next_node(next_node_offset);

    *current.as_inner_ref_mut() = encode_segment_node(node_size, segment_node.ptr_offset);

    #[cfg(feature = "tracing")]
    tracing::debug!(
      "create segment node ({} bytes) at {}, next segment {next_node_offset}",
      segment_node.data_size,
      segment_node.ptr_offset
    );

    self.increase_discarded(segment_node.data_offset - segment_node.ptr_offset);
    true
  }

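  /// Bump-allocates `size` bytes from the main memory while it still fits
  /// under `cap`; otherwise falls back to the configured freelist.
  /// Zero-sized requests return `Ok(None)`.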
  fn alloc_bytes_in(&self, size: u32) -> Result<Option<Meta>, Error> {
    if self.ro {
      return Err(Error::ReadOnly);
    }

    if size == 0 {
      return Ok(None);
    }
    let header = self.header_mut();

    let want = header.allocated + size;
    if want <= self.cap {
      let offset = header.allocated;
      header.allocated = want;

      #[cfg(feature = "tracing")]
      tracing::debug!("allocate {} bytes at offset {} from memory", size, offset);

      let allocated = Meta::new(self.ptr as _, offset, size);
      unsafe { allocated.clear(self) };
      return Ok(Some(allocated));
    }

    match self.freelist {
      Freelist::None => Err(Error::InsufficientSpace {
        requested: size,
        available: self.remaining() as u32,
      }),
      Freelist::Optimistic => match self.alloc_slow_path_optimistic(size) {
        Ok(bytes) => Ok(Some(bytes)),
        Err(e) => Err(e),
      },
      Freelist::Pessimistic => match self.alloc_slow_path_pessimistic(size) {
        Ok(bytes) => Ok(Some(bytes)),
        Err(e) => Err(e),
      },
    }
  }

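  /// Like `alloc_bytes_in`, but reserves room for a `T` aligned to
  /// `align_of::<T>()` followed by `extra` bytes.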
  fn alloc_aligned_bytes_in<T>(&self, extra: u32) -> Result<Option<Meta>, Error> {
    if self.ro {
      return Err(Error::ReadOnly);
    }

    if mem::size_of::<T>() == 0 {
      return self.alloc_bytes_in(extra);
    }

    let header = self.header_mut();
    let allocated = header.allocated;
    let aligned_offset = align_offset::<T>(allocated);
    let size = mem::size_of::<T>() as u32;
    let want = aligned_offset + size + extra;

    if want <= self.cap {
      let offset = header.allocated;
      header.allocated = want;
      let mut allocated = Meta::new(self.ptr as _, offset, want - offset);
      allocated.align_bytes_to::<T>();
      #[cfg(feature = "tracing")]
      tracing::debug!(
        "allocate {} bytes at offset {} from memory",
        want - offset,
        offset
      );
      return Ok(Some(allocated));
    }

    match self.freelist {
      Freelist::None => Err(Error::InsufficientSpace {
        requested: size + extra,
        available: self.remaining() as u32,
      }),
      Freelist::Optimistic => {
        match self.alloc_slow_path_optimistic(Self::pad::<T>() as u32 + extra) {
          Ok(mut bytes) => {
            bytes.align_bytes_to::<T>();
            Ok(Some(bytes))
          }
          Err(e) => Err(e),
        }
      }
      Freelist::Pessimistic => {
        match self.alloc_slow_path_pessimistic(Self::pad::<T>() as u32 + extra) {
          Ok(mut bytes) => {
            bytes.align_bytes_to::<T>();
            Ok(Some(bytes))
          }
          Err(e) => Err(e),
        }
      }
    }
  }

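  /// Allocates space for a single `T`, aligning the allocation to
  /// `align_of::<T>()`. Zero-sized types return `Ok(None)`.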
  fn alloc_in<T>(&self) -> Result<Option<Meta>, Error> {
    if self.ro {
      return Err(Error::ReadOnly);
    }

    let t_size = mem::size_of::<T>();
    if t_size == 0 {
      return Ok(None);
    }

    let header = self.header_mut();
    let allocated = header.allocated;
    let align_offset = align_offset::<T>(allocated);
    let size = t_size as u32;
    let want = align_offset + size;

    if want <= self.cap {
      let offset = header.allocated;
      header.allocated = want;
      let mut allocated = Meta::new(self.ptr as _, offset, want - offset);
      allocated.align_to::<T>();

      #[cfg(feature = "tracing")]
      tracing::debug!(
        "allocate {} bytes at offset {} from memory",
        want - offset,
        offset
      );

      unsafe { allocated.clear(self) };
      return Ok(Some(allocated));
    }

    match self.freelist {
      Freelist::None => Err(Error::InsufficientSpace {
        requested: want,
        available: self.remaining() as u32,
      }),
      Freelist::Optimistic => match self.alloc_slow_path_optimistic(Self::pad::<T>() as u32) {
        Ok(mut allocated) => {
          allocated.align_to::<T>();
          Ok(Some(allocated))
        }
        Err(e) => Err(e),
      },
      Freelist::Pessimistic => match self.alloc_slow_path_pessimistic(Self::pad::<T>() as u32) {
        Ok(mut allocated) => {
          allocated.align_to::<T>();
          Ok(Some(allocated))
        }
        Err(e) => Err(e),
      },
    }
  }

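  /// Slow path for `Freelist::Pessimistic`: best-fit search of the freelist.
  /// Unlinks the first (smallest) segment that can hold `size` bytes and, if
  /// the remainder is still a valid segment, links it back into the freelist.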
  fn alloc_slow_path_pessimistic(&self, size: u32) -> Result<Meta, Error> {
    if self.ro {
      return Err(Error::ReadOnly);
    }

    let Some(((prev_node_val, prev_node), (next_node_val, _))) =
      self.find_prev_and_next(size, |val, next_node_size| val <= next_node_size)
    else {
      return Err(Error::InsufficientSpace {
        requested: size,
        available: self.remaining() as u32,
      });
    };

    let (prev_node_size, next_node_offset) = decode_segment_node(prev_node_val);
    let (next_node_size, next_next_node_offset) = decode_segment_node(next_node_val);

    let remaining = next_node_size - size;

    let segment_node = unsafe { Segment::from_offset(self, next_node_offset, next_node_size) };

    let updated_prev = encode_segment_node(prev_node_size, next_next_node_offset);

    *prev_node.as_inner_ref_mut() = updated_prev;

    #[cfg(feature = "tracing")]
    tracing::debug!(
      "allocate {} bytes at offset {} from segment",
      size,
      next_node_offset
    );

    let mut memory_size = next_node_size;
    let data_end_offset = segment_node.data_offset + size;
    if self.validate_segment(data_end_offset, remaining) {
      memory_size -= remaining;
      self.pessimistic_dealloc(data_end_offset, remaining);
    }

    let mut allocated = Meta::new(self.ptr as _, segment_node.ptr_offset, memory_size);
    allocated.ptr_offset = segment_node.data_offset;
    allocated.ptr_size = size;
    unsafe {
      allocated.clear(self);
    }
    Ok(allocated)
  }

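  /// Slow path for `Freelist::Optimistic`: pops the head of the freelist (the
  /// largest segment), failing fast if even that segment is too small, and
  /// links any valid remainder back into the freelist.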
  fn alloc_slow_path_optimistic(&self, size: u32) -> Result<Meta, Error> {
    if self.ro {
      return Err(Error::ReadOnly);
    }

    let header = self.header();

    let sentinel = header.sentinel.as_inner_ref();
    let (sentinel_node_size, head_node_offset) = decode_segment_node(*sentinel);

    if sentinel_node_size == SENTINEL_SEGMENT_NODE_SIZE
      && head_node_offset == SENTINEL_SEGMENT_NODE_OFFSET
    {
      return Err(Error::InsufficientSpace {
        requested: size,
        available: self.remaining() as u32,
      });
    }

    let head = self.get_segment_node(head_node_offset);
    let head_node_size_and_next_node_offset = head.as_inner_ref();
    let (head_node_size, next_node_offset) =
      decode_segment_node(*head_node_size_and_next_node_offset);

    if size > head_node_size {
      return Err(Error::InsufficientSpace {
        requested: size,
        available: head_node_size,
      });
    }

    let remaining = head_node_size - size;

    let segment_node = unsafe { Segment::from_offset(self, head_node_offset, head_node_size) };

    *header.sentinel.as_inner_mut() = encode_segment_node(sentinel_node_size, next_node_offset);

    #[cfg(feature = "tracing")]
    tracing::debug!(
      "allocate {} bytes at offset {} from segment",
      size,
      segment_node.data_offset
    );

    let mut memory_size = head_node_size;
    let data_end_offset = segment_node.data_offset + size;
    if self.validate_segment(data_end_offset, remaining) {
      memory_size -= remaining;
      self.optimistic_dealloc(data_end_offset, remaining);
    }

    let mut allocated = Meta::new(self.ptr as _, segment_node.ptr_offset, memory_size);
    allocated.ptr_offset = segment_node.data_offset;
    allocated.ptr_size = size;
    unsafe {
      allocated.clear(self);
    }
    Ok(allocated)
  }

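  /// Drains the freelist, counting every segment's data size as discarded,
  /// and returns the total number of bytes discarded.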
  fn discard_freelist_in(&self) -> u32 {
    let header = self.header();
    let mut discarded = 0;

    loop {
      let sentinel = header.sentinel.as_inner_ref();
      let (sentinel_node_size, head_node_offset) = decode_segment_node(*sentinel);

      if sentinel_node_size == SENTINEL_SEGMENT_NODE_SIZE
        && head_node_offset == SENTINEL_SEGMENT_NODE_OFFSET
      {
        return discarded;
      }

      let head = self.get_segment_node(head_node_offset);
      let head_node_size_and_next_node_offset = head.as_inner_ref();
      let (head_node_size, next_node_offset) =
        decode_segment_node(*head_node_size_and_next_node_offset);

      let segment_node = unsafe { Segment::from_offset(self, head_node_offset, head_node_size) };

      *header.sentinel.as_inner_mut() = encode_segment_node(sentinel_node_size, next_node_offset);

      #[cfg(feature = "tracing")]
      tracing::debug!("discard {} bytes", segment_node.data_size);
      self.header_mut().discarded += segment_node.data_size;

      discarded += segment_node.data_size;
    }
  }

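  /// Returns `true` if a freed range at `offset` spanning `size` bytes is
  /// worth turning into a segment: it must have room for a `SegmentNode`
  /// (plus alignment padding) and at least `min_segment_size` usable bytes.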
  #[inline]
  fn validate_segment(&self, offset: u32, size: u32) -> bool {
    if offset == 0 || size == 0 {
      return false;
    }

    let aligned_offset = align_offset::<u64>(offset) as usize;
    let padding = aligned_offset - offset as usize;
    let segmented_node_size = padding + SEGMENT_NODE_SIZE;
    if segmented_node_size >= size as usize {
      return false;
    }

    let available_bytes = size - segmented_node_size as u32;
    if available_bytes < self.header().min_segment_size {
      return false;
    }

    true
  }

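  /// Builds a `Segment` over the freed range, aligning the node to 8 bytes.
  /// Ranges too small to hold a node plus `min_segment_size` usable bytes are
  /// counted as discarded and `None` is returned.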
  #[inline]
  fn try_new_segment(&self, offset: u32, size: u32) -> Option<Segment> {
    if offset == 0 || size == 0 {
      return None;
    }

    let aligned_offset = align_offset::<u64>(offset) as usize;
    let padding = aligned_offset - offset as usize;
    let segmented_node_size = padding + SEGMENT_NODE_SIZE;
    if segmented_node_size >= size as usize {
      self.increase_discarded(size);
      return None;
    }

    let available_bytes = size - segmented_node_size as u32;
    if available_bytes < self.header().min_segment_size {
      self.increase_discarded(size);
      return None;
    }

    Some(Segment {
      ptr: self.ptr,
      ptr_offset: aligned_offset as u32,
      data_offset: (aligned_offset + SEGMENT_NODE_SIZE) as u32,
      data_size: available_bytes,
    })
  }

  #[inline]
  fn get_segment_node(&self, offset: u32) -> &UnsafeCell<u64> {
    unsafe {
      let ptr = self.ptr.add(offset as usize);
      &*(ptr as *const _)
    }
  }

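  /// Worst-case number of bytes needed to place a `T` at an arbitrary offset:
  /// `size_of::<T>() + align_of::<T>() - 1`.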
  #[inline]
  fn pad<T>() -> usize {
    let size = mem::size_of::<T>();
    let align = mem::align_of::<T>();
    size + align - 1
  }

  #[cfg(test)]
  #[cfg(feature = "std")]
  #[allow(dead_code)]
  pub(super) fn print_segment_list(&self) {
    let header = self.header();
    let mut current: &UnsafeCell<u64> = &header.sentinel;

    loop {
      let current_node = current.as_inner_ref();
      let (node_size, next_node_offset) = decode_segment_node(*current_node);

      if node_size == SENTINEL_SEGMENT_NODE_SIZE {
        if next_node_offset == SENTINEL_SEGMENT_NODE_OFFSET {
          break;
        }

        current = self.get_segment_node(next_node_offset);
        continue;
      }

      std::println!("node_size: {node_size}, next_node_offset: {next_node_offset}");

      if next_node_offset == SENTINEL_SEGMENT_NODE_OFFSET {
        break;
      }
      current = self.get_segment_node(next_node_offset);
    }
  }
}

impl Drop for Arena {
  fn drop(&mut self) {
    use super::sealed::RefCounter;

    unsafe {
      let memory_ptr = self.inner.as_ptr();
      let memory = &*memory_ptr;
      if memory.refs().fetch_sub(1, Ordering::Release) != 1 {
        return;
      }

      let mut memory = Box::from_raw(memory_ptr);

      memory.unmount();
    }
  }
}

#[cfg(test)]
mod tests;