use core::{
    alloc::Layout,
    cell::Cell,
    marker::PhantomData,
    mem::ManuallyDrop,
    num::NonZeroUsize,
    ops::{Deref, Range},
    ptr::{self, NonNull},
};

use crate::{
    Checkpoint, SizedTypeProperties, align_pos,
    alloc::{AllocError, Allocator},
    bumping::{BumpProps, BumpUp, MIN_CHUNK_ALIGN, bump_down, bump_prepare_down, bump_prepare_up, bump_up},
    chunk::{ChunkHeader, ChunkSize, ChunkSizeHint},
    error_behavior::ErrorBehavior,
    layout::{ArrayLayout, CustomLayout, LayoutProps, SizedLayout},
    polyfill::non_null,
    settings::{BumpAllocatorSettings, MinimumAlignment, SupportedMinimumAlignment},
    stats::Stats,
};

#[cfg(debug_assertions)]
use crate::error_behavior;

#[cfg(feature = "alloc")]
use crate::alloc::Global;

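// The core state behind a bump allocator: the currently active chunk plus the
// allocator used to allocate further chunks. The allocator is wrapped in
// `ManuallyDrop` so `claim` can duplicate it and `manually_drop` decides when
// it is actually dropped.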
pub(crate) struct RawBump<A, S> {
    pub(crate) chunk: Cell<RawChunk<S>>,

    pub(crate) allocator: ManuallyDrop<A>,
}

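// Cloning copies the allocator handle bit for bit via `ptr::read`; this is only
// sound as long as callers ensure the duplicated allocator is not dropped twice
// (see `claim`/`reclaim` and `manually_drop`).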
impl<A, S> Clone for RawBump<A, S> {
    fn clone(&self) -> Self {
        Self {
            chunk: self.chunk.clone(),
            allocator: unsafe { ptr::read(&raw const self.allocator) },
        }
    }
}

impl<A, S> RawBump<A, S>
where
    A: Allocator,
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    pub(crate) const fn new(allocator: A) -> Self {
        const { assert!(!S::GUARANTEED_ALLOCATED) };

        Self {
            chunk: Cell::new(RawChunk::UNALLOCATED),
            allocator: ManuallyDrop::new(allocator),
        }
    }

    #[inline(always)]
    pub(crate) fn with_size<E: ErrorBehavior>(size: ChunkSize<S::Up>, allocator: A) -> Result<Self, E> {
        Ok(Self {
            chunk: Cell::new(NonDummyChunk::new::<E>(size, None, &allocator)?.0),
            allocator: ManuallyDrop::new(allocator),
        })
    }

    #[inline(always)]
    pub(crate) fn is_claimed(&self) -> bool {
        self.chunk.get().is_claimed()
    }

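    // Takes exclusive use of this bump allocator: the current chunk is swapped
    // for the `CLAIMED` sentinel and a `RawBump` owning the real chunk list is
    // returned. Panics if already claimed; `reclaim` hands the chunk back.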
    #[inline(always)]
    pub(crate) fn claim(&self) -> RawBump<A, S> {
        const {
            assert!(S::CLAIMABLE, "`claim` is only available with the setting `CLAIMABLE = true`");
        }

        #[cold]
        #[inline(never)]
        fn already_claimed() {
            panic!("bump allocator is already claimed");
        }

        if self.chunk.get().is_claimed() {
            already_claimed();
        }

        let chunk = Cell::new(self.chunk.replace(RawChunk::<S>::CLAIMED));
        let allocator = unsafe { ptr::read(&raw const self.allocator) };

        RawBump { chunk, allocator }
    }

    #[inline(always)]
    pub(crate) fn reclaim(&self, claimant: &RawBump<A, S>) {
        self.chunk.set(claimant.chunk.get());
    }

    #[inline(always)]
    pub(crate) fn checkpoint(&self) -> Checkpoint {
        Checkpoint::new(self.chunk.get())
    }

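    // Rolls the bump position back to a previously created checkpoint. The
    // caller must uphold the checkpoint safety conditions; the debug-only checks
    // below verify what they can: neither side may be claimed, the checkpoint
    // must refer to a chunk of this allocator, its address must lie within that
    // chunk, and (per the assertion message) a checkpoint taken in a
    // `!GUARANTEED_ALLOCATED` state must not be used once `self` is
    // `GUARANTEED_ALLOCATED`.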
    #[inline]
    pub(crate) unsafe fn reset_to(&self, checkpoint: Checkpoint) {
        #[cfg(debug_assertions)]
        if checkpoint.chunk == ChunkHeader::claimed::<S>() || self.chunk.get().is_claimed() {
            error_behavior::panic::claimed();
        }

        if !S::GUARANTEED_ALLOCATED && checkpoint.chunk == ChunkHeader::unallocated::<S>() {
            if let Some(mut chunk) = self.chunk.get().as_non_dummy() {
                while let Some(prev) = chunk.prev() {
                    chunk = prev;
                }

                chunk.reset();

                self.chunk.set(unsafe { chunk.cast() });
            }

            return;
        }

        debug_assert_ne!(
            checkpoint.chunk,
            ChunkHeader::unallocated::<S>(),
            "the safety conditions state that \"the checkpoint must not have been created by a `!GUARANTEED_ALLOCATED` bump allocator when self is `GUARANTEED_ALLOCATED`\""
        );

        #[cfg(debug_assertions)]
        {
            let chunk = self
                .stats()
                .small_to_big()
                .find(|chunk| chunk.header() == checkpoint.chunk.cast())
                .expect("this checkpoint does not refer to any chunk of this bump allocator");

            assert!(
                chunk.chunk.contains_addr_or_end(checkpoint.address.get()),
                "checkpoint address does not point within its chunk"
            );
        }

        unsafe {
            checkpoint.reset_within_chunk();

            self.chunk.set(RawChunk {
                header: checkpoint.chunk.cast(),
                marker: PhantomData,
            });
        }
    }

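    // Resets the allocator to an empty state: every chunk except the newest
    // (largest) one is deallocated, the surviving chunk's `prev` link is cleared
    // and its bump position is moved back to the start of its content.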
    #[inline(always)]
    pub(crate) fn reset(&self) {
        let Some(mut chunk) = self.chunk.get().as_non_dummy() else {
            return;
        };

        unsafe {
            chunk.for_each_prev(|chunk| chunk.deallocate(&*self.allocator));

            while let Some(next) = chunk.next() {
                chunk.deallocate(&*self.allocator);
                chunk = next;
            }

            chunk.header.as_ref().prev.set(None);
        }

        chunk.reset();

        self.chunk.set(unsafe { chunk.cast() });
    }

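    // Deallocates every chunk and drops the backing allocator. As the panic
    // message indicates, this runs when the owning `Bump` is dropped; dropping
    // while still claimed panics rather than pulling the allocator out from
    // under the claimant.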
    pub(crate) unsafe fn manually_drop(&mut self) {
        #[cold]
        #[inline(never)]
        fn panic_claimed() -> ! {
            panic!("tried to drop a `Bump` while it was still claimed")
        }

        match self.chunk.get().classify() {
            ChunkClass::Claimed => panic_claimed(),
            ChunkClass::Unallocated => (),
            ChunkClass::NonDummy(chunk) => unsafe {
                chunk.for_each_prev(|chunk| chunk.deallocate(&*self.allocator));
                chunk.for_each_next(|chunk| chunk.deallocate(&*self.allocator));
                chunk.deallocate(&*self.allocator);
            },
        }

        unsafe { ManuallyDrop::drop(&mut self.allocator) };
    }

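    // Ensures that at least `additional` more bytes are available across the
    // current chunk's remaining space and the capacities of any following
    // chunks; if that is not enough, a single new chunk covering the shortfall
    // is appended.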
    #[inline(always)]
    pub(crate) fn reserve<E: ErrorBehavior>(&self, additional: usize) -> Result<(), E> {
        let chunk = self.chunk.get();

        match chunk.classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => {
                let Ok(layout) = Layout::from_size_align(additional, 1) else {
                    return Err(E::capacity_overflow());
                };

                let new_chunk = NonDummyChunk::new(
                    ChunkSize::<S::Up>::from_capacity(layout).ok_or_else(E::capacity_overflow)?,
                    None,
                    &*self.allocator,
                )?;

                self.chunk.set(new_chunk.0);
                Ok(())
            }
            ChunkClass::NonDummy(mut chunk) => {
                let mut additional = additional;

                if let Some(rest) = additional.checked_sub(chunk.remaining()) {
                    additional = rest;
                } else {
                    return Ok(());
                }

                while let Some(next) = chunk.next() {
                    chunk = next;

                    if let Some(rest) = additional.checked_sub(chunk.capacity()) {
                        additional = rest;
                    } else {
                        return Ok(());
                    }
                }

                if additional == 0 {
                    return Ok(());
                }

                let Ok(layout) = Layout::from_size_align(additional, 1) else {
                    return Err(E::capacity_overflow());
                };

                chunk.append_for(layout, &*self.allocator).map(drop)
            }
        }
    }

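    // Allocation fast path: try to bump within the current chunk and fall back
    // to the `#[cold]` `*_in_another_chunk` functions further down, which switch
    // to (or allocate) a chunk that fits the layout.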
    #[inline(always)]
    pub(crate) fn alloc<B: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, B> {
        match self.chunk.get().alloc(CustomLayout(layout)) {
            Some(ptr) => Ok(ptr),
            None => self.alloc_in_another_chunk(layout),
        }
    }

    #[inline(always)]
    pub(crate) fn alloc_sized<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
        match self.chunk.get().alloc(SizedLayout::new::<T>()) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_sized_in_another_chunk::<E, T>() {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn alloc_slice<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<T>, E> {
        let Ok(layout) = ArrayLayout::array::<T>(len) else {
            return Err(E::capacity_overflow());
        };

        match self.chunk.get().alloc(layout) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_slice_in_another_chunk::<E, T>(len) {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn alloc_slice_for<E: ErrorBehavior, T>(&self, value: &[T]) -> Result<NonNull<T>, E> {
        let layout = ArrayLayout::for_value(value);

        match self.chunk.get().alloc(layout) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_slice_in_another_chunk::<E, T>(value.len()) {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

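    // The `prepare_*` functions reserve room for an upcoming allocation without
    // moving the bump position: they only compute where the allocation would
    // land and how much contiguous space is available there.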
    #[inline(always)]
    pub(crate) fn prepare_sized_allocation<B: ErrorBehavior, T>(&self) -> Result<NonNull<T>, B> {
        match self.chunk.get().prepare_allocation(SizedLayout::new::<T>()) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.prepare_allocation_in_another_chunk::<B, T>() {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn prepare_slice_allocation<B: ErrorBehavior, T>(&self, min_cap: usize) -> Result<NonNull<[T]>, B> {
        let range = self.prepare_allocation_range::<B, T>(min_cap)?;

        let cap = unsafe { non_null::byte_offset_from_unsigned(range.end, range.start) } / T::SIZE;

        let ptr = if S::UP { range.start } else { unsafe { range.end.sub(cap) } };

        Ok(NonNull::slice_from_raw_parts(ptr, cap))
    }

    #[inline(always)]
    pub(crate) fn prepare_slice_allocation_rev<B: ErrorBehavior, T>(
        &self,
        min_cap: usize,
    ) -> Result<(NonNull<T>, usize), B> {
        let range = self.prepare_allocation_range::<B, T>(min_cap)?;

        let cap = unsafe { non_null::byte_offset_from_unsigned(range.end, range.start) } / T::SIZE;

        let end = if S::UP { unsafe { range.start.add(cap) } } else { range.end };

        Ok((end, cap))
    }

    #[inline(always)]
    fn prepare_allocation_range<B: ErrorBehavior, T>(&self, cap: usize) -> Result<Range<NonNull<T>>, B> {
        let Ok(layout) = ArrayLayout::array::<T>(cap) else {
            return Err(B::capacity_overflow());
        };

        let range = match self.chunk.get().prepare_allocation_range(layout) {
            Some(ptr) => ptr,
            None => self.prepare_allocation_range_in_another_chunk(layout)?,
        };

        Ok(range.start.cast::<T>()..range.end.cast::<T>())
    }

    #[cold]
    #[inline(never)]
    pub(crate) fn alloc_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, E> {
        unsafe { self.in_another_chunk(CustomLayout(layout), RawChunk::alloc) }
    }

    #[cold]
    #[inline(never)]
    fn alloc_sized_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E> {
        self.alloc_in_another_chunk(Layout::new::<T>())
    }

    #[cold]
    #[inline(never)]
    fn alloc_slice_in_another_chunk<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<u8>, E> {
        let Ok(layout) = Layout::array::<T>(len) else {
            return Err(E::capacity_overflow());
        };

        self.alloc_in_another_chunk(layout)
    }

    #[cold]
    #[inline(never)]
    pub(crate) fn prepare_allocation_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E> {
        let layout = CustomLayout(Layout::new::<T>());

        unsafe { self.in_another_chunk(layout, RawChunk::prepare_allocation) }
    }

    #[cold]
    #[inline(never)]
    fn prepare_allocation_range_in_another_chunk<E: ErrorBehavior>(
        &self,
        layout: ArrayLayout,
    ) -> Result<Range<NonNull<u8>>, E> {
        unsafe { self.in_another_chunk(layout, RawChunk::prepare_allocation_range) }
    }

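    // Shared slow path: walk forward through chunks that already exist
    // (resetting each one as it becomes the current chunk) and retry the
    // operation there; if none of them fit, append a freshly sized chunk and
    // retry in that.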
    #[inline(always)]
    pub(crate) unsafe fn in_another_chunk<E: ErrorBehavior, R, L: LayoutProps>(
        &self,
        layout: L,
        mut f: impl FnMut(RawChunk<S>, L) -> Option<R>,
    ) -> Result<R, E> {
        let new_chunk: NonDummyChunk<S> = match self.chunk.get().classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => NonDummyChunk::new(
                ChunkSize::from_capacity(*layout).ok_or_else(E::capacity_overflow)?,
                None,
                &*self.allocator,
            ),
            ChunkClass::NonDummy(mut chunk) => {
                while let Some(next_chunk) = chunk.next() {
                    chunk = next_chunk;

                    chunk.reset();

                    self.chunk.set(chunk.0);

                    if let Some(ptr) = f(chunk.0, layout) {
                        return Ok(ptr);
                    }
                }

                chunk.append_for(*layout, &*self.allocator)
            }
        }?;

        self.chunk.set(new_chunk.0);

        match f(new_chunk.0, layout) {
            Some(ptr) => Ok(ptr),
            _ => {
                // The new chunk was created by `from_capacity` / `append_for`
                // with enough room for `layout`, so a second failure here is
                // considered impossible.
                unsafe { core::hint::unreachable_unchecked() }
            }
        }
    }

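    // Turns an unallocated bump allocator into an allocated one by creating a
    // minimal first chunk; does nothing if a chunk already exists and fails if
    // the allocator is currently claimed.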
    pub(crate) fn make_allocated<E: ErrorBehavior>(&self) -> Result<(), E> {
        match self.chunk.get().classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => {
                let new_chunk = NonDummyChunk::new(ChunkSize::ZERO, None, &*self.allocator)?;
                self.chunk.set(new_chunk.0);
                Ok(())
            }
            ChunkClass::NonDummy(_) => Ok(()),
        }
    }
}

impl<A, S> RawBump<A, S>
where
    S: BumpAllocatorSettings,
{
    #[must_use]
    #[inline(always)]
    pub fn stats<'a>(&self) -> Stats<'a, S> {
        Stats::from_raw_chunk(self.chunk.get())
    }

    #[inline(always)]
    pub(crate) fn align<const ALIGN: usize>(&self)
    where
        MinimumAlignment<ALIGN>: SupportedMinimumAlignment,
    {
        self.align_to::<MinimumAlignment<ALIGN>>();
    }

    #[inline(always)]
    pub(crate) fn align_to<MinimumAlignment>(&self)
    where
        MinimumAlignment: SupportedMinimumAlignment,
    {
        if MinimumAlignment::VALUE > S::MIN_ALIGN {
            if let Some(chunk) = self.chunk.get().as_non_dummy() {
                let pos = chunk.pos().addr().get();
                let addr = align_pos(S::UP, MinimumAlignment::VALUE, pos);
                unsafe { chunk.set_pos_addr(addr) };
            }
        }
    }

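    // The `ensure_*` functions are compile-time guards for converting a bump
    // allocator (or a scope/borrow of it) to a different settings type: each one
    // `const`-asserts that the new settings are compatible and, where a stricter
    // minimum alignment may be introduced, re-aligns the current position.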
    pub(crate) fn ensure_satisfies_settings<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting");

            assert!(
                NewS::GUARANTEED_ALLOCATED <= S::GUARANTEED_ALLOCATED,
                "can't turn a non-guaranteed-allocated bump allocator into a guaranteed-allocated one"
            );

            assert!(
                NewS::CLAIMABLE >= S::CLAIMABLE,
                "can't turn a claimable bump allocator into a non-claimable one"
            );
        }

        self.align_to::<NewS::MinimumAlignment>();
    }

    pub(crate) fn ensure_scope_satisfies_settings<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting");

            assert!(
                NewS::GUARANTEED_ALLOCATED <= S::GUARANTEED_ALLOCATED,
                "can't turn a non-guaranteed-allocated bump allocator into a guaranteed-allocated one"
            );

            assert!(
                NewS::MIN_ALIGN >= S::MIN_ALIGN,
                "can't decrease minimum alignment when mutably borrowing with new settings"
            );

            assert!(
                NewS::CLAIMABLE >= S::CLAIMABLE,
                "can't turn a claimable bump allocator into a non-claimable one"
            );
        }

        self.align_to::<NewS::MinimumAlignment>();
    }

    #[expect(clippy::unused_self)]
    pub(crate) fn ensure_satisfies_settings_for_borrow<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting");

            assert!(
                NewS::GUARANTEED_ALLOCATED <= S::GUARANTEED_ALLOCATED,
                "can't turn a non-guaranteed-allocated bump allocator into a guaranteed-allocated one"
            );

            assert!(
                NewS::MIN_ALIGN == S::MIN_ALIGN,
                "can't change minimum alignment when borrowing with new settings"
            );

            assert!(NewS::CLAIMABLE == S::CLAIMABLE, "can't change claimability");
        }
    }

    pub(crate) fn ensure_satisfies_settings_for_borrow_mut<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting");

            assert!(
                NewS::GUARANTEED_ALLOCATED == S::GUARANTEED_ALLOCATED,
                "can't change guaranteed-allocated property when mutably borrowing with new settings"
            );

            assert!(
                NewS::MIN_ALIGN >= S::MIN_ALIGN,
                "can't decrease minimum alignment when mutably borrowing with new settings"
            );

            assert!(NewS::CLAIMABLE == S::CLAIMABLE, "can't change claimability");
        }

        self.align_to::<NewS::MinimumAlignment>();
    }
}

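// With the `Global` allocator there is no allocator state to preserve, so a
// bump allocator can be converted to and from a bare chunk-header pointer.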
#[cfg(feature = "alloc")]
impl<S> RawBump<Global, S>
where
    S: BumpAllocatorSettings,
{
    #[inline]
    pub(crate) fn into_raw(self) -> NonNull<()> {
        self.chunk.get().header.cast()
    }

    #[inline]
    pub(crate) unsafe fn from_raw(ptr: NonNull<()>) -> Self {
        Self {
            chunk: Cell::new(RawChunk {
                header: ptr.cast(),
                marker: PhantomData,
            }),
            allocator: ManuallyDrop::new(Global),
        }
    }
}

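// A raw reference to a chunk header. Besides real, heap-allocated chunks this
// can also point at one of the two dummy sentinel headers: `UNALLOCATED` (no
// chunk allocated yet) and `CLAIMED` (the chunk list is currently lent out via
// `claim`). `classify` distinguishes the three cases.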
pub(crate) struct RawChunk<S> {
    pub(crate) header: NonNull<ChunkHeader>,
    pub(crate) marker: PhantomData<fn() -> S>,
}

impl<S> Clone for RawChunk<S> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<S> Copy for RawChunk<S> {}

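// A `RawChunk` that is known to point at a real chunk rather than at the
// `UNALLOCATED` or `CLAIMED` sentinel, so its header may be read and its memory
// bumped or deallocated.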
pub(crate) struct NonDummyChunk<S>(RawChunk<S>);

impl<S> Copy for NonDummyChunk<S> {}

impl<S> Clone for NonDummyChunk<S> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<S> Deref for NonDummyChunk<S> {
    type Target = RawChunk<S>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<S> RawChunk<S>
where
    S: BumpAllocatorSettings,
{
    pub(crate) const UNALLOCATED: Self = {
        assert!(!S::GUARANTEED_ALLOCATED);

        Self {
            header: ChunkHeader::unallocated::<S>(),
            marker: PhantomData,
        }
    };

    const CLAIMED: Self = {
        assert!(S::CLAIMABLE);

        Self {
            header: ChunkHeader::claimed::<S>(),
            marker: PhantomData,
        }
    };

    #[inline(always)]
    pub(crate) fn header(self) -> NonNull<ChunkHeader> {
        self.header
    }

    #[inline(always)]
    fn is_claimed(self) -> bool {
        S::CLAIMABLE && self.header == ChunkHeader::claimed::<S>()
    }

    #[inline(always)]
    pub(crate) fn is_unallocated(self) -> bool {
        !S::GUARANTEED_ALLOCATED && self.header == ChunkHeader::unallocated::<S>()
    }

    #[inline(always)]
    pub(crate) fn classify(self) -> ChunkClass<S> {
        if self.is_claimed() {
            return ChunkClass::Claimed;
        }

        if self.is_unallocated() {
            return ChunkClass::Unallocated;
        }

        ChunkClass::NonDummy(NonDummyChunk(self))
    }

    #[inline(always)]
    pub(crate) fn as_non_dummy(self) -> Option<NonDummyChunk<S>> {
        match self.classify() {
            ChunkClass::Claimed | ChunkClass::Unallocated => None,
            ChunkClass::NonDummy(chunk) => Some(chunk),
        }
    }

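    // `alloc` commits an allocation by moving the bump position, while the
    // `prepare_*` variants compute the same addresses but leave the position
    // untouched. The sentinel headers appear to be set up so that bumping always
    // fails on them (see the debug assertion in `bump_props`), pushing callers
    // onto the slow path.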
    #[inline(always)]
    pub(crate) fn alloc(self, layout: impl LayoutProps) -> Option<NonNull<u8>> {
        let props = self.bump_props(layout);

        if S::UP {
            let BumpUp { new_pos, ptr } = bump_up(props)?;

            unsafe {
                let chunk = self.as_non_dummy_unchecked();
                chunk.set_pos_addr(new_pos);
                Some(chunk.content_ptr_from_addr(ptr))
            }
        } else {
            let ptr = bump_down(props)?;

            unsafe {
                let chunk = self.as_non_dummy_unchecked();
                chunk.set_pos_addr(ptr);
                Some(chunk.content_ptr_from_addr(ptr))
            }
        }
    }

    #[inline(always)]
    pub(crate) fn prepare_allocation(self, layout: impl LayoutProps) -> Option<NonNull<u8>> {
        let props = self.bump_props(layout);

        let ptr = if S::UP { bump_up(props)?.ptr } else { bump_down(props)? };

        unsafe {
            let chunk = self.as_non_dummy_unchecked();
            Some(chunk.content_ptr_from_addr(ptr))
        }
    }

    #[inline(always)]
    pub(crate) fn prepare_allocation_range(self, layout: impl LayoutProps) -> Option<Range<NonNull<u8>>> {
        let props = self.bump_props(layout);

        let range = if S::UP {
            bump_prepare_up(props)
        } else {
            bump_prepare_down(props)
        }?;

        unsafe {
            let chunk = self.as_non_dummy_unchecked();
            Some(chunk.content_ptr_from_addr_range(range))
        }
    }

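    // Collects the inputs for the `bumping` helpers. `start..end` is the
    // remaining free region in increasing address order, so for downward bumping
    // the current position becomes `end` and the chunk's lower bound becomes
    // `start`.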
    #[inline(always)]
    fn bump_props<L>(self, layout: L) -> BumpProps
    where
        L: LayoutProps,
    {
        let pos = self.pos().addr().get();
        let end = unsafe { self.header.as_ref() }.end.addr().get();

        let start = if S::UP { pos } else { end };
        let end = if S::UP { end } else { pos };

        #[cfg(debug_assertions)]
        if !matches!(self.classify(), ChunkClass::NonDummy(_)) {
            assert!(start > end);
        }

        BumpProps {
            start,
            end,
            layout: *layout,
            min_align: S::MIN_ALIGN,
            align_is_const: L::ALIGN_IS_CONST,
            size_is_const: L::SIZE_IS_CONST,
            size_is_multiple_of_align: L::SIZE_IS_MULTIPLE_OF_ALIGN,
        }
    }

    #[inline(always)]
    pub(crate) fn pos(self) -> NonNull<u8> {
        unsafe { self.header.as_ref().pos.get() }
    }

    #[inline(always)]
    pub(crate) unsafe fn as_non_dummy_unchecked(self) -> NonDummyChunk<S> {
        debug_assert!(matches!(self.classify(), ChunkClass::NonDummy(_)));
        NonDummyChunk(self)
    }

    pub(crate) unsafe fn cast<S2>(self) -> RawChunk<S2> {
        RawChunk {
            header: self.header,
            marker: PhantomData,
        }
    }
}

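// `NonDummyChunk::new` allocates a chunk from the backing allocator and writes
// the `ChunkHeader` in place: at the start of the allocation when bumping
// upwards and at the end when bumping downwards, so the content area always
// lies on the side the bump position moves through.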
impl<S> NonDummyChunk<S>
where
    S: BumpAllocatorSettings,
{
    pub(crate) fn new<E>(
        chunk_size: ChunkSize<S::Up>,
        prev: Option<NonDummyChunk<S>>,
        allocator: &impl Allocator,
    ) -> Result<NonDummyChunk<S>, E>
    where
        E: ErrorBehavior,
    {
        let min_size = const {
            match ChunkSize::<S::Up>::from_hint(S::MINIMUM_CHUNK_SIZE) {
                Some(some) => some,
                None => panic!("failed to calculate minimum chunk size"),
            }
        };

        let layout = chunk_size.max(min_size).layout().ok_or_else(E::capacity_overflow)?;

        let allocation = match allocator.allocate(layout) {
            Ok(ok) => ok,
            Err(AllocError) => return Err(E::allocation(layout)),
        };

        let ptr = non_null::as_non_null_ptr(allocation);
        let size = allocation.len();

        let size = chunk_size.align_allocation_size(size);

        debug_assert!(size >= layout.size());
        debug_assert!(size % MIN_CHUNK_ALIGN == 0);

        let prev = Cell::new(prev.map(|c| c.header));
        let next = Cell::new(None);

        let header = unsafe {
            if S::UP {
                let header = ptr.cast::<ChunkHeader>();

                header.write(ChunkHeader {
                    pos: Cell::new(header.add(1).cast()),
                    end: ptr.add(size),
                    prev,
                    next,
                });

                header
            } else {
                let header = ptr.add(size).cast::<ChunkHeader>().sub(1);

                header.write(ChunkHeader {
                    pos: Cell::new(header.cast()),
                    end: ptr,
                    prev,
                    next,
                });

                header
            }
        };

        Ok(NonDummyChunk(RawChunk {
            header,
            marker: PhantomData,
        }))
    }

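    // Appends a fresh chunk after `self` (which must currently be the last
    // chunk): the new chunk is sized to fit `layout` and to be at least twice
    // the size of `self`, and the two chunks are linked together.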
    pub(crate) fn append_for<B: ErrorBehavior>(self, layout: Layout, allocator: &impl Allocator) -> Result<Self, B> {
        debug_assert!(self.next().is_none());

        let required_size = ChunkSizeHint::for_capacity(layout).ok_or_else(B::capacity_overflow)?;
        let grown_size = self.grow_size()?;
        let size = required_size.max(grown_size).calc_size().ok_or_else(B::capacity_overflow)?;

        let new_chunk = Self::new::<B>(size, Some(self), allocator)?;

        unsafe {
            self.header.as_ref().next.set(Some(new_chunk.header));
        }

        Ok(new_chunk)
    }

    #[inline(always)]
    fn grow_size<B: ErrorBehavior>(self) -> Result<ChunkSizeHint<S::Up>, B> {
        let Some(size) = self.size().get().checked_mul(2) else {
            return Err(B::capacity_overflow());
        };

        Ok(ChunkSizeHint::new(size))
    }

    #[inline(always)]
    pub(crate) fn prev(self) -> Option<NonDummyChunk<S>> {
        unsafe {
            Some(NonDummyChunk(RawChunk {
                header: self.header.as_ref().prev.get()?,
                marker: PhantomData,
            }))
        }
    }

    #[inline(always)]
    pub(crate) fn next(self) -> Option<NonDummyChunk<S>> {
        unsafe {
            Some(NonDummyChunk(RawChunk {
                header: self.header.as_ref().next.get()?,
                marker: PhantomData,
            }))
        }
    }

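    // Size accessors: `size` covers the whole allocation including the header,
    // `capacity` only the content area, and `allocated`/`remaining` split the
    // content area at the current bump position.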
    #[inline(always)]
    pub(crate) fn size(self) -> NonZeroUsize {
        let start = self.chunk_start().addr().get();
        let end = self.chunk_end().addr().get();
        unsafe { NonZeroUsize::new_unchecked(end - start) }
    }

    #[inline(always)]
    pub(crate) fn capacity(self) -> usize {
        let start = self.content_start().addr().get();
        let end = self.content_end().addr().get();
        end - start
    }

    #[inline(always)]
    pub(crate) fn allocated(self) -> usize {
        let range = self.allocated_range();
        let start = range.start.addr().get();
        let end = range.end.addr().get();
        end - start
    }

    #[inline(always)]
    pub(crate) fn remaining(self) -> usize {
        let range = self.remaining_range();
        let start = range.start.addr().get();
        let end = range.end.addr().get();
        end - start
    }

    #[inline(always)]
    fn reset(self) {
        unsafe {
            if S::UP {
                self.set_pos(self.content_start());
            } else {
                self.set_pos(self.content_end());
            }
        }
    }

    #[inline(always)]
    pub(crate) fn chunk_start(self) -> NonNull<u8> {
        unsafe { if S::UP { self.header.cast() } else { self.header.as_ref().end } }
    }

    #[inline(always)]
    pub(crate) fn chunk_end(self) -> NonNull<u8> {
        unsafe {
            if S::UP {
                self.header.as_ref().end
            } else {
                self.after_header()
            }
        }
    }

    #[inline(always)]
    pub(crate) fn content_start(self) -> NonNull<u8> {
        if S::UP { self.after_header() } else { self.chunk_start() }
    }

    #[inline(always)]
    pub(crate) fn content_end(self) -> NonNull<u8> {
        if S::UP { self.chunk_end() } else { self.header.cast() }
    }

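    // The `set_pos*` family moves the bump position. The `*_and_align*` variants
    // additionally round the address to the minimum alignment via `align_pos`,
    // in whichever direction this allocator bumps.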
    #[inline(always)]
    pub(crate) unsafe fn set_pos(self, ptr: NonNull<u8>) {
        unsafe { self.set_pos_addr(ptr.addr().get()) };
    }

    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr(self, addr: usize) {
        unsafe { self.header.as_ref().pos.set(self.content_ptr_from_addr(addr)) };
    }

    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr_and_align(self, pos: usize) {
        unsafe {
            let addr = align_pos(S::UP, S::MIN_ALIGN, pos);
            self.set_pos_addr(addr);
        }
    }

    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr_and_align_from(self, mut pos: usize, pos_align: usize) {
        debug_assert_eq!(pos % pos_align, 0);

        if pos_align < S::MIN_ALIGN {
            pos = align_pos(S::UP, S::MIN_ALIGN, pos);
        }

        unsafe { self.set_pos_addr(pos) };
    }

    #[inline(always)]
    unsafe fn content_ptr_from_addr(self, addr: usize) -> NonNull<u8> {
        unsafe {
            debug_assert!(self.contains_addr_or_end(addr));
            let ptr = self.header.cast();
            let addr = NonZeroUsize::new_unchecked(addr);
            ptr.with_addr(addr)
        }
    }

    #[inline(always)]
    pub(crate) unsafe fn content_ptr_from_addr_range(self, range: Range<usize>) -> Range<NonNull<u8>> {
        unsafe {
            debug_assert!(range.start <= range.end);
            let start = self.content_ptr_from_addr(range.start);
            let end = self.content_ptr_from_addr(range.end);
            start..end
        }
    }

    #[inline(always)]
    fn contains_addr_or_end(self, addr: usize) -> bool {
        let start = self.content_start().addr().get();
        let end = self.content_end().addr().get();
        addr >= start && addr <= end
    }

    #[inline(always)]
    fn allocated_range(self) -> Range<NonNull<u8>> {
        if S::UP {
            self.content_start()..self.pos()
        } else {
            self.pos()..self.content_end()
        }
    }

    #[inline(always)]
    fn remaining_range(self) -> Range<NonNull<u8>> {
        if S::UP {
            let start = self.pos();
            let end = self.content_end();
            start..end
        } else {
            let start = self.content_start();
            let end = self.pos();
            start..end
        }
    }

    #[inline(always)]
    fn after_header(self) -> NonNull<u8> {
        unsafe { self.header.add(1).cast() }
    }

    fn for_each_prev(self, mut f: impl FnMut(NonDummyChunk<S>)) {
        let mut iter = self.prev();

        while let Some(chunk) = iter {
            iter = chunk.prev();
            f(chunk);
        }
    }

    fn for_each_next(self, mut f: impl FnMut(NonDummyChunk<S>)) {
        let mut iter = self.next();

        while let Some(chunk) = iter {
            iter = chunk.next();
            f(chunk);
        }
    }

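    // Returns the chunk's memory to the backing allocator. The chunk (and every
    // `RawChunk` still pointing at it) must not be used afterwards, and
    // `allocator` must be the allocator the chunk was allocated with.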
    unsafe fn deallocate(self, allocator: &impl Allocator) {
        let ptr = self.chunk_start();
        let layout = self.layout();

        unsafe {
            allocator.deallocate(ptr, layout);
        }
    }

    #[inline(always)]
    pub(crate) fn layout(self) -> Layout {
        unsafe { Layout::from_size_align_unchecked(self.size().get(), align_of::<ChunkHeader>()) }
    }
}

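// The three states a `RawChunk` can be in, as returned by `RawChunk::classify`.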
pub(crate) enum ChunkClass<S: BumpAllocatorSettings> {
    Claimed,
    Unallocated,
    NonDummy(NonDummyChunk<S>),
}