
bump_scope/raw_bump.rs

use core::{
    alloc::Layout,
    cell::Cell,
    marker::PhantomData,
    num::NonZeroUsize,
    ops::{Deref, Range},
    ptr::{self, NonNull},
};

use crate::{
    BaseAllocator, Checkpoint, SizedTypeProperties, align_pos,
    alloc::{AllocError, Allocator},
    bumping::{BumpProps, BumpUp, MIN_CHUNK_ALIGN, bump_down, bump_prepare_down, bump_prepare_up, bump_up},
    chunk::{ChunkHeader, ChunkSize, ChunkSizeHint},
    error_behavior::{self, ErrorBehavior},
    layout::{ArrayLayout, CustomLayout, LayoutProps, SizedLayout},
    polyfill::non_null,
    settings::{BumpAllocatorSettings, False, MinimumAlignment, SupportedMinimumAlignment},
    stats::Stats,
};

/// The internal type used by `Bump` and `Bump(Scope)`.
///
/// All the APIs that can fail due to allocation failure take an `E: ErrorBehavior`
/// instead of having a `try_` and a non-`try_` version.
///
/// It does not concern itself with deallocating chunks or the base allocator.
/// A clone of this type is just a bitwise copy; `manually_drop` must only be called
/// once for this bump allocator.
pub(crate) struct RawBump<A, S> {
    /// Either a chunk allocated from the `allocator`, or a `CLAIMED`
    /// or `UNALLOCATED` dummy chunk.
    pub(crate) chunk: Cell<RawChunk<A, S>>,
}
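
// Illustrative sketch (not part of this file; wrapper names and the exact
// `ErrorBehavior` types are hypothetical): the public allocator types are expected
// to pick the error behavior when calling into `RawBump`, so the panicking and the
// fallible flavor of an API can share one implementation:
//
//     pub fn alloc_layout(&self, layout: Layout) -> NonNull<u8> {
//         // E = an infallible behavior: allocation failure panics/aborts
//         infallible(self.raw.alloc(layout))
//     }
//
//     pub fn try_alloc_layout(&self, layout: Layout) -> Result<NonNull<u8>, AllocError> {
//         // E = `AllocError`: allocation failure is returned to the caller
//         self.raw.alloc(layout)
//     }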

impl<A, S> Clone for RawBump<A, S> {
    fn clone(&self) -> Self {
        Self {
            chunk: self.chunk.clone(),
        }
    }
}

impl<A, S> RawBump<A, S>
where
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    pub(crate) const fn new() -> Self
    where
        S: BumpAllocatorSettings<GuaranteedAllocated = False>,
    {
        Self {
            chunk: Cell::new(RawChunk::UNALLOCATED),
        }
    }

    #[inline(always)]
    pub(crate) fn is_claimed(&self) -> bool {
        self.chunk.get().is_claimed()
    }

    #[inline(always)]
    pub(crate) fn allocator<'a>(&self) -> Option<&'a A> {
        match self.chunk.get().classify() {
            ChunkClass::Claimed | ChunkClass::Unallocated => None,
            ChunkClass::NonDummy(chunk) => Some(chunk.allocator()),
        }
    }
}

impl<A, S> RawBump<A, S>
where
    A: Allocator,
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    pub(crate) fn with_size<E: ErrorBehavior>(size: ChunkSize<A, S::Up>, allocator: A) -> Result<Self, E> {
        Ok(Self {
            chunk: Cell::new(NonDummyChunk::new::<E>(size, None, allocator)?.0),
        })
    }

    #[inline(always)]
    pub(crate) fn reset(&self) {
        let Some(mut chunk) = self.chunk.get().as_non_dummy() else {
            return;
        };

        unsafe {
            chunk.for_each_prev(|chunk| chunk.deallocate());

            while let Some(next) = chunk.next() {
                chunk.deallocate();
                chunk = next;
            }

            chunk.header.as_ref().prev.set(None);
        }

        chunk.reset();

        // SAFETY: casting from guaranteed-allocated to non-guaranteed-allocated is safe
        self.chunk.set(unsafe { chunk.cast() });
    }
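
    // How `reset` reshapes the chunk list (illustrative, assuming three chunks were
    // allocated and `C` is the last and therefore biggest one):
    //
    //     before: A <-> B <-> C
    //     after:              C      (prev = None, pos reset to the start of C's content)
    //
    // Every chunk except the last one is deallocated; keeping only the biggest chunk
    // means subsequent allocations start out with the most capacity.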

    pub(crate) unsafe fn manually_drop(&mut self) {
        match self.chunk.get().classify() {
            ChunkClass::Claimed => {
                // The user must have somehow leaked a `BumpClaimGuard`.
            }
            ChunkClass::Unallocated => (),
            ChunkClass::NonDummy(chunk) => unsafe {
                chunk.for_each_prev(|chunk| chunk.deallocate());
                chunk.for_each_next(|chunk| chunk.deallocate());
                chunk.deallocate();
            },
        }
    }
}

impl<A, S> RawBump<A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    pub(crate) fn claim(&self) -> RawBump<A, S> {
        const {
            assert!(S::CLAIMABLE, "`claim` is only available with the setting `CLAIMABLE = true`");
        }

        #[cold]
        #[inline(never)]
        fn already_claimed() {
            panic!("bump allocator is already claimed");
        }

        if self.chunk.get().is_claimed() {
            already_claimed();
        }

        RawBump {
            chunk: Cell::new(self.chunk.replace(RawChunk::<A, S>::CLAIMED)),
        }
    }

    #[inline(always)]
    pub(crate) fn reclaim(&self, claimant: &RawBump<A, S>) {
        self.chunk.set(claimant.chunk.get());
    }
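
    // Illustrative pairing of `claim` and `reclaim` (hypothetical caller, presumably a
    // guard like the `BumpClaimGuard` mentioned in `manually_drop`):
    //
    //     let claimed = bump.claim();   // `bump.chunk` now holds the CLAIMED dummy chunk
    //     // ... allocate through `claimed` ...
    //     bump.reclaim(&claimed);       // moves the real chunk pointer back into `bump`
    //
    // While claimed, claiming again hits the `already_claimed` cold path, and allocating
    // through `bump` fails with `E::claimed()` (see `in_another_chunk`).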

    #[inline(always)]
    pub(crate) fn checkpoint(&self) -> Checkpoint {
        Checkpoint::new(self.chunk.get())
    }

    #[inline]
    pub(crate) unsafe fn reset_to(&self, checkpoint: Checkpoint) {
        #[cfg(debug_assertions)]
        if checkpoint.chunk == ChunkHeader::claimed::<S>() || self.chunk.get().is_claimed() {
            error_behavior::panic::claimed();
        }

        // If the checkpoint was created when the bump allocator had no allocated chunk,
        // then the chunk pointer will point to the unallocated chunk header.
        //
        // In such cases we reset the bump pointer to the very start of the very first chunk.
        //
        // We don't check whether the chunk pointer points to the unallocated chunk header
        // if the bump allocator is `GUARANTEED_ALLOCATED`. We are allowed to skip this check
        // because of this safety condition of `reset_to`:
        // > the checkpoint must not have been created by a `!GUARANTEED_ALLOCATED` bump allocator when `self` is `GUARANTEED_ALLOCATED`
        if !S::GUARANTEED_ALLOCATED && checkpoint.chunk == ChunkHeader::unallocated::<S>() {
            if let Some(mut chunk) = self.chunk.get().as_non_dummy() {
                while let Some(prev) = chunk.prev() {
                    chunk = prev;
                }

                chunk.reset();

                // SAFETY: casting from guaranteed-allocated to non-guaranteed-allocated is safe
                self.chunk.set(unsafe { chunk.cast() });
            }

            return;
        }

        debug_assert_ne!(
            checkpoint.chunk,
            ChunkHeader::unallocated::<S>(),
            "the safety conditions state that \"the checkpoint must not have been created by a `!GUARANTEED_ALLOCATED` bump allocator when `self` is `GUARANTEED_ALLOCATED`\""
        );

        #[cfg(debug_assertions)]
        {
            let chunk = self
                .stats()
                .small_to_big()
                .find(|chunk| chunk.header() == checkpoint.chunk.cast())
                .expect("this checkpoint does not refer to any chunk of this bump allocator");

            assert!(
                chunk.chunk.contains_addr_or_end(checkpoint.address.get()),
                "checkpoint address does not point within its chunk"
            );
        }

        unsafe {
            checkpoint.reset_within_chunk();

            self.chunk.set(RawChunk {
                header: checkpoint.chunk.cast(),
                marker: PhantomData,
            });
        }
    }
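
    // Typical checkpoint round trip (illustrative; the real public API lives on
    // `Bump(Scope)`, not on `RawBump`):
    //
    //     let cp = raw.checkpoint();              // remember the current chunk + position
    //     let _x = raw.alloc_sized::<E, u64>()?;  // allocate some data
    //     unsafe { raw.reset_to(cp) };            // roll the bump pointer back to `cp`
    //
    // Per the debug assertions above, the checkpoint must refer to a chunk of this very
    // bump allocator and its address must lie within that chunk.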

    #[inline(always)]
    pub(crate) fn reserve<E: ErrorBehavior>(&self, additional: usize) -> Result<(), E>
    where
        A: BaseAllocator<S::GuaranteedAllocated>,
    {
        let chunk = self.chunk.get();

        match chunk.classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => {
                let Ok(layout) = Layout::from_size_align(additional, 1) else {
                    return Err(E::capacity_overflow());
                };

                let new_chunk = NonDummyChunk::<A, S>::new(
                    ChunkSize::<A, S::Up>::from_capacity(layout).ok_or_else(E::capacity_overflow)?,
                    None,
                    // When this bump allocator is unallocated, `A` is guaranteed to implement `Default`,
                    // `default_or_panic` will not panic.
                    A::default_or_panic(),
                )?;

                self.chunk.set(new_chunk.0);
                Ok(())
            }
            ChunkClass::NonDummy(mut chunk) => {
                let mut additional = additional;

                if let Some(rest) = additional.checked_sub(chunk.remaining()) {
                    additional = rest;
                } else {
                    return Ok(());
                }

                while let Some(next) = chunk.next() {
                    chunk = next;

                    if let Some(rest) = additional.checked_sub(chunk.capacity()) {
                        additional = rest;
                    } else {
                        return Ok(());
                    }
                }

                if additional == 0 {
                    return Ok(());
                }

                let Ok(layout) = Layout::from_size_align(additional, 1) else {
                    return Err(E::capacity_overflow());
                };

                chunk.append_for(layout).map(drop)
            }
        }
    }
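
    // Worked example for the `NonDummy` branch above (illustrative numbers): with
    // `additional = 100`, 30 bytes remaining in the current chunk and one follow-up
    // chunk with a capacity of 64 bytes, the loop subtracts 30 and then 64, leaving
    // `additional = 6`, so a new chunk is appended for a 6-byte, align-1 layout.
    // If the existing chunks already cover the request, the function returns early.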

    #[inline(always)]
    pub(crate) fn alloc<B: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, B> {
        match self.chunk.get().alloc(CustomLayout(layout)) {
            Some(ptr) => Ok(ptr),
            None => self.alloc_in_another_chunk(layout),
        }
    }

    #[inline(always)]
    pub(crate) fn alloc_sized<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
        match self.chunk.get().alloc(SizedLayout::new::<T>()) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_sized_in_another_chunk::<E, T>() {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn alloc_slice<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<T>, E> {
        let Ok(layout) = ArrayLayout::array::<T>(len) else {
            return Err(E::capacity_overflow());
        };

        match self.chunk.get().alloc(layout) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_slice_in_another_chunk::<E, T>(len) {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn alloc_slice_for<E: ErrorBehavior, T>(&self, value: &[T]) -> Result<NonNull<T>, E> {
        let layout = ArrayLayout::for_value(value);

        match self.chunk.get().alloc(layout) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_slice_in_another_chunk::<E, T>(value.len()) {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn prepare_sized_allocation<B: ErrorBehavior, T>(&self) -> Result<NonNull<T>, B> {
        match self.chunk.get().prepare_allocation(SizedLayout::new::<T>()) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.prepare_allocation_in_another_chunk::<B, T>() {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn prepare_slice_allocation<B: ErrorBehavior, T>(&self, min_cap: usize) -> Result<NonNull<[T]>, B> {
        let range = self.prepare_allocation_range::<B, T>(min_cap)?;

        // NB: We can't use `offset_from_unsigned`, because the size is not a multiple of `T`'s size.
        let cap = unsafe { non_null::byte_offset_from_unsigned(range.end, range.start) } / T::SIZE;

        let ptr = if S::UP { range.start } else { unsafe { range.end.sub(cap) } };

        Ok(NonNull::slice_from_raw_parts(ptr, cap))
    }

    #[inline(always)]
    pub(crate) fn prepare_slice_allocation_rev<B: ErrorBehavior, T>(
        &self,
        min_cap: usize,
    ) -> Result<(NonNull<T>, usize), B> {
        let range = self.prepare_allocation_range::<B, T>(min_cap)?;

        // NB: We can't use `offset_from_unsigned`, because the size is not a multiple of `T`'s size.
        let cap = unsafe { non_null::byte_offset_from_unsigned(range.end, range.start) } / T::SIZE;

        let end = if S::UP { unsafe { range.start.add(cap) } } else { range.end };

        Ok((end, cap))
    }

    /// Returns a pointer range.
    /// The start and end pointers are aligned.
    /// But `end - start` is *not* a multiple of `size_of::<T>()`.
    /// So `end.offset_from_unsigned(start)` may not be used!
    #[inline(always)]
    fn prepare_allocation_range<B: ErrorBehavior, T>(&self, cap: usize) -> Result<Range<NonNull<T>>, B> {
        let Ok(layout) = ArrayLayout::array::<T>(cap) else {
            return Err(B::capacity_overflow());
        };

        let range = match self.chunk.get().prepare_allocation_range(layout) {
            Some(ptr) => ptr,
            None => self.prepare_allocation_range_in_another_chunk(layout)?,
        };

        Ok(range.start.cast::<T>()..range.end.cast::<T>())
    }
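
    // Worked example for the capacity math above (illustrative): if `T` has size 8 and
    // the prepared range spans 70 bytes, `byte_offset_from_unsigned(end, start) / T::SIZE`
    // yields a capacity of 8 elements and 6 bytes stay unused. An upwards allocator
    // anchors the slice at `range.start`, a downwards one at `range.end`, which is why
    // `prepare_slice_allocation` and `prepare_slice_allocation_rev` pick opposite ends.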

    /// Allocation slow path.
    /// The active chunk must *not* have space for `layout`.
    #[cold]
    #[inline(never)]
    pub(crate) fn alloc_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, E> {
        unsafe { self.in_another_chunk(CustomLayout(layout), RawChunk::alloc) }
    }

    #[cold]
    #[inline(never)]
    fn alloc_sized_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E> {
        self.alloc_in_another_chunk(Layout::new::<T>())
    }

    #[cold]
    #[inline(never)]
    fn alloc_slice_in_another_chunk<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<u8>, E> {
        let Ok(layout) = Layout::array::<T>(len) else {
            return Err(E::capacity_overflow());
        };

        self.alloc_in_another_chunk(layout)
    }

    #[cold]
    #[inline(never)]
    pub(crate) fn prepare_allocation_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E> {
        let layout = CustomLayout(Layout::new::<T>());

        unsafe { self.in_another_chunk(layout, RawChunk::prepare_allocation) }
    }

    #[cold]
    #[inline(never)]
    fn prepare_allocation_range_in_another_chunk<E: ErrorBehavior>(
        &self,
        layout: ArrayLayout,
    ) -> Result<Range<NonNull<u8>>, E> {
        unsafe { self.in_another_chunk(layout, RawChunk::prepare_allocation_range) }
    }

    /// # Safety
    ///
    /// Calling `f` on the new chunk created by `NonDummyChunk::append_for` with the layout `layout` must return `Some`.
    #[inline(always)]
    pub(crate) unsafe fn in_another_chunk<E: ErrorBehavior, R, L: LayoutProps>(
        &self,
        layout: L,
        mut f: impl FnMut(RawChunk<A, S>, L) -> Option<R>,
    ) -> Result<R, E> {
        let new_chunk: NonDummyChunk<A, S> = match self.chunk.get().classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => NonDummyChunk::new(
                ChunkSize::from_capacity(*layout).ok_or_else(E::capacity_overflow)?,
                None,
                // When this bump allocator is unallocated, `A` is guaranteed to implement `Default`,
                // `default_or_panic` will not panic.
                A::default_or_panic(),
            ),
            ChunkClass::NonDummy(mut chunk) => {
                while let Some(next_chunk) = chunk.next() {
                    chunk = next_chunk;

                    // We don't reset the chunk position when we leave a scope, so we need to do it here.
                    chunk.reset();

                    self.chunk.set(chunk.0);

                    if let Some(ptr) = f(chunk.0, layout) {
                        return Ok(ptr);
                    }
                }

                // there is no chunk that fits, we need a new chunk
                chunk.append_for(*layout)
            }
        }?;

        self.chunk.set(new_chunk.0);

        match f(new_chunk.0, layout) {
            Some(ptr) => Ok(ptr),
            _ => {
                // SAFETY: We just appended a chunk for that specific layout, it must have enough space.
                // We don't panic here so we don't produce any panic code when using `try_` apis.
                // We check for that in `test-no-panic`.
                unsafe { core::hint::unreachable_unchecked() }
            }
        }
    }
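
    // Summary of the slow path above (restating the code, no new behavior): walk the
    // already-allocated `next` chunks, resetting each one and making it the current
    // chunk, and return as soon as `f` succeeds on one of them. Only when no existing
    // chunk can serve `layout` is a fresh chunk appended; `f` is guaranteed to succeed
    // on that chunk, which is what the `unreachable_unchecked` above relies on.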

    pub(crate) fn make_allocated<E: ErrorBehavior>(&self) -> Result<(), E> {
        match self.chunk.get().classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => {
                // When this bump allocator is unallocated, `A` is guaranteed to implement `Default`,
                // `default_or_panic` will not panic.
                let new_chunk = NonDummyChunk::new(ChunkSize::ZERO, None, A::default_or_panic())?;
                self.chunk.set(new_chunk.0);
                Ok(())
            }
            ChunkClass::NonDummy(_) => Ok(()),
        }
    }
}

impl<A, S> RawBump<A, S>
where
    S: BumpAllocatorSettings,
{
    /// Returns a type which provides statistics about the memory usage of the bump allocator.
    #[must_use]
    #[inline(always)]
    pub fn stats<'a>(&self) -> Stats<'a, A, S> {
        Stats::from_raw_chunk(self.chunk.get())
    }

    #[inline(always)]
    pub(crate) fn align<const ALIGN: usize>(&self)
    where
        MinimumAlignment<ALIGN>: SupportedMinimumAlignment,
    {
        self.align_to::<MinimumAlignment<ALIGN>>();
    }

    #[inline(always)]
    pub(crate) fn align_to<MinimumAlignment>(&self)
    where
        MinimumAlignment: SupportedMinimumAlignment,
    {
        if MinimumAlignment::VALUE > S::MIN_ALIGN {
            // a dummy chunk is always aligned
            if let Some(chunk) = self.chunk.get().as_non_dummy() {
                let pos = chunk.pos().addr().get();
                let addr = align_pos(S::UP, MinimumAlignment::VALUE, pos);
                unsafe { chunk.set_pos_addr(addr) };
            }
        }
    }
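
    // Alignment example (illustrative, assuming the usual round-up/round-down semantics
    // of `align_pos`): with `MinimumAlignment::VALUE = 16` and a position of 0x1004, an
    // upwards allocator moves the position up to 0x1010 while a downwards allocator
    // moves it down to 0x1000. Either way the position only moves further into the free
    // region, so data that was already allocated is unaffected.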

    pub(crate) fn ensure_satisfies_settings<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");
        }

        if !NewS::CLAIMABLE && self.chunk.get().is_claimed() {
            error_behavior::panic::claimed();
        }

        if NewS::GUARANTEED_ALLOCATED && self.chunk.get().is_unallocated() {
            error_behavior::panic::unallocated();
        }

        self.align_to::<NewS::MinimumAlignment>();
    }

    pub(crate) fn ensure_scope_satisfies_settings<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");

            assert!(
                NewS::MIN_ALIGN >= S::MIN_ALIGN,
                "can't decrease minimum alignment using `BumpScope::with_settings`"
            );
        }

        if !NewS::CLAIMABLE && self.chunk.get().is_claimed() {
            error_behavior::panic::claimed();
        }

        // A scope by value is always allocated, created by `(try_)by_value`.

        self.align_to::<NewS::MinimumAlignment>();
    }

    #[expect(clippy::unused_self)]
    pub(crate) fn ensure_satisfies_settings_for_borrow<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");

            assert!(
                NewS::MIN_ALIGN == S::MIN_ALIGN,
                "can't change minimum alignment using `Bump(Scope)::borrow_with_settings`"
            );

            assert!(
                NewS::CLAIMABLE == S::CLAIMABLE,
                "can't change claimable property using `Bump(Scope)::borrow_with_settings`"
            );

            // A reference to a guaranteed-allocated `Bump(Scope)` can never become unallocated.
            assert!(
                NewS::GUARANTEED_ALLOCATED <= S::GUARANTEED_ALLOCATED,
                "can't increase guaranteed-allocated property using `Bump(Scope)::borrow_with_settings`"
            );
        }
    }

    pub(crate) fn ensure_satisfies_settings_for_borrow_mut<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");

            assert!(
                NewS::MIN_ALIGN >= S::MIN_ALIGN,
                "can't decrease minimum alignment using `Bump(Scope)::borrow_mut_with_settings`"
            );

            assert!(
                NewS::CLAIMABLE == S::CLAIMABLE,
                "can't change claimable property using `Bump(Scope)::borrow_mut_with_settings`"
            );

            assert!(
                NewS::GUARANTEED_ALLOCATED == S::GUARANTEED_ALLOCATED,
                "can't change guaranteed-allocated property using `Bump(Scope)::borrow_mut_with_settings`"
            );
        }

        self.align_to::<NewS::MinimumAlignment>();
    }

    #[inline]
    pub(crate) fn into_raw(self) -> NonNull<()> {
        self.chunk.get().header.cast()
    }

    #[inline]
    pub(crate) unsafe fn from_raw(ptr: NonNull<()>) -> Self {
        Self {
            chunk: Cell::new(RawChunk {
                header: ptr.cast(),
                marker: PhantomData,
            }),
        }
    }
}

pub(crate) struct RawChunk<A, S> {
    pub(crate) header: NonNull<ChunkHeader<A>>,
    pub(crate) marker: PhantomData<fn() -> (A, S)>,
}

impl<A, S> Clone for RawChunk<A, S> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<A, S> Copy for RawChunk<A, S> {}

pub(crate) struct NonDummyChunk<A, S>(RawChunk<A, S>);

impl<A, S> Copy for NonDummyChunk<A, S> {}

impl<A, S> Clone for NonDummyChunk<A, S> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<A, S> Deref for NonDummyChunk<A, S> {
    type Target = RawChunk<A, S>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<A, S> RawChunk<A, S>
where
    S: BumpAllocatorSettings,
{
    pub(crate) const UNALLOCATED: Self = {
        assert!(!S::GUARANTEED_ALLOCATED);

        Self {
            header: ChunkHeader::unallocated::<S>().cast(),
            marker: PhantomData,
        }
    };

    const CLAIMED: Self = {
        assert!(S::CLAIMABLE);

        Self {
            header: ChunkHeader::claimed::<S>().cast(),
            marker: PhantomData,
        }
    };

    #[inline(always)]
    pub(crate) fn header(self) -> NonNull<ChunkHeader<A>> {
        self.header
    }

    #[inline(always)]
    fn is_claimed(self) -> bool {
        S::CLAIMABLE && self.header.cast() == ChunkHeader::claimed::<S>()
    }

    #[inline(always)]
    pub(crate) fn is_unallocated(self) -> bool {
        !S::GUARANTEED_ALLOCATED && self.header.cast() == ChunkHeader::unallocated::<S>()
    }

    #[inline(always)]
    pub(crate) fn classify(self) -> ChunkClass<A, S> {
        if self.is_claimed() {
            return ChunkClass::Claimed;
        }

        if self.is_unallocated() {
            return ChunkClass::Unallocated;
        }

        ChunkClass::NonDummy(NonDummyChunk(self))
    }

    #[inline(always)]
    pub(crate) fn as_non_dummy(self) -> Option<NonDummyChunk<A, S>> {
        match self.classify() {
            ChunkClass::Claimed | ChunkClass::Unallocated => None,
            ChunkClass::NonDummy(chunk) => Some(chunk),
        }
    }

    /// Attempts to allocate a block of memory.
    ///
    /// On success, returns a [`NonNull<u8>`] meeting the size and alignment guarantees of `layout`.
    #[inline(always)]
    pub(crate) fn alloc(self, layout: impl LayoutProps) -> Option<NonNull<u8>> {
        let props = self.bump_props(layout);

        if S::UP {
            let BumpUp { new_pos, ptr } = bump_up(props)?;

            // SAFETY: allocations never succeed for a dummy chunk
            unsafe {
                let chunk = self.as_non_dummy_unchecked();
                chunk.set_pos_addr(new_pos);
                Some(chunk.content_ptr_from_addr(ptr))
            }
        } else {
            let ptr = bump_down(props)?;

            // SAFETY: allocations never succeed for a dummy chunk
            unsafe {
                let chunk = self.as_non_dummy_unchecked();
                chunk.set_pos_addr(ptr);
                Some(chunk.content_ptr_from_addr(ptr))
            }
        }
    }
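
    // Sketch of the two directions above (illustrative addresses, assuming `bump_up`
    // aligns upwards and `bump_down` subtracts the size and aligns downwards): for an
    // upwards chunk with `pos = 0x1004` and a 12-byte, 8-aligned layout, the allocation
    // sits at 0x1008 and `new_pos` becomes 0x1014. For a downwards chunk with
    // `pos = 0x2000`, the position drops to 0x1FF0 (0x2000 - 12, aligned down to 8),
    // which is also the allocation's address, so a single value serves as both.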

    /// Prepares allocation for a block of memory.
    ///
    /// On success, returns a [`NonNull<u8>`] meeting the size and alignment guarantees of `layout`.
    ///
    /// This is like [`alloc`](Self::alloc), except that it won't change the bump pointer.
    #[inline(always)]
    pub(crate) fn prepare_allocation(self, layout: impl LayoutProps) -> Option<NonNull<u8>> {
        let props = self.bump_props(layout);

        let ptr = if S::UP { bump_up(props)?.ptr } else { bump_down(props)? };

        // SAFETY: allocations never succeed for a dummy chunk
        unsafe {
            let chunk = self.as_non_dummy_unchecked();
            Some(chunk.content_ptr_from_addr(ptr))
        }
    }

    /// Returns the rest of the capacity of the chunk.
    /// This does not change the position within the chunk.
    ///
    /// This is used in [`MutBumpVec`] where we mutably borrow bump access.
    /// In this case we do not want to update the bump pointer. This way
    /// neither reallocations (a new chunk) nor dropping needs to move the bump pointer.
    /// The bump pointer is only updated when we call [`into_slice`].
    ///
    /// - `range.start` and `range.end` are aligned.
    /// - `layout.size` must not be zero
    /// - `layout.size` must be a multiple of `layout.align`
    ///
    /// [`MutBumpVec`]: crate::MutBumpVec
    /// [`into_slice`]: crate::MutBumpVec::into_slice
    #[inline(always)]
    pub(crate) fn prepare_allocation_range(self, layout: impl LayoutProps) -> Option<Range<NonNull<u8>>> {
        let props = self.bump_props(layout);

        let range = if S::UP {
            bump_prepare_up(props)
        } else {
            bump_prepare_down(props)
        }?;

        // SAFETY: allocations never succeed for a dummy chunk
        unsafe {
            let chunk = self.as_non_dummy_unchecked();
            Some(chunk.content_ptr_from_addr_range(range))
        }
    }

    #[inline(always)]
    fn bump_props<L>(self, layout: L) -> BumpProps
    where
        L: LayoutProps,
    {
        let pos = self.pos().addr().get();
        let end = unsafe { self.header.as_ref() }.end.addr().get();

        let start = if S::UP { pos } else { end };
        let end = if S::UP { end } else { pos };

        #[cfg(debug_assertions)]
        if !matches!(self.classify(), ChunkClass::NonDummy(_)) {
            assert!(start > end);
        }

        BumpProps {
            start,
            end,
            layout: *layout,
            min_align: S::MIN_ALIGN,
            align_is_const: L::ALIGN_IS_CONST,
            size_is_const: L::SIZE_IS_CONST,
            size_is_multiple_of_align: L::SIZE_IS_MULTIPLE_OF_ALIGN,
        }
    }

    #[inline(always)]
    pub(crate) fn pos(self) -> NonNull<u8> {
        unsafe { self.header.as_ref().pos.get() }
    }

    #[inline(always)]
    pub(crate) unsafe fn as_non_dummy_unchecked(self) -> NonDummyChunk<A, S> {
        debug_assert!(matches!(self.classify(), ChunkClass::NonDummy(_)));
        NonDummyChunk(self)
    }

    /// Cast the settings.
    pub(crate) unsafe fn cast<S2>(self) -> RawChunk<A, S2> {
        RawChunk {
            header: self.header,
            marker: PhantomData,
        }
    }
}

// Methods only available for a non-dummy chunk.
impl<A, S> NonDummyChunk<A, S>
where
    S: BumpAllocatorSettings,
{
    pub(crate) fn new<E>(
        chunk_size: ChunkSize<A, S::Up>,
        prev: Option<NonDummyChunk<A, S>>,
        allocator: A,
    ) -> Result<NonDummyChunk<A, S>, E>
    where
        A: Allocator,
        E: ErrorBehavior,
    {
        let min_size = const {
            match ChunkSize::<A, S::Up>::from_hint(S::MINIMUM_CHUNK_SIZE) {
                Some(some) => some,
                None => panic!("failed to calculate minimum chunk size"),
            }
        };

        let layout = chunk_size.max(min_size).layout().ok_or_else(E::capacity_overflow)?;

        let allocation = match allocator.allocate(layout) {
            Ok(ok) => ok,
            Err(AllocError) => return Err(E::allocation(layout)),
        };

        let ptr = non_null::as_non_null_ptr(allocation);
        let size = allocation.len();

        // Note that the allocation's size may be larger than
        // the requested layout's size.
        //
        // We could ignore the allocation's size and just use
        // our layout's size, but then we would waste the extra
        // space the allocator might have given us.
        //
        // The returned size does not satisfy our invariants though,
        // so we need to align it first.
        //
        // See `align_allocation_size` below for details.
        let size = chunk_size.align_allocation_size(size);

        debug_assert!(size >= layout.size());
        debug_assert!(size % MIN_CHUNK_ALIGN == 0);

        let prev = Cell::new(prev.map(|c| c.header));
        let next = Cell::new(None);

        let header = unsafe {
            if S::UP {
                let header = ptr.cast::<ChunkHeader<A>>();

                header.write(ChunkHeader {
                    pos: Cell::new(header.add(1).cast()),
                    end: ptr.add(size),
                    prev,
                    next,
                    allocator,
                });

                header
            } else {
                let header = ptr.add(size).cast::<ChunkHeader<A>>().sub(1);

                header.write(ChunkHeader {
                    pos: Cell::new(header.cast()),
                    end: ptr,
                    prev,
                    next,
                    allocator,
                });

                header
            }
        };

        Ok(NonDummyChunk(RawChunk {
            header,
            marker: PhantomData,
        }))
    }
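
    // Resulting chunk layout (derived from the two branches above):
    //
    //     upwards:    [ ChunkHeader | content ............................ ] <- end
    //                                 ^ pos starts here and grows upwards
    //
    //     downwards:  [ ............................ content | ChunkHeader ]
    //                  ^ end                                   ^ pos starts here, shrinks downwards
    //
    // In both cases the header lives inside the allocation it describes, and `pos`
    // initially points at the boundary between header and content.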

    /// # Panics
    ///
    /// [`self.next`](NonDummyChunk::next) must return `None`.
    pub(crate) fn append_for<B: ErrorBehavior>(self, layout: Layout) -> Result<Self, B>
    where
        A: Allocator + Clone,
    {
        debug_assert!(self.next().is_none());

        let required_size = ChunkSizeHint::for_capacity(layout).ok_or_else(B::capacity_overflow)?;
        let grown_size = self.grow_size()?;
        let size = required_size.max(grown_size).calc_size().ok_or_else(B::capacity_overflow)?;
        let allocator = unsafe { self.header.as_ref().allocator.clone() };
        let new_chunk = Self::new::<B>(size, Some(self), allocator)?;

        unsafe {
            self.header.as_ref().next.set(Some(new_chunk.header));
        }

        Ok(new_chunk)
    }

    #[inline(always)]
    fn grow_size<B: ErrorBehavior>(self) -> Result<ChunkSizeHint<A, S::Up>, B> {
        let Some(size) = self.size().get().checked_mul(2) else {
            return Err(B::capacity_overflow());
        };

        Ok(ChunkSizeHint::new(size))
    }
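
    // Growth example (illustrative): a 512-byte chunk appends for a 2048-byte layout.
    // `grow_size` proposes 1024 (double the current size), `for_capacity` asks for at
    // least enough room for the layout, and `append_for` takes the maximum of the two,
    // so the new chunk is sized for the 2048-byte request. For small layouts the
    // doubling dominates, giving the usual geometric growth of the chunk list.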

    #[inline(always)]
    pub(crate) fn allocator<'a>(self) -> &'a A {
        unsafe { &self.header.as_ref().allocator }
    }

    #[inline(always)]
    pub(crate) fn prev(self) -> Option<NonDummyChunk<A, S>> {
        unsafe {
            Some(NonDummyChunk(RawChunk {
                header: self.header.as_ref().prev.get()?,
                marker: PhantomData,
            }))
        }
    }

    #[inline(always)]
    pub(crate) fn next(self) -> Option<NonDummyChunk<A, S>> {
        unsafe {
            Some(NonDummyChunk(RawChunk {
                header: self.header.as_ref().next.get()?,
                marker: PhantomData,
            }))
        }
    }

    #[inline(always)]
    pub(crate) fn size(self) -> NonZeroUsize {
        let start = self.chunk_start().addr().get();
        let end = self.chunk_end().addr().get();
        unsafe { NonZeroUsize::new_unchecked(end - start) }
    }

    #[inline(always)]
    pub(crate) fn capacity(self) -> usize {
        let start = self.content_start().addr().get();
        let end = self.content_end().addr().get();
        end - start
    }

    #[inline(always)]
    pub(crate) fn allocated(self) -> usize {
        let range = self.allocated_range();
        let start = range.start.addr().get();
        let end = range.end.addr().get();
        end - start
    }

    #[inline(always)]
    pub(crate) fn remaining(self) -> usize {
        let range = self.remaining_range();
        let start = range.start.addr().get();
        let end = range.end.addr().get();
        end - start
    }

    #[inline(always)]
    fn reset(self) {
        unsafe {
            if S::UP {
                self.set_pos(self.content_start());
            } else {
                self.set_pos(self.content_end());
            }
        }
    }

    #[inline(always)]
    pub(crate) fn chunk_start(self) -> NonNull<u8> {
        unsafe { if S::UP { self.header.cast() } else { self.header.as_ref().end } }
    }

    #[inline(always)]
    pub(crate) fn chunk_end(self) -> NonNull<u8> {
        unsafe {
            if S::UP {
                self.header.as_ref().end
            } else {
                self.after_header()
            }
        }
    }

    #[inline(always)]
    pub(crate) fn content_start(self) -> NonNull<u8> {
        if S::UP { self.after_header() } else { self.chunk_start() }
    }

    #[inline(always)]
    pub(crate) fn content_end(self) -> NonNull<u8> {
        if S::UP { self.chunk_end() } else { self.header.cast() }
    }

    /// # Safety
    /// [`contains_addr_or_end`](NonDummyChunk::contains_addr_or_end) must return `true`.
    #[inline(always)]
    pub(crate) unsafe fn set_pos(self, ptr: NonNull<u8>) {
        unsafe { self.set_pos_addr(ptr.addr().get()) };
    }

    /// # Safety
    /// [`contains_addr_or_end`](NonDummyChunk::contains_addr_or_end) must return `true`.
    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr(self, addr: usize) {
        unsafe { self.header.as_ref().pos.set(self.content_ptr_from_addr(addr)) };
    }

    /// Sets the bump position and aligns it to the required `MIN_ALIGN`.
    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr_and_align(self, pos: usize) {
        unsafe {
            let addr = align_pos(S::UP, S::MIN_ALIGN, pos);
            self.set_pos_addr(addr);
        }
    }

    /// A version of [`set_pos_addr_and_align`](Self::set_pos_addr_and_align) that only aligns the pointer
    /// if the `pos_align` is smaller than the `MIN_ALIGN`.
    ///
    /// This should only be called when the `pos_align` is statically known so
    /// the branch gets optimized out.
    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr_and_align_from(self, mut pos: usize, pos_align: usize) {
        debug_assert_eq!(pos % pos_align, 0);

        if pos_align < S::MIN_ALIGN {
            pos = align_pos(S::UP, S::MIN_ALIGN, pos);
        }

        unsafe { self.set_pos_addr(pos) };
    }

    /// # Safety
    /// [`contains_addr_or_end`](NonDummyChunk::contains_addr_or_end) must return `true`.
    #[inline(always)]
    unsafe fn content_ptr_from_addr(self, addr: usize) -> NonNull<u8> {
        unsafe {
            debug_assert!(self.contains_addr_or_end(addr));
            let ptr = self.header.cast();
            let addr = NonZeroUsize::new_unchecked(addr);
            ptr.with_addr(addr)
        }
    }

    #[inline(always)]
    pub(crate) unsafe fn content_ptr_from_addr_range(self, range: Range<usize>) -> Range<NonNull<u8>> {
        unsafe {
            debug_assert!(range.start <= range.end);
            let start = self.content_ptr_from_addr(range.start);
            let end = self.content_ptr_from_addr(range.end);
            start..end
        }
    }

    #[inline(always)]
    fn contains_addr_or_end(self, addr: usize) -> bool {
        let start = self.content_start().addr().get();
        let end = self.content_end().addr().get();
        addr >= start && addr <= end
    }

    #[inline(always)]
    fn allocated_range(self) -> Range<NonNull<u8>> {
        if S::UP {
            self.content_start()..self.pos()
        } else {
            self.pos()..self.content_end()
        }
    }

    #[inline(always)]
    fn remaining_range(self) -> Range<NonNull<u8>> {
        if S::UP {
            let start = self.pos();
            let end = self.content_end();
            start..end
        } else {
            let start = self.content_start();
            let end = self.pos();
            start..end
        }
    }

    #[inline(always)]
    fn after_header(self) -> NonNull<u8> {
        unsafe { self.header.add(1).cast() }
    }

    /// This resolves the previous chunk before calling `f`, so calling [`deallocate`](NonDummyChunk::deallocate) on the chunk parameter of `f` is fine.
    fn for_each_prev(self, mut f: impl FnMut(NonDummyChunk<A, S>)) {
        let mut iter = self.prev();

        while let Some(chunk) = iter {
            iter = chunk.prev();
            f(chunk);
        }
    }

    /// This resolves the next chunk before calling `f`. So calling [`deallocate`](NonDummyChunk::deallocate) on the chunk parameter of `f` is fine.
    fn for_each_next(self, mut f: impl FnMut(NonDummyChunk<A, S>)) {
        let mut iter = self.next();

        while let Some(chunk) = iter {
            iter = chunk.next();
            f(chunk);
        }
    }

    /// # Safety
    /// - self must not be used after calling this.
    unsafe fn deallocate(self)
    where
        A: Allocator,
    {
        let allocator = unsafe { ptr::read(&raw const self.header.as_ref().allocator) };

        let ptr = self.chunk_start();
        let layout = self.layout();

        unsafe {
            allocator.deallocate(ptr, layout);
        }
    }

    #[inline(always)]
    pub(crate) fn layout(self) -> Layout {
        // SAFETY: this layout fits the one we allocated, which means it must be valid
        unsafe { Layout::from_size_align_unchecked(self.size().get(), align_of::<ChunkHeader<A>>()) }
    }
}

pub(crate) enum ChunkClass<A, S: BumpAllocatorSettings> {
    Claimed,
    Unallocated,
    NonDummy(NonDummyChunk<A, S>),
}