// bump_scope/raw_bump.rs

use core::{
    alloc::Layout,
    cell::Cell,
    marker::PhantomData,
    num::NonZeroUsize,
    ops::{Deref, Range},
    ptr::{self, NonNull},
};

use crate::{
    BaseAllocator, Checkpoint, SizedTypeProperties, align_pos,
    alloc::{AllocError, Allocator},
    bumping::{BumpProps, BumpUp, MIN_CHUNK_ALIGN, bump_down, bump_prepare_down, bump_prepare_up, bump_up},
    chunk::{ChunkHeader, ChunkSize, ChunkSizeHint},
    error_behavior::{self, ErrorBehavior},
    layout::{ArrayLayout, CustomLayout, LayoutProps, SizedLayout},
    polyfill::non_null,
    settings::{BumpAllocatorSettings, False, MinimumAlignment, SupportedMinimumAlignment},
    stats::Stats,
};

/// The internal type used by `Bump` and `BumpScope`.
///
/// All APIs that can fail due to allocation failure take an `E: ErrorBehavior`
/// parameter instead of having separate `try_` and non-`try_` versions.
///
/// It does not concern itself with deallocating chunks or the base allocator.
/// A clone of this type is just a bitwise copy; `manually_drop` must only be called
/// once per bump allocator.
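///
/// For illustration only, a sketch of how a caller can expose both flavors of the
/// same generic method; the `infallible` helper is a placeholder, and `AllocError`
/// implementing `ErrorBehavior` is assumed here rather than taken from this file:
///
/// ```ignore
/// fn alloc_layout(&self, layout: Layout) -> NonNull<u8> {
///     // error behavior that panics on allocation failure
///     infallible(self.raw.alloc(layout))
/// }
///
/// fn try_alloc_layout(&self, layout: Layout) -> Result<NonNull<u8>, AllocError> {
///     // error behavior that reports allocation failure as `Err`
///     self.raw.alloc(layout)
/// }
/// ```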
pub(crate) struct RawBump<A, S> {
    /// Either a chunk allocated from the `allocator`, or a `CLAIMED`
    /// or `UNALLOCATED` dummy chunk.
    pub(crate) chunk: Cell<RawChunk<A, S>>,
}

impl<A, S> Clone for RawBump<A, S> {
    fn clone(&self) -> Self {
        Self {
            chunk: self.chunk.clone(),
        }
    }
}

impl<A, S> RawBump<A, S>
where
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    pub(crate) const fn new() -> Self
    where
        S: BumpAllocatorSettings<GuaranteedAllocated = False>,
    {
        Self {
            chunk: Cell::new(RawChunk::UNALLOCATED),
        }
    }

    #[inline(always)]
    pub(crate) fn is_claimed(&self) -> bool {
        self.chunk.get().is_claimed()
    }

    #[inline(always)]
    pub(crate) fn allocator<'a>(&self) -> Option<&'a A> {
        match self.chunk.get().classify() {
            ChunkClass::Claimed | ChunkClass::Unallocated => None,
            ChunkClass::NonDummy(chunk) => Some(chunk.allocator()),
        }
    }
}

impl<A, S> RawBump<A, S>
where
    A: Allocator,
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    pub(crate) fn with_size<E: ErrorBehavior>(size: ChunkSize<A, S>, allocator: A) -> Result<Self, E> {
        Ok(Self {
            chunk: Cell::new(NonDummyChunk::new::<E>(size, None, allocator)?.raw),
        })
    }

    #[inline(always)]
    pub(crate) fn reset(&self) {
        let Some(mut chunk) = self.chunk.get().as_non_dummy() else {
            return;
        };

        unsafe {
            chunk.for_each_prev(|chunk| chunk.deallocate());

            while let Some(next) = chunk.next() {
                chunk.deallocate();
                chunk = next;
            }

            chunk.header.as_ref().prev.set(None);
        }

        chunk.reset();

        self.chunk.set(chunk.raw);
    }

    pub(crate) unsafe fn manually_drop(&mut self) {
        match self.chunk.get().classify() {
            ChunkClass::Claimed => {
                // The user must have somehow leaked a `BumpClaimGuard`.
            }
            ChunkClass::Unallocated => (),
            ChunkClass::NonDummy(chunk) => unsafe {
                chunk.for_each_prev(|chunk| chunk.deallocate());
                chunk.for_each_next(|chunk| chunk.deallocate());
                chunk.deallocate();
            },
        }
    }
}

impl<A, S> RawBump<A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    pub(crate) fn claim(&self) -> RawBump<A, S> {
        const {
            assert!(S::CLAIMABLE, "`claim` is only available with the setting `CLAIMABLE = true`");
        }

        #[cold]
        #[inline(never)]
        fn already_claimed() {
            panic!("bump allocator is already claimed");
        }

        if self.chunk.get().is_claimed() {
            already_claimed();
        }

        RawBump {
            chunk: Cell::new(self.chunk.replace(RawChunk::<A, S>::CLAIMED)),
        }
    }

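    /// Writes the claimant's chunk back into `self`, undoing a prior [`claim`](Self::claim).
    ///
    /// Illustrative pairing (a sketch; in practice these calls are presumably bracketed
    /// by a guard such as the `BumpClaimGuard` mentioned in `manually_drop`):
    ///
    /// ```ignore
    /// let claimed = raw.claim();   // `raw` now holds the CLAIMED dummy chunk
    /// // ... allocate through `claimed` ...
    /// raw.reclaim(&claimed);       // `raw` points at the real chunk again
    /// ```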
    #[inline(always)]
    pub(crate) fn reclaim(&self, claimant: &RawBump<A, S>) {
        self.chunk.set(claimant.chunk.get());
    }

    #[inline(always)]
    pub(crate) fn checkpoint(&self) -> Checkpoint {
        Checkpoint::new(self.chunk.get())
    }

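    /// Resets the bump pointer to a previously created [`checkpoint`](Self::checkpoint).
    ///
    /// Illustrative pairing (a sketch; `E` stands in for some `ErrorBehavior` type and
    /// the safety conditions discussed in the comments below must hold):
    ///
    /// ```ignore
    /// let cp = raw.checkpoint();
    /// let _ = raw.alloc::<E>(Layout::new::<u64>());
    /// // SAFETY: `cp` was created by this bump allocator and is still valid.
    /// unsafe { raw.reset_to(cp) };
    /// ```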
    #[inline]
    pub(crate) unsafe fn reset_to(&self, checkpoint: Checkpoint) {
        // If the checkpoint was created when the bump allocator had no allocated chunk
        // then the chunk pointer will point to the unallocated chunk header.
        //
        // In such cases we reset the bump pointer to the very start of the very first chunk.
        //
        // We don't check whether the chunk pointer points to the unallocated chunk header
        // if the bump allocator is `GUARANTEED_ALLOCATED`. We may skip that check
        // because of this safety condition of `reset_to`:
        // > the checkpoint must not have been created by an `!GUARANTEED_ALLOCATED` bump allocator when self is `GUARANTEED_ALLOCATED`
        if !S::GUARANTEED_ALLOCATED && checkpoint.chunk == ChunkHeader::unallocated::<S>() {
            self.reset_to_start();
            return;
        }

        #[cfg(debug_assertions)]
        {
            assert_ne!(
                checkpoint.chunk,
                ChunkHeader::claimed::<S>(),
                "the checkpoint must not have been created by a claimed bump allocator"
            );

            assert_ne!(
                self.chunk.get().header.cast(),
                ChunkHeader::claimed::<S>(),
                "this function must not be called on a claimed bump allocator"
            );

            assert_ne!(
                checkpoint.chunk,
                ChunkHeader::unallocated::<S>(),
                "the checkpoint must not have been created by an `!GUARANTEED_ALLOCATED` bump allocator when self is `GUARANTEED_ALLOCATED`"
            );

            let chunk = self
                .stats()
                .small_to_big()
                .find(|chunk| chunk.header() == checkpoint.chunk.cast())
                .expect("this checkpoint does not refer to any chunk in this bump allocator");

            assert!(
                chunk.chunk.contains_addr_or_end(checkpoint.address.get()),
                "checkpoint address does not point within its chunk"
            );
        }

        unsafe {
            checkpoint.reset_within_chunk();

            self.chunk.set(RawChunk {
                header: checkpoint.chunk.cast(),
                marker: PhantomData,
            });
        }
    }

    /// Resets the bump pointer to the very start.
    #[inline]
    pub(crate) fn reset_to_start(&self) {
        if let Some(mut chunk) = self.chunk.get().as_non_dummy() {
            while let Some(prev) = chunk.prev() {
                chunk = prev;
            }

            chunk.reset();

            self.chunk.set(chunk.raw);
        }
    }

    #[inline(always)]
    pub(crate) fn reserve<E: ErrorBehavior>(&self, additional: usize) -> Result<(), E>
    where
        A: BaseAllocator<S::GuaranteedAllocated>,
    {
        let chunk = self.chunk.get();

        match chunk.classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => {
                let Ok(layout) = Layout::from_size_align(additional, 1) else {
                    return Err(E::capacity_overflow());
                };

                let new_chunk = NonDummyChunk::<A, S>::new(
                    ChunkSize::<A, S>::from_capacity(layout).ok_or_else(E::capacity_overflow)?,
                    None,
                    // When this bump allocator is unallocated, `A` is guaranteed to implement `Default`,
                    // `default_or_panic` will not panic.
                    A::default_or_panic(),
                )?;

                self.chunk.set(new_chunk.raw);
                Ok(())
            }
            ChunkClass::NonDummy(mut chunk) => {
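                // Worked example with illustrative numbers: for `additional == 1000`,
                // a current chunk with 100 bytes remaining and one following chunk
                // with a 400-byte capacity, the subtractions below leave
                // `additional == 500`, so a new chunk is appended for the remaining
                // 500 bytes at the end.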
                let mut additional = additional;

                if let Some(rest) = additional.checked_sub(chunk.remaining()) {
                    additional = rest;
                } else {
                    return Ok(());
                }

                while let Some(next) = chunk.next() {
                    chunk = next;

                    if let Some(rest) = additional.checked_sub(chunk.capacity()) {
                        additional = rest;
                    } else {
                        return Ok(());
                    }
                }

                if additional == 0 {
                    return Ok(());
                }

                let Ok(layout) = Layout::from_size_align(additional, 1) else {
                    return Err(E::capacity_overflow());
                };

                chunk.append_for(layout).map(drop)
            }
        }
    }

    #[inline(always)]
    pub(crate) fn alloc<B: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, B> {
        match self.chunk.get().alloc(CustomLayout(layout)) {
            Some(ptr) => Ok(ptr),
            None => self.alloc_in_another_chunk(layout),
        }
    }

    #[inline(always)]
    pub(crate) fn alloc_sized<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
        match self.chunk.get().alloc(SizedLayout::new::<T>()) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_sized_in_another_chunk::<E, T>() {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn alloc_slice<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<T>, E> {
        let Ok(layout) = ArrayLayout::array::<T>(len) else {
            return Err(E::capacity_overflow());
        };

        match self.chunk.get().alloc(layout) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_slice_in_another_chunk::<E, T>(len) {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn alloc_slice_for<E: ErrorBehavior, T>(&self, value: &[T]) -> Result<NonNull<T>, E> {
        let layout = ArrayLayout::for_value(value);

        match self.chunk.get().alloc(layout) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_slice_in_another_chunk::<E, T>(value.len()) {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn prepare_sized_allocation<B: ErrorBehavior, T>(&self) -> Result<NonNull<T>, B> {
        match self.chunk.get().prepare_allocation(SizedLayout::new::<T>()) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.prepare_allocation_in_another_chunk::<B, T>() {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    #[inline(always)]
    pub(crate) fn prepare_slice_allocation<B: ErrorBehavior, T>(&self, min_cap: usize) -> Result<NonNull<[T]>, B> {
        let range = self.prepare_allocation_range::<B, T>(min_cap)?;

        // NB: We can't use `offset_from_unsigned`, because the size is not a multiple of `T`'s size.
        let cap = unsafe { non_null::byte_offset_from_unsigned(range.end, range.start) } / T::SIZE;

        let ptr = if S::UP { range.start } else { unsafe { range.end.sub(cap) } };

        Ok(NonNull::slice_from_raw_parts(ptr, cap))
    }

    #[inline(always)]
    pub(crate) fn prepare_slice_allocation_rev<B: ErrorBehavior, T>(
        &self,
        min_cap: usize,
    ) -> Result<(NonNull<T>, usize), B> {
        let range = self.prepare_allocation_range::<B, T>(min_cap)?;

        // NB: We can't use `offset_from_unsigned`, because the size is not a multiple of `T`'s size.
        let cap = unsafe { non_null::byte_offset_from_unsigned(range.end, range.start) } / T::SIZE;

        let end = if S::UP { unsafe { range.start.add(cap) } } else { range.end };

        Ok((end, cap))
    }

    /// Returns a pointer range.
    /// The start and end pointers are aligned.
    /// But `end - start` is not necessarily a multiple of `size_of::<T>()`.
    /// So `end.offset_from_unsigned(start)` may not be used!
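    ///
    /// Worked example with illustrative numbers: for a `T` with size 12 and a prepared
    /// range spanning 64 bytes, the callers above compute `cap = 64 / 12 = 5` elements
    /// and simply ignore the 4 bytes of slack at the end of the range (or at its start
    /// when bumping downwards).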
    #[inline(always)]
    fn prepare_allocation_range<B: ErrorBehavior, T>(&self, cap: usize) -> Result<Range<NonNull<T>>, B> {
        let Ok(layout) = ArrayLayout::array::<T>(cap) else {
            return Err(B::capacity_overflow());
        };

        let range = match self.chunk.get().prepare_allocation_range(layout) {
            Some(ptr) => ptr,
            None => self.prepare_allocation_range_in_another_chunk(layout)?,
        };

        Ok(range.start.cast::<T>()..range.end.cast::<T>())
    }

    /// Allocation slow path.
    /// The active chunk must *not* have space for `layout`.
    #[cold]
    #[inline(never)]
    pub(crate) fn alloc_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, E> {
        unsafe { self.in_another_chunk(CustomLayout(layout), RawChunk::alloc) }
    }

    #[cold]
    #[inline(never)]
    fn alloc_sized_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E> {
        self.alloc_in_another_chunk(Layout::new::<T>())
    }

    #[cold]
    #[inline(never)]
    fn alloc_slice_in_another_chunk<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<u8>, E> {
        let Ok(layout) = Layout::array::<T>(len) else {
            return Err(E::capacity_overflow());
        };

        self.alloc_in_another_chunk(layout)
    }

    #[cold]
    #[inline(never)]
    pub(crate) fn prepare_allocation_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E> {
        let layout = CustomLayout(Layout::new::<T>());

        unsafe { self.in_another_chunk(layout, RawChunk::prepare_allocation) }
    }

    #[cold]
    #[inline(never)]
    fn prepare_allocation_range_in_another_chunk<E: ErrorBehavior>(
        &self,
        layout: ArrayLayout,
    ) -> Result<Range<NonNull<u8>>, E> {
        unsafe { self.in_another_chunk(layout, RawChunk::prepare_allocation_range) }
    }

    /// # Safety
    ///
    /// `f` on the new chunk created by `NonDummyChunk::append_for` with the layout `layout` must return `Some`.
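    ///
    /// For example, [`alloc_in_another_chunk`](Self::alloc_in_another_chunk) passes
    /// `RawChunk::alloc` as `f`: a chunk freshly appended via `append_for(layout)`
    /// always has room for `layout`, so that call cannot return `None`.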
    #[inline(always)]
    pub(crate) unsafe fn in_another_chunk<E: ErrorBehavior, R, L: LayoutProps>(
        &self,
        layout: L,
        mut f: impl FnMut(RawChunk<A, S>, L) -> Option<R>,
    ) -> Result<R, E> {
        let new_chunk: NonDummyChunk<A, S> = match self.chunk.get().classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => NonDummyChunk::new(
                ChunkSize::from_capacity(*layout).ok_or_else(E::capacity_overflow)?,
                None,
                // When this bump allocator is unallocated, `A` is guaranteed to implement `Default`,
                // `default_or_panic` will not panic.
                A::default_or_panic(),
            ),
            ChunkClass::NonDummy(mut chunk) => {
                while let Some(next_chunk) = chunk.next() {
                    chunk = next_chunk;

                    // We don't reset the chunk position when we leave a scope, so we need to do it here.
                    chunk.reset();

                    self.chunk.set(chunk.raw);

                    if let Some(ptr) = f(chunk.raw, layout) {
                        return Ok(ptr);
                    }
                }

                // there is no chunk that fits, we need a new chunk
                chunk.append_for(*layout)
            }
        }?;

        self.chunk.set(new_chunk.raw);

        match f(new_chunk.raw, layout) {
            Some(ptr) => Ok(ptr),
            _ => {
                // SAFETY: We just appended a chunk for that specific layout, it must have enough space.
                // We don't panic here so we don't produce any panic code when using `try_` apis.
                // We check for that in `test-no-panic`.
                unsafe { core::hint::unreachable_unchecked() }
            }
        }
    }

    pub(crate) fn make_allocated<E: ErrorBehavior>(&self) -> Result<(), E> {
        match self.chunk.get().classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => {
                // When this bump allocator is unallocated, `A` is guaranteed to implement `Default`,
                // `default_or_panic` will not panic.
                let new_chunk = NonDummyChunk::new(ChunkSize::MINIMUM, None, A::default_or_panic())?;
                self.chunk.set(new_chunk.raw);
                Ok(())
            }
            ChunkClass::NonDummy(_) => Ok(()),
        }
    }
}

impl<A, S> RawBump<A, S>
where
    S: BumpAllocatorSettings,
{
    /// Returns a type which provides statistics about the memory usage of the bump allocator.
    #[must_use]
    #[inline(always)]
    pub fn stats<'a>(&self) -> Stats<'a, A, S> {
        Stats::from_raw_chunk(self.chunk.get())
    }

    #[inline(always)]
    pub(crate) fn align<const ALIGN: usize>(&self)
    where
        MinimumAlignment<ALIGN>: SupportedMinimumAlignment,
    {
        self.align_to::<MinimumAlignment<ALIGN>>();
    }

    #[inline(always)]
    pub(crate) fn align_to<MinimumAlignment>(&self)
    where
        MinimumAlignment: SupportedMinimumAlignment,
    {
        if MinimumAlignment::VALUE > S::MIN_ALIGN {
            // a dummy chunk is always aligned
            if let Some(chunk) = self.chunk.get().as_non_dummy() {
                let pos = chunk.pos().addr().get();
                let addr = align_pos(S::UP, MinimumAlignment::VALUE, pos);
                unsafe { chunk.set_pos_addr(addr) };
            }
        }
    }

    pub(crate) fn ensure_satisfies_settings<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");
        }

        if !NewS::CLAIMABLE && self.chunk.get().is_claimed() {
            error_behavior::panic::claimed();
        }

        if NewS::GUARANTEED_ALLOCATED && self.chunk.get().is_unallocated() {
            error_behavior::panic::unallocated();
        }

        self.align_to::<NewS::MinimumAlignment>();
    }

    pub(crate) fn ensure_scope_satisfies_settings<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");

            assert!(
                NewS::MIN_ALIGN >= S::MIN_ALIGN,
                "can't decrease minimum alignment using `BumpScope::with_settings`"
            );
        }

        if !NewS::CLAIMABLE && self.chunk.get().is_claimed() {
            error_behavior::panic::claimed();
        }

        // A scope by value is always allocated, created by `(try_)by_value`.

        self.align_to::<NewS::MinimumAlignment>();
    }

    #[expect(clippy::unused_self)]
    pub(crate) fn ensure_satisfies_settings_for_borrow<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");

            assert!(
                NewS::MIN_ALIGN == S::MIN_ALIGN,
                "can't change minimum alignment using `Bump(Scope)::borrow_with_settings`"
            );

            assert!(
                NewS::CLAIMABLE == S::CLAIMABLE,
                "can't change claimable property using `Bump(Scope)::borrow_with_settings`"
            );

            // A reference to a guaranteed-allocated `Bump(Scope)` can never become unallocated.
            assert!(
                NewS::GUARANTEED_ALLOCATED <= S::GUARANTEED_ALLOCATED,
                "can't increase guaranteed-allocated property using `Bump(Scope)::borrow_with_settings`"
            );
        }
    }

    pub(crate) fn ensure_satisfies_settings_for_borrow_mut<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");

            assert!(
                NewS::MIN_ALIGN >= S::MIN_ALIGN,
                "can't decrease minimum alignment using `Bump(Scope)::borrow_mut_with_settings`"
            );

            assert!(
                NewS::CLAIMABLE == S::CLAIMABLE,
                "can't change claimable property using `Bump(Scope)::borrow_mut_with_settings`"
            );

            assert!(
                NewS::GUARANTEED_ALLOCATED == S::GUARANTEED_ALLOCATED,
                "can't change guaranteed-allocated property using `Bump(Scope)::borrow_mut_with_settings`"
            );
        }

        self.align_to::<NewS::MinimumAlignment>();
    }

    #[inline]
    pub(crate) fn into_raw(self) -> NonNull<()> {
        self.chunk.get().header.cast()
    }

    #[inline]
    pub(crate) unsafe fn from_raw(ptr: NonNull<()>) -> Self {
        Self {
            chunk: Cell::new(RawChunk {
                header: ptr.cast(),
                marker: PhantomData,
            }),
        }
    }
}

pub(crate) struct RawChunk<A, S> {
    pub(crate) header: NonNull<ChunkHeader<A>>,
    pub(crate) marker: PhantomData<fn() -> (A, S)>,
}

impl<A, S> Clone for RawChunk<A, S> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<A, S> Copy for RawChunk<A, S> {}

pub(crate) struct NonDummyChunk<A, S> {
    raw: RawChunk<A, S>,
}

impl<A, S> Copy for NonDummyChunk<A, S> {}

impl<A, S> Clone for NonDummyChunk<A, S> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<A, S> Deref for NonDummyChunk<A, S> {
    type Target = RawChunk<A, S>;

    fn deref(&self) -> &Self::Target {
        &self.raw
    }
}

impl<A, S> RawChunk<A, S>
where
    S: BumpAllocatorSettings,
{
    pub(crate) const UNALLOCATED: Self = {
        assert!(!S::GUARANTEED_ALLOCATED);

        Self {
            header: ChunkHeader::unallocated::<S>().cast(),
            marker: PhantomData,
        }
    };

    const CLAIMED: Self = {
        assert!(S::CLAIMABLE);

        Self {
            header: ChunkHeader::claimed::<S>().cast(),
            marker: PhantomData,
        }
    };

    #[inline(always)]
    pub(crate) fn header(self) -> NonNull<ChunkHeader<A>> {
        self.header
    }

    #[inline(always)]
    fn is_claimed(self) -> bool {
        S::CLAIMABLE && self.header.cast() == ChunkHeader::claimed::<S>()
    }

    #[inline(always)]
    pub(crate) fn is_unallocated(self) -> bool {
        !S::GUARANTEED_ALLOCATED && self.header.cast() == ChunkHeader::unallocated::<S>()
    }

    #[inline(always)]
    pub(crate) fn classify(self) -> ChunkClass<A, S> {
        if self.is_claimed() {
            return ChunkClass::Claimed;
        }

        if self.is_unallocated() {
            return ChunkClass::Unallocated;
        }

        ChunkClass::NonDummy(NonDummyChunk { raw: self })
    }

    #[inline(always)]
    pub(crate) fn as_non_dummy(self) -> Option<NonDummyChunk<A, S>> {
        match self.classify() {
            ChunkClass::Claimed | ChunkClass::Unallocated => None,
            ChunkClass::NonDummy(chunk) => Some(chunk),
        }
    }

    /// Attempts to allocate a block of memory.
    ///
    /// On success, returns a [`NonNull<u8>`] meeting the size and alignment guarantees of `layout`.
    #[inline(always)]
    pub(crate) fn alloc(self, layout: impl LayoutProps) -> Option<NonNull<u8>> {
        let props = self.bump_props(layout);

        if S::UP {
            let BumpUp { new_pos, ptr } = bump_up(props)?;

            // SAFETY: allocations never succeed for a dummy chunk
            unsafe {
                let chunk = self.as_non_dummy_unchecked();
                chunk.set_pos_addr(new_pos);
                Some(chunk.content_ptr_from_addr(ptr))
            }
        } else {
            let ptr = bump_down(props)?;

            // SAFETY: allocations never succeed for a dummy chunk
            unsafe {
                let chunk = self.as_non_dummy_unchecked();
                chunk.set_pos_addr(ptr);
                Some(chunk.content_ptr_from_addr(ptr))
            }
        }
    }

    /// Prepares allocation for a block of memory.
    ///
    /// On success, returns a [`NonNull<u8>`] meeting the size and alignment guarantees of `layout`.
    ///
    /// This is like [`alloc`](Self::alloc), except that it won't change the bump pointer.
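    ///
    /// Illustrative use (a sketch): reserve space without committing it, then move the
    /// bump position later once the final length is known.
    ///
    /// ```ignore
    /// let ptr = chunk.prepare_allocation(SizedLayout::new::<u64>())?;
    /// // ... write through `ptr` ...
    /// // commit afterwards, e.g. via `set_pos_addr` on the non-dummy chunk
    /// ```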
    #[inline(always)]
    pub(crate) fn prepare_allocation(self, layout: impl LayoutProps) -> Option<NonNull<u8>> {
        let props = self.bump_props(layout);

        let ptr = if S::UP { bump_up(props)?.ptr } else { bump_down(props)? };

        // SAFETY: allocations never succeed for a dummy chunk
        unsafe {
            let chunk = self.as_non_dummy_unchecked();
            Some(chunk.content_ptr_from_addr(ptr))
        }
    }

    /// Returns the rest of the capacity of the chunk.
    /// This does not change the position within the chunk.
    ///
    /// This is used in [`MutBumpVec`] where we mutably borrow bump access.
    /// In this case we do not want to update the bump pointer. This way
    /// neither reallocations (a new chunk) nor dropping needs to move the bump pointer.
    /// The bump pointer is only updated when we call [`into_slice`].
    ///
    /// - `range.start` and `range.end` are aligned.
    /// - `layout.size` must not be zero
    /// - `layout.size` must be a multiple of `layout.align`
    ///
    /// [`MutBumpVec`]: crate::MutBumpVec
    /// [`into_slice`]: crate::MutBumpVec::into_slice
    #[inline(always)]
    pub(crate) fn prepare_allocation_range(self, layout: impl LayoutProps) -> Option<Range<NonNull<u8>>> {
        let props = self.bump_props(layout);

        let range = if S::UP {
            bump_prepare_up(props)
        } else {
            bump_prepare_down(props)
        }?;

        // SAFETY: allocations never succeed for a dummy chunk
        unsafe {
            let chunk = self.as_non_dummy_unchecked();
            Some(chunk.content_ptr_from_addr_range(range))
        }
    }

    #[inline(always)]
    fn bump_props<L>(self, layout: L) -> BumpProps
    where
        L: LayoutProps,
    {
        let pos = self.pos().addr().get();
        let end = unsafe { self.header.as_ref() }.end.addr().get();

        let start = if S::UP { pos } else { end };
        let end = if S::UP { end } else { pos };

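        // The CLAIMED/UNALLOCATED dummy headers are set up so that `start` lies past
        // `end`; that is what makes every bump attempt on them fail and take the slow
        // path, which the `as_non_dummy_unchecked` safety comments above rely on.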
        #[cfg(debug_assertions)]
        if !matches!(self.classify(), ChunkClass::NonDummy(_)) {
            assert!(start > end);
        }

        BumpProps {
            start,
            end,
            layout: *layout,
            min_align: S::MIN_ALIGN,
            align_is_const: L::ALIGN_IS_CONST,
            size_is_const: L::SIZE_IS_CONST,
            size_is_multiple_of_align: L::SIZE_IS_MULTIPLE_OF_ALIGN,
        }
    }

    #[inline(always)]
    pub(crate) fn pos(self) -> NonNull<u8> {
        unsafe { self.header.as_ref().pos.get() }
    }

    #[inline(always)]
    pub(crate) unsafe fn as_non_dummy_unchecked(self) -> NonDummyChunk<A, S> {
        debug_assert!(matches!(self.classify(), ChunkClass::NonDummy(_)));
        NonDummyChunk { raw: self }
    }
}

// Methods only available for a non-dummy chunk.
impl<A, S> NonDummyChunk<A, S>
where
    S: BumpAllocatorSettings,
{
    pub(crate) fn new<E>(
        chunk_size: ChunkSize<A, S>,
        prev: Option<NonDummyChunk<A, S>>,
        allocator: A,
    ) -> Result<NonDummyChunk<A, S>, E>
    where
        A: Allocator,
        E: ErrorBehavior,
    {
        let layout = chunk_size.layout().ok_or_else(E::capacity_overflow)?;

        let allocation = match allocator.allocate(layout) {
            Ok(ok) => ok,
            Err(AllocError) => return Err(E::allocation(layout)),
        };

        let ptr = non_null::as_non_null_ptr(allocation);
        let size = allocation.len();

        // Note that the allocation's size may be larger than
        // the requested layout's size.
        //
        // We could ignore the allocation's size and just use
        // our layout's size, but then we would be wasting
        // the extra space the allocator might have given us.
        //
        // The returned size does not satisfy our invariants though,
        // so we need to align it first.
        //
        // See `align_allocation_size` for details.
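        // Illustrative numbers (assuming `MIN_CHUNK_ALIGN` is 16 and the adjustment
        // rounds down): if we asked for 4096 bytes and the allocator handed back 4104,
        // aligning 4104 down to a multiple of 16 yields 4096, which still satisfies
        // the debug assertions below.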
        let size = ChunkSize::<A, S>::align_allocation_size(size);

        debug_assert!(size >= layout.size());
        debug_assert!(size % MIN_CHUNK_ALIGN == 0);

        let prev = Cell::new(prev.map(|c| c.header));
        let next = Cell::new(None);

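        // For upward bumping the header sits at the start of the allocation and the
        // content area follows it; for downward bumping the header sits at the end
        // and the content area precedes it.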
        let header = unsafe {
            if S::UP {
                let header = ptr.cast::<ChunkHeader<A>>();

                header.write(ChunkHeader {
                    pos: Cell::new(header.add(1).cast()),
                    end: ptr.add(size),
                    prev,
                    next,
                    allocator,
                });

                header
            } else {
                let header = ptr.add(size).cast::<ChunkHeader<A>>().sub(1);

                header.write(ChunkHeader {
                    pos: Cell::new(header.cast()),
                    end: ptr,
                    prev,
                    next,
                    allocator,
                });

                header
            }
        };

        Ok(NonDummyChunk {
            raw: RawChunk {
                header,
                marker: PhantomData,
            },
        })
    }

    /// # Panics
    ///
    /// [`self.next`](Self::next) must return `None`.
    pub(crate) fn append_for<B: ErrorBehavior>(self, layout: Layout) -> Result<Self, B>
    where
        A: Allocator + Clone,
    {
        debug_assert!(self.next().is_none());

        let required_size = ChunkSizeHint::for_capacity(layout).ok_or_else(B::capacity_overflow)?;
        let grown_size = self.grow_size()?;
        let size = required_size.max(grown_size).calc_size().ok_or_else(B::capacity_overflow)?;
        let allocator = unsafe { self.header.as_ref().allocator.clone() };
        let new_chunk = Self::new::<B>(size, Some(self), allocator)?;

        unsafe {
            self.header.as_ref().next.set(Some(new_chunk.header));
        }

        Ok(new_chunk)
    }

    #[inline(always)]
    fn grow_size<B: ErrorBehavior>(self) -> Result<ChunkSizeHint<A, S>, B> {
        let Some(size) = self.size().get().checked_mul(2) else {
            return Err(B::capacity_overflow());
        };

        Ok(ChunkSizeHint::new(size))
    }

    #[inline(always)]
    pub(crate) fn allocator<'a>(self) -> &'a A {
        unsafe { &self.header.as_ref().allocator }
    }

    #[inline(always)]
    pub(crate) fn prev(self) -> Option<NonDummyChunk<A, S>> {
        unsafe {
            Some(NonDummyChunk {
                raw: RawChunk {
                    header: self.header.as_ref().prev.get()?,
                    marker: PhantomData,
                },
            })
        }
    }

    #[inline(always)]
    pub(crate) fn next(self) -> Option<NonDummyChunk<A, S>> {
        unsafe {
            Some(NonDummyChunk {
                raw: RawChunk {
                    header: self.header.as_ref().next.get()?,
                    marker: PhantomData,
                },
            })
        }
    }

    #[inline(always)]
    pub(crate) fn size(self) -> NonZeroUsize {
        let start = self.chunk_start().addr().get();
        let end = self.chunk_end().addr().get();
        unsafe { NonZeroUsize::new_unchecked(end - start) }
    }

    #[inline(always)]
    pub(crate) fn capacity(self) -> usize {
        let start = self.content_start().addr().get();
        let end = self.content_end().addr().get();
        end - start
    }

    #[inline(always)]
    pub(crate) fn allocated(self) -> usize {
        let range = self.allocated_range();
        let start = range.start.addr().get();
        let end = range.end.addr().get();
        end - start
    }

    #[inline(always)]
    pub(crate) fn remaining(self) -> usize {
        let range = self.remaining_range();
        let start = range.start.addr().get();
        let end = range.end.addr().get();
        end - start
    }

    #[inline(always)]
    fn reset(self) {
        unsafe {
            if S::UP {
                self.set_pos(self.content_start());
            } else {
                self.set_pos(self.content_end());
            }
        }
    }

    #[inline(always)]
    pub(crate) fn chunk_start(self) -> NonNull<u8> {
        unsafe { if S::UP { self.header.cast() } else { self.header.as_ref().end } }
    }

    #[inline(always)]
    pub(crate) fn chunk_end(self) -> NonNull<u8> {
        unsafe {
            if S::UP {
                self.header.as_ref().end
            } else {
                self.after_header()
            }
        }
    }

    #[inline(always)]
    pub(crate) fn content_start(self) -> NonNull<u8> {
        if S::UP { self.after_header() } else { self.chunk_start() }
    }

    #[inline(always)]
    pub(crate) fn content_end(self) -> NonNull<u8> {
        if S::UP { self.chunk_end() } else { self.header.cast() }
    }

    /// # Safety
    /// [`contains_addr_or_end`](Self::contains_addr_or_end) must return true for `ptr`'s address.
    #[inline(always)]
    pub(crate) unsafe fn set_pos(self, ptr: NonNull<u8>) {
        unsafe { self.set_pos_addr(ptr.addr().get()) };
    }

    /// # Safety
    /// [`contains_addr_or_end`](Self::contains_addr_or_end) must return true for `addr`.
    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr(self, addr: usize) {
        unsafe { self.header.as_ref().pos.set(self.content_ptr_from_addr(addr)) };
    }

    /// Sets the bump position and aligns it to the required `MIN_ALIGN`.
    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr_and_align(self, pos: usize) {
        unsafe {
            let addr = align_pos(S::UP, S::MIN_ALIGN, pos);
            self.set_pos_addr(addr);
        }
    }

    /// A version of [`set_pos_addr_and_align`](Self::set_pos_addr_and_align) that only aligns the pointer
    /// if `pos_align` is smaller than the `MIN_ALIGN`.
    ///
    /// This should only be called when `pos_align` is statically known so
    /// the branch gets optimized out.
    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr_and_align_from(self, mut pos: usize, pos_align: usize) {
        debug_assert_eq!(pos % pos_align, 0);

        if pos_align < S::MIN_ALIGN {
            pos = align_pos(S::UP, S::MIN_ALIGN, pos);
        }

        unsafe { self.set_pos_addr(pos) };
    }

    /// # Safety
    /// [`contains_addr_or_end`](Self::contains_addr_or_end) must return true for `addr`.
    #[inline(always)]
    unsafe fn content_ptr_from_addr(self, addr: usize) -> NonNull<u8> {
        unsafe {
            debug_assert!(self.contains_addr_or_end(addr));
            let ptr = self.header.cast();
            let addr = NonZeroUsize::new_unchecked(addr);
            ptr.with_addr(addr)
        }
    }

    #[inline(always)]
    pub(crate) unsafe fn content_ptr_from_addr_range(self, range: Range<usize>) -> Range<NonNull<u8>> {
        unsafe {
            debug_assert!(range.start <= range.end);
            let start = self.content_ptr_from_addr(range.start);
            let end = self.content_ptr_from_addr(range.end);
            start..end
        }
    }

    #[inline(always)]
    fn contains_addr_or_end(self, addr: usize) -> bool {
        let start = self.content_start().addr().get();
        let end = self.content_end().addr().get();
        addr >= start && addr <= end
    }

    #[inline(always)]
    fn allocated_range(self) -> Range<NonNull<u8>> {
        if S::UP {
            self.content_start()..self.pos()
        } else {
            self.pos()..self.content_end()
        }
    }

    #[inline(always)]
    fn remaining_range(self) -> Range<NonNull<u8>> {
        if S::UP {
            let start = self.pos();
            let end = self.content_end();
            start..end
        } else {
            let start = self.content_start();
            let end = self.pos();
            start..end
        }
    }

    #[inline(always)]
    fn after_header(self) -> NonNull<u8> {
        unsafe { self.header.add(1).cast() }
    }

    /// This resolves the next chunk to visit before calling `f`. So calling [`deallocate`](NonDummyChunk::deallocate) on the chunk parameter of `f` is fine.
    fn for_each_prev(self, mut f: impl FnMut(NonDummyChunk<A, S>)) {
        let mut iter = self.prev();

        while let Some(chunk) = iter {
            iter = chunk.prev();
            f(chunk);
        }
    }

    /// This resolves the next chunk before calling `f`. So calling [`deallocate`](NonDummyChunk::deallocate) on the chunk parameter of `f` is fine.
    fn for_each_next(self, mut f: impl FnMut(NonDummyChunk<A, S>)) {
        let mut iter = self.next();

        while let Some(chunk) = iter {
            iter = chunk.next();
            f(chunk);
        }
    }

    /// # Safety
    /// - self must not be used after calling this.
    unsafe fn deallocate(self)
    where
        A: Allocator,
    {
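        // The allocator lives inside the chunk header, i.e. inside the very block we
        // are about to free, so move it out by value before deallocating.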
        let allocator = unsafe { ptr::read(&raw const self.header.as_ref().allocator) };

        let ptr = self.chunk_start();
        let layout = self.layout();

        unsafe {
            allocator.deallocate(ptr, layout);
        }
    }

    #[inline(always)]
    pub(crate) fn layout(self) -> Layout {
        // SAFETY: this layout fits the one we allocated, which means it must be valid
        unsafe { Layout::from_size_align_unchecked(self.size().get(), align_of::<ChunkHeader<A>>()) }
    }
}

pub(crate) enum ChunkClass<A, S: BumpAllocatorSettings> {
    Claimed,
    Unallocated,
    NonDummy(NonDummyChunk<A, S>),
}