Skip to main content

bump_scope/
raw_bump.rs

1use core::{
2    alloc::Layout,
3    cell::Cell,
4    marker::PhantomData,
5    num::NonZeroUsize,
6    ops::{Deref, Range},
7    ptr::{self, NonNull},
8};
9
10use crate::{
11    BaseAllocator, Checkpoint, SizedTypeProperties, align_pos,
12    alloc::{AllocError, Allocator},
13    bumping::{BumpProps, BumpUp, MIN_CHUNK_ALIGN, bump_down, bump_prepare_down, bump_prepare_up, bump_up},
14    chunk::{ChunkHeader, ChunkSize, ChunkSizeHint},
15    error_behavior::{self, ErrorBehavior},
16    layout::{ArrayLayout, CustomLayout, LayoutProps, SizedLayout},
17    polyfill::non_null,
18    settings::{BumpAllocatorSettings, False, MinimumAlignment, SupportedMinimumAlignment},
19    stats::Stats,
20};
21
/// The internal type used by `Bump` and `Bump(Scope)`.
///
/// All of the api that can fail due to allocation failure takes an `E: ErrorBehavior`
/// parameter instead of having a `try_` and non-`try_` version.
///
/// It does not concern itself with deallocating chunks or the base allocator.
/// A clone of this type is just a bitwise copy; `manually_drop` must only be called
/// once for this bump allocator.
pub(crate) struct RawBump<A, S> {
    /// Either a chunk allocated from the `allocator`, or either a `CLAIMED`
    /// or `UNALLOCATED` dummy chunk.
    pub(crate) chunk: Cell<RawChunk<A, S>>,
}
35
36impl<A, S> Clone for RawBump<A, S> {
37    fn clone(&self) -> Self {
38        Self {
39            chunk: self.chunk.clone(),
40        }
41    }
42}
43
impl<A, S> RawBump<A, S>
where
    S: BumpAllocatorSettings,
{
    /// Creates a bump allocator that has not allocated any chunk yet.
    ///
    /// Only available when the settings do not guarantee an allocated chunk.
    #[inline(always)]
    pub(crate) const fn new() -> Self
    where
        S: BumpAllocatorSettings<GuaranteedAllocated = False>,
    {
        Self {
            chunk: Cell::new(RawChunk::UNALLOCATED),
        }
    }

    /// Returns `true` if the current chunk is the `CLAIMED` dummy chunk.
    #[inline(always)]
    pub(crate) fn is_claimed(&self) -> bool {
        self.chunk.get().is_claimed()
    }

    /// Returns the base allocator, or `None` if the current chunk is a dummy
    /// (claimed or unallocated) chunk.
    ///
    /// The caller must ensure the returned reference is dead before calling [`manually_drop`](Self::manually_drop).
    #[inline(always)]
    pub(crate) fn allocator<'a>(&self) -> Option<&'a A> {
        match self.chunk.get().classify() {
            ChunkClass::Claimed | ChunkClass::Unallocated => None,
            ChunkClass::NonDummy(chunk) => Some(chunk.allocator()),
        }
    }
}
72
impl<A, S> RawBump<A, S>
where
    A: Allocator,
    S: BumpAllocatorSettings,
{
    /// Creates a bump allocator with one freshly allocated chunk of `size`.
    #[inline(always)]
    pub(crate) fn with_size<E: ErrorBehavior>(size: ChunkSize<A, S>, allocator: A) -> Result<Self, E> {
        Ok(Self {
            chunk: Cell::new(NonDummyChunk::new::<E>(size, None, allocator)?.raw),
        })
    }

    /// Deallocates every chunk except the last one and resets the bump
    /// pointer to that chunk's start.
    ///
    /// Does nothing if the current chunk is a dummy (claimed or unallocated) chunk.
    #[inline(always)]
    pub(crate) fn reset(&self) {
        let Some(mut chunk) = self.chunk.get().as_non_dummy() else {
            return;
        };

        unsafe {
            // Free every chunk before the current one...
            chunk.for_each_prev(|chunk| chunk.deallocate());

            // ...then walk forward, freeing each chunk until we sit on the last one,
            // which is the only chunk kept alive.
            while let Some(next) = chunk.next() {
                chunk.deallocate();
                chunk = next;
            }

            // All previous chunks are gone, so the kept chunk is now the first.
            chunk.header.as_ref().prev.set(None);
        }

        chunk.reset();

        self.chunk.set(chunk.raw);
    }

    /// Resets the bump pointer to the very start (of the very first chunk).
    /// No chunks are deallocated.
    #[inline]
    pub(crate) fn reset_to_start(&self) {
        if let Some(mut chunk) = self.chunk.get().as_non_dummy() {
            while let Some(prev) = chunk.prev() {
                chunk = prev;
            }

            chunk.reset();

            self.chunk.set(chunk.raw);
        }
    }

    /// Deallocates all chunks of this bump allocator.
    ///
    /// # Safety
    /// - self must not be used after calling this.
    pub(crate) unsafe fn manually_drop(&mut self) {
        match self.chunk.get().classify() {
            ChunkClass::Claimed => {
                // The user must have somehow leaked a `BumpClaimGuard`.
                // Thus the bump allocator has been leaked, there is nothing to do here.
            }
            ChunkClass::Unallocated => (),
            ChunkClass::NonDummy(chunk) => unsafe {
                chunk.for_each_prev(|chunk| chunk.deallocate());
                chunk.for_each_next(|chunk| chunk.deallocate());
                chunk.deallocate();
            },
        }
    }
}
138
impl<A, S> RawBump<A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    /// Takes ownership of the current chunk, leaving the `CLAIMED` dummy
    /// chunk behind. Hand the chunk back with [`reclaim`](Self::reclaim).
    ///
    /// # Panics
    /// Panics if this bump allocator is already claimed.
    #[inline(always)]
    pub(crate) fn claim(&self) -> RawBump<A, S> {
        const {
            assert!(S::CLAIMABLE, "`claim` is only available with the setting `CLAIMABLE = true`");
        }

        // Out-of-line panic so the happy path stays small.
        #[cold]
        #[inline(never)]
        fn already_claimed() {
            panic!("bump allocator is already claimed");
        }

        if self.chunk.get().is_claimed() {
            already_claimed();
        }

        RawBump {
            chunk: Cell::new(self.chunk.replace(RawChunk::<A, S>::CLAIMED)),
        }
    }

    /// Returns the chunk taken by [`claim`](Self::claim) back to this bump allocator.
    #[inline(always)]
    pub(crate) fn reclaim(&self, claimant: &RawBump<A, S>) {
        self.chunk.set(claimant.chunk.get());
    }

    /// Creates a checkpoint of the current chunk and bump position.
    #[inline(always)]
    pub(crate) fn checkpoint(&self) -> Checkpoint {
        Checkpoint::new(self.chunk.get())
    }

    /// Resets chunk and bump position to a previously created checkpoint.
    ///
    /// # Safety
    /// The checkpoint must be valid for this bump allocator; the debug
    /// assertions below spell out the individual conditions.
    #[inline]
    pub(crate) unsafe fn reset_to(&self, checkpoint: Checkpoint) {
        // If the checkpoint was created when the bump allocator had no allocated chunk
        // then the chunk pointer will point to the unallocated chunk header.
        //
        // In such cases we reset the bump pointer to the very start of the very first chunk.
        //
        // We don't check if the chunk pointer points to the unallocated chunk header
        // if the bump allocator is `GUARANTEED_ALLOCATED`. We are allowed to not do this check
        // because of this safety condition of `reset_to`:
        // > the checkpoint must not have been created by an `!GUARANTEED_ALLOCATED` when self is `GUARANTEED_ALLOCATED`
        if !S::GUARANTEED_ALLOCATED && checkpoint.chunk == ChunkHeader::unallocated::<S>() {
            self.reset_to_start();
            return;
        }

        #[cfg(debug_assertions)]
        {
            assert_ne!(
                checkpoint.chunk,
                ChunkHeader::claimed::<S>(),
                "the checkpoint must not have been created by a claimed bump allocator"
            );

            assert_ne!(
                self.chunk.get().header.cast(),
                ChunkHeader::claimed::<S>(),
                "this function must not be called on a claimed bump allocator"
            );

            assert_ne!(
                checkpoint.chunk,
                ChunkHeader::unallocated::<S>(),
                "the checkpoint must not have been created by a `!GUARANTEED_ALLOCATED` when self is `GUARANTEED_ALLOCATED`"
            );

            let chunk = self
                .stats()
                .small_to_big()
                .find(|chunk| chunk.header() == checkpoint.chunk.cast())
                .expect("this checkpoint does not refer to any chunk in this bump allocator");

            assert!(
                chunk.chunk.contains_addr_or_end(checkpoint.address.get()),
                "checkpoint address does not point within its chunk"
            );
        }

        unsafe {
            checkpoint.reset_within_chunk();

            self.chunk.set(RawChunk {
                header: checkpoint.chunk.cast(),
                marker: PhantomData,
            });
        }
    }

    /// Ensures that at least `additional` bytes are available across the
    /// current and following chunks, appending a new chunk if necessary.
    #[inline(always)]
    pub(crate) fn reserve<E: ErrorBehavior>(&self, additional: usize) -> Result<(), E>
    where
        A: BaseAllocator<S::GuaranteedAllocated>,
    {
        let chunk = self.chunk.get();

        match chunk.classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => {
                let Ok(layout) = Layout::from_size_align(additional, 1) else {
                    return Err(E::capacity_overflow());
                };

                let new_chunk = NonDummyChunk::<A, S>::new(
                    ChunkSize::<A, S>::from_capacity(layout).ok_or_else(E::capacity_overflow)?,
                    None,
                    // When this bump allocator is unallocated, `A` is guaranteed to implement `Default`,
                    // `default_or_panic` will not panic.
                    A::default_or_panic(),
                )?;

                self.chunk.set(new_chunk.raw);
                Ok(())
            }
            ChunkClass::NonDummy(mut chunk) => {
                let mut additional = additional;

                // Subtract what the current chunk can still hold.
                if let Some(rest) = additional.checked_sub(chunk.remaining()) {
                    additional = rest;
                } else {
                    return Ok(());
                }

                // Following chunks count with their full capacity, since
                // `in_another_chunk` resets them before reuse.
                while let Some(next) = chunk.next() {
                    chunk = next;

                    if let Some(rest) = additional.checked_sub(chunk.capacity()) {
                        additional = rest;
                    } else {
                        return Ok(());
                    }
                }

                if additional == 0 {
                    return Ok(());
                }

                let Ok(layout) = Layout::from_size_align(additional, 1) else {
                    return Err(E::capacity_overflow());
                };

                chunk.append_for(layout).map(drop)
            }
        }
    }

    /// Allocates `layout`, falling back to the slow path if the current
    /// chunk does not have enough space.
    #[inline(always)]
    pub(crate) fn alloc<B: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, B> {
        match self.chunk.get().alloc(CustomLayout(layout)) {
            Some(ptr) => Ok(ptr),
            None => self.alloc_in_another_chunk(layout),
        }
    }

    /// Allocates space for a single `T`.
    #[inline(always)]
    pub(crate) fn alloc_sized<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
        match self.chunk.get().alloc(SizedLayout::new::<T>()) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_sized_in_another_chunk::<E, T>() {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    /// Allocates space for `len` values of type `T`.
    #[inline(always)]
    pub(crate) fn alloc_slice<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<T>, E> {
        let Ok(layout) = ArrayLayout::array::<T>(len) else {
            return Err(E::capacity_overflow());
        };

        match self.chunk.get().alloc(layout) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_slice_in_another_chunk::<E, T>(len) {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    /// Allocates space for a copy of the slice `value` (it is not copied here).
    #[inline(always)]
    pub(crate) fn alloc_slice_for<E: ErrorBehavior, T>(&self, value: &[T]) -> Result<NonNull<T>, E> {
        let layout = ArrayLayout::for_value(value);

        match self.chunk.get().alloc(layout) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.alloc_slice_in_another_chunk::<E, T>(value.len()) {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    /// Like [`alloc_sized`](Self::alloc_sized) but does not move the bump pointer.
    #[inline(always)]
    pub(crate) fn prepare_sized_allocation<B: ErrorBehavior, T>(&self) -> Result<NonNull<T>, B> {
        match self.chunk.get().prepare_allocation(SizedLayout::new::<T>()) {
            Some(ptr) => Ok(ptr.cast()),
            None => match self.prepare_allocation_in_another_chunk::<B, T>() {
                Ok(ptr) => Ok(ptr.cast()),
                Err(err) => Err(err),
            },
        }
    }

    /// Prepares (without committing) an allocation of at least `min_cap` `T`s
    /// and returns it as a slice pointer anchored at the allocation's start.
    #[inline(always)]
    pub(crate) fn prepare_slice_allocation<B: ErrorBehavior, T>(&self, min_cap: usize) -> Result<NonNull<[T]>, B> {
        let range = self.prepare_allocation_range::<B, T>(min_cap)?;

        // NB: We can't use `offset_from_unsigned`, because the size is not a multiple of `T`'s.
        let cap = unsafe { non_null::byte_offset_from_unsigned(range.end, range.start) } / T::SIZE;

        let ptr = if S::UP { range.start } else { unsafe { range.end.sub(cap) } };

        Ok(NonNull::slice_from_raw_parts(ptr, cap))
    }

    /// Prepares (without committing) an allocation of at least `min_cap` `T`s
    /// and returns its end pointer plus the capacity in `T`s.
    #[inline(always)]
    pub(crate) fn prepare_slice_allocation_rev<B: ErrorBehavior, T>(
        &self,
        min_cap: usize,
    ) -> Result<(NonNull<T>, usize), B> {
        let range = self.prepare_allocation_range::<B, T>(min_cap)?;

        // NB: We can't use `offset_from_unsigned`, because the size is not a multiple of `T`'s.
        let cap = unsafe { non_null::byte_offset_from_unsigned(range.end, range.start) } / T::SIZE;

        let end = if S::UP { unsafe { range.start.add(cap) } } else { range.end };

        Ok((end, cap))
    }

    /// Returns a pointer range.
    /// The start and end pointers are aligned.
    /// But `end - start` is *not* a multiple of `size_of::<T>()`.
    /// So `end.offset_from_unsigned(start)` may not be used!
    #[inline(always)]
    fn prepare_allocation_range<B: ErrorBehavior, T>(&self, cap: usize) -> Result<Range<NonNull<T>>, B> {
        let Ok(layout) = ArrayLayout::array::<T>(cap) else {
            return Err(B::capacity_overflow());
        };

        let range = match self.chunk.get().prepare_allocation_range(layout) {
            Some(ptr) => ptr,
            None => self.prepare_allocation_range_in_another_chunk(layout)?,
        };

        Ok(range.start.cast::<T>()..range.end.cast::<T>())
    }

    /// Allocation slow path.
    /// The active chunk must *not* have space for `layout`.
    #[cold]
    #[inline(never)]
    pub(crate) fn alloc_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, E> {
        // SAFETY: `RawChunk::alloc` succeeds on a chunk appended for `layout`.
        unsafe { self.in_another_chunk(CustomLayout(layout), RawChunk::alloc) }
    }

    /// Slow path of [`alloc_sized`](Self::alloc_sized).
    #[cold]
    #[inline(never)]
    fn alloc_sized_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E> {
        self.alloc_in_another_chunk(Layout::new::<T>())
    }

    /// Slow path of [`alloc_slice`](Self::alloc_slice) and [`alloc_slice_for`](Self::alloc_slice_for).
    #[cold]
    #[inline(never)]
    fn alloc_slice_in_another_chunk<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<u8>, E> {
        let Ok(layout) = Layout::array::<T>(len) else {
            return Err(E::capacity_overflow());
        };

        self.alloc_in_another_chunk(layout)
    }

    /// Slow path of [`prepare_sized_allocation`](Self::prepare_sized_allocation).
    #[cold]
    #[inline(never)]
    pub(crate) fn prepare_allocation_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E> {
        let layout = CustomLayout(Layout::new::<T>());

        // SAFETY: `RawChunk::prepare_allocation` succeeds on a chunk appended for `layout`.
        unsafe { self.in_another_chunk(layout, RawChunk::prepare_allocation) }
    }

    /// Slow path of [`prepare_allocation_range`](Self::prepare_allocation_range).
    #[cold]
    #[inline(never)]
    fn prepare_allocation_range_in_another_chunk<E: ErrorBehavior>(
        &self,
        layout: ArrayLayout,
    ) -> Result<Range<NonNull<u8>>, E> {
        // SAFETY: `RawChunk::prepare_allocation_range` succeeds on a chunk appended for `layout`.
        unsafe { self.in_another_chunk(layout, RawChunk::prepare_allocation_range) }
    }

    /// Retries `f` on the following (reset) chunks, appending a fresh chunk
    /// that fits `layout` if none of them satisfy `f`.
    ///
    /// # Safety
    ///
    /// `f` on the new chunk created by `RawChunk::append_for` with the layout `layout` must return `Some`.
    #[inline(always)]
    pub(crate) unsafe fn in_another_chunk<E: ErrorBehavior, R, L: LayoutProps>(
        &self,
        layout: L,
        mut f: impl FnMut(RawChunk<A, S>, L) -> Option<R>,
    ) -> Result<R, E> {
        let new_chunk: NonDummyChunk<A, S> = match self.chunk.get().classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => NonDummyChunk::new(
                ChunkSize::from_capacity(*layout).ok_or_else(E::capacity_overflow)?,
                None,
                // When this bump allocator is unallocated, `A` is guaranteed to implement `Default`,
                // `default_or_panic` will not panic.
                A::default_or_panic(),
            ),
            ChunkClass::NonDummy(mut chunk) => {
                while let Some(next_chunk) = chunk.next() {
                    chunk = next_chunk;

                    // We don't reset the chunk position when we leave a scope, so we need to do it here.
                    chunk.reset();

                    self.chunk.set(chunk.raw);

                    if let Some(ptr) = f(chunk.raw, layout) {
                        return Ok(ptr);
                    }
                }

                // there is no chunk that fits, we need a new chunk
                chunk.append_for(*layout)
            }
        }?;

        self.chunk.set(new_chunk.raw);

        match f(new_chunk.raw, layout) {
            Some(ptr) => Ok(ptr),
            _ => {
                // SAFETY: We just appended a chunk for that specific layout, it must have enough space.
                // We don't panic here so we don't produce any panic code when using `try_` apis.
                // We check for that in `test-no-panic`.
                unsafe { core::hint::unreachable_unchecked() }
            }
        }
    }

    /// Ensures this bump allocator has an allocated (non-dummy, non-claimed) chunk.
    pub(crate) fn make_allocated<E: ErrorBehavior>(&self) -> Result<(), E> {
        match self.chunk.get().classify() {
            ChunkClass::Claimed => Err(E::claimed()),
            ChunkClass::Unallocated => {
                // When this bump allocator is unallocated, `A` is guaranteed to implement `Default`,
                // `default_or_panic` will not panic.
                let new_chunk = NonDummyChunk::new(ChunkSize::MINIMUM, None, A::default_or_panic())?;
                self.chunk.set(new_chunk.raw);
                Ok(())
            }
            ChunkClass::NonDummy(_) => Ok(()),
        }
    }
}
498
impl<A, S> RawBump<A, S>
where
    S: BumpAllocatorSettings,
{
    /// Returns a type which provides statistics about the memory usage of the bump allocator.
    #[must_use]
    #[inline(always)]
    pub fn stats<'a>(&self) -> Stats<'a, A, S> {
        Stats::from_raw_chunk(self.chunk.get())
    }

    /// Aligns the bump position to `ALIGN`.
    #[inline(always)]
    pub(crate) fn align<const ALIGN: usize>(&self)
    where
        MinimumAlignment<ALIGN>: SupportedMinimumAlignment,
    {
        self.align_to::<MinimumAlignment<ALIGN>>();
    }

    /// Aligns the bump position to `MinimumAlignment::VALUE` unless the
    /// settings' minimum alignment already guarantees it.
    #[inline(always)]
    pub(crate) fn align_to<MinimumAlignment>(&self)
    where
        MinimumAlignment: SupportedMinimumAlignment,
    {
        if MinimumAlignment::VALUE > S::MIN_ALIGN {
            // a dummy chunk is always aligned
            if let Some(chunk) = self.chunk.get().as_non_dummy() {
                let pos = chunk.pos().addr().get();
                let addr = align_pos(S::UP, MinimumAlignment::VALUE, pos);
                unsafe { chunk.set_pos_addr(addr) };
            }
        }
    }

    /// Checks that this bump allocator may take on the settings `NewS`
    /// (panicking otherwise) and aligns the bump position accordingly.
    pub(crate) fn ensure_satisfies_settings<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");
        }

        if !NewS::CLAIMABLE && self.chunk.get().is_claimed() {
            error_behavior::panic::claimed();
        }

        if NewS::GUARANTEED_ALLOCATED && self.chunk.get().is_unallocated() {
            error_behavior::panic::unallocated();
        }

        self.align_to::<NewS::MinimumAlignment>();
    }

    /// Like [`ensure_satisfies_settings`](Self::ensure_satisfies_settings)
    /// but for a scope taken by value.
    pub(crate) fn ensure_scope_satisfies_settings<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");

            assert!(
                NewS::MIN_ALIGN >= S::MIN_ALIGN,
                "can't decrease minimum alignment using `BumpScope::with_settings`"
            );
        }

        if !NewS::CLAIMABLE && self.chunk.get().is_claimed() {
            error_behavior::panic::claimed();
        }

        // A scope by value is always allocated, created by `(try_)by_value`.

        self.align_to::<NewS::MinimumAlignment>();
    }

    /// Compile-time-only check that a shared borrow may use the settings `NewS`.
    #[expect(clippy::unused_self)]
    pub(crate) fn ensure_satisfies_settings_for_borrow<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");

            assert!(
                NewS::MIN_ALIGN == S::MIN_ALIGN,
                "can't change minimum alignment using `Bump(Scope)::borrow_with_settings`"
            );

            assert!(
                NewS::CLAIMABLE == S::CLAIMABLE,
                "can't change claimable property using `Bump(Scope)::borrow_with_settings`"
            );

            // A reference to a guaranteed-allocated `Bump(Scope)` can never become unallocated.
            assert!(
                NewS::GUARANTEED_ALLOCATED <= S::GUARANTEED_ALLOCATED,
                "can't increase guaranteed-allocated property using `Bump(Scope)::borrow_with_settings`"
            );
        }
    }

    /// Checks that a mutable borrow may use the settings `NewS`
    /// and aligns the bump position accordingly.
    pub(crate) fn ensure_satisfies_settings_for_borrow_mut<NewS>(&self)
    where
        NewS: BumpAllocatorSettings,
    {
        const {
            assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");

            assert!(
                NewS::MIN_ALIGN >= S::MIN_ALIGN,
                "can't decrease minimum alignment using `Bump(Scope)::borrow_mut_with_settings`"
            );

            assert!(
                NewS::CLAIMABLE == S::CLAIMABLE,
                "can't change claimable property using `Bump(Scope)::borrow_mut_with_settings`"
            );

            assert!(
                NewS::GUARANTEED_ALLOCATED == S::GUARANTEED_ALLOCATED,
                "can't change guaranteed-allocated property using `Bump(Scope)::borrow_mut_with_settings`"
            );
        }

        self.align_to::<NewS::MinimumAlignment>();
    }

    /// Turns this bump allocator into its raw chunk header pointer.
    #[inline]
    pub(crate) fn into_raw(self) -> NonNull<()> {
        self.chunk.get().header.cast()
    }

    /// Recreates a bump allocator from a raw chunk header pointer.
    ///
    /// # Safety
    /// NOTE(review): presumably `ptr` must come from [`into_raw`](Self::into_raw)
    /// of a bump allocator with matching `A` and `S` — confirm the exact contract.
    #[inline]
    pub(crate) unsafe fn from_raw(ptr: NonNull<()>) -> Self {
        Self {
            chunk: Cell::new(RawChunk {
                header: ptr.cast(),
                marker: PhantomData,
            }),
        }
    }
}
641
/// A possibly-dummy chunk pointer: the raw representation of a bump allocator's
/// current chunk.
///
/// `header` points either at a real `ChunkHeader` allocated from the base allocator,
/// or at one of the `UNALLOCATED` / `CLAIMED` dummy headers. `S` only influences
/// behavior through its settings; no data of type `S` is stored.
pub(crate) struct RawChunk<A, S> {
    pub(crate) header: NonNull<ChunkHeader<A>>,
    pub(crate) marker: PhantomData<fn() -> (A, S)>,
}
646
647impl<A, S> Clone for RawChunk<A, S> {
648    fn clone(&self) -> Self {
649        *self
650    }
651}
652
// A `RawChunk` is just a pointer plus a zero-sized marker, so it is trivially copyable.
impl<A, S> Copy for RawChunk<A, S> {}
654
/// A [`RawChunk`] that is known *not* to be one of the dummy
/// (`CLAIMED` / `UNALLOCATED`) chunks, i.e. its header was allocated
/// from the base allocator.
pub(crate) struct NonDummyChunk<A, S> {
    raw: RawChunk<A, S>,
}
658
// Copyable for the same reason as `RawChunk`: it is just a wrapped pointer.
impl<A, S> Copy for NonDummyChunk<A, S> {}
660
661impl<A, S> Clone for NonDummyChunk<A, S> {
662    fn clone(&self) -> Self {
663        *self
664    }
665}
666
// Lets all `RawChunk` methods be called directly on a `NonDummyChunk`.
impl<A, S> Deref for NonDummyChunk<A, S> {
    type Target = RawChunk<A, S>;

    fn deref(&self) -> &Self::Target {
        &self.raw
    }
}
674
impl<A, S> RawChunk<A, S>
where
    S: BumpAllocatorSettings,
{
    /// The dummy chunk of a bump allocator that has no allocated chunk yet.
    pub(crate) const UNALLOCATED: Self = {
        assert!(!S::GUARANTEED_ALLOCATED);

        Self {
            header: ChunkHeader::unallocated::<S>().cast(),
            marker: PhantomData,
        }
    };

    /// The dummy chunk of a claimed bump allocator.
    const CLAIMED: Self = {
        assert!(S::CLAIMABLE);

        Self {
            header: ChunkHeader::claimed::<S>().cast(),
            marker: PhantomData,
        }
    };

    /// Returns the (possibly dummy) chunk header pointer.
    #[inline(always)]
    pub(crate) fn header(self) -> NonNull<ChunkHeader<A>> {
        self.header
    }

    /// Whether this is the `CLAIMED` dummy chunk.
    /// Compiles to `false` when the settings are not claimable.
    #[inline(always)]
    fn is_claimed(self) -> bool {
        S::CLAIMABLE && self.header.cast() == ChunkHeader::claimed::<S>()
    }

    /// Whether this is the `UNALLOCATED` dummy chunk.
    /// Compiles to `false` when the settings guarantee an allocated chunk.
    #[inline(always)]
    pub(crate) fn is_unallocated(self) -> bool {
        !S::GUARANTEED_ALLOCATED && self.header.cast() == ChunkHeader::unallocated::<S>()
    }

    /// Classifies this chunk as claimed, unallocated or non-dummy.
    #[inline(always)]
    pub(crate) fn classify(self) -> ChunkClass<A, S> {
        if self.is_claimed() {
            return ChunkClass::Claimed;
        }

        if self.is_unallocated() {
            return ChunkClass::Unallocated;
        }

        ChunkClass::NonDummy(NonDummyChunk { raw: self })
    }

    /// Returns `Some` if this chunk is not a dummy chunk.
    #[inline(always)]
    pub(crate) fn as_non_dummy(self) -> Option<NonDummyChunk<A, S>> {
        match self.classify() {
            ChunkClass::Claimed | ChunkClass::Unallocated => None,
            ChunkClass::NonDummy(chunk) => Some(chunk),
        }
    }

    /// Attempts to allocate a block of memory.
    ///
    /// On success, returns a [`NonNull<u8>`] meeting the size and alignment guarantees of `layout`.
    #[inline(always)]
    pub(crate) fn alloc(self, layout: impl LayoutProps) -> Option<NonNull<u8>> {
        let props = self.bump_props(layout);

        if S::UP {
            let BumpUp { new_pos, ptr } = bump_up(props)?;

            // SAFETY: allocations never succeed for a dummy chunk
            unsafe {
                let chunk = self.as_non_dummy_unchecked();
                chunk.set_pos_addr(new_pos);
                Some(chunk.content_ptr_from_addr(ptr))
            }
        } else {
            let ptr = bump_down(props)?;

            // SAFETY: allocations never succeed for a dummy chunk
            unsafe {
                let chunk = self.as_non_dummy_unchecked();
                chunk.set_pos_addr(ptr);
                Some(chunk.content_ptr_from_addr(ptr))
            }
        }
    }

    /// Prepares allocation for a block of memory.
    ///
    /// On success, returns a [`NonNull<u8>`] meeting the size and alignment guarantees of `layout`.
    ///
    /// This is like [`alloc`](Self::alloc), except that it won't change the bump pointer.
    #[inline(always)]
    pub(crate) fn prepare_allocation(self, layout: impl LayoutProps) -> Option<NonNull<u8>> {
        let props = self.bump_props(layout);

        let ptr = if S::UP { bump_up(props)?.ptr } else { bump_down(props)? };

        // SAFETY: allocations never succeed for a dummy chunk
        unsafe {
            let chunk = self.as_non_dummy_unchecked();
            Some(chunk.content_ptr_from_addr(ptr))
        }
    }

    /// Returns the rest of the capacity of the chunk.
    /// This does not change the position within the chunk.
    ///
    /// This is used in [`MutBumpVec`] where we mutably borrow bump access.
    /// In this case we do not want to update the bump pointer. This way
    /// neither reallocations (a new chunk) nor dropping needs to move the bump pointer.
    /// The bump pointer is only updated when we call [`into_slice`].
    ///
    /// - `range.start` and `range.end` are aligned.
    /// - `layout.size` must not be zero
    /// - `layout.size` must be a multiple of `layout.align`
    ///
    /// [`MutBumpVec`]: crate::MutBumpVec
    /// [`into_slice`]: crate::MutBumpVec::into_slice
    #[inline(always)]
    pub(crate) fn prepare_allocation_range(self, layout: impl LayoutProps) -> Option<Range<NonNull<u8>>> {
        let props = self.bump_props(layout);

        let range = if S::UP {
            bump_prepare_up(props)
        } else {
            bump_prepare_down(props)
        }?;

        // SAFETY: allocations never succeed for a dummy chunk
        unsafe {
            let chunk = self.as_non_dummy_unchecked();
            Some(chunk.content_ptr_from_addr_range(range))
        }
    }

    /// Gathers the start/end addresses and layout properties used by the
    /// `bump_up` / `bump_down` family of functions.
    #[inline(always)]
    fn bump_props<L>(self, layout: L) -> BumpProps
    where
        L: LayoutProps,
    {
        let pos = self.pos().addr().get();
        let end = unsafe { self.header.as_ref() }.end.addr().get();

        let start = if S::UP { pos } else { end };
        let end = if S::UP { end } else { pos };

        // Dummy chunk headers are set up with `start > end`, which makes every
        // bump attempt fail and fall back to the "another chunk" slow path.
        #[cfg(debug_assertions)]
        if !matches!(self.classify(), ChunkClass::NonDummy(_)) {
            assert!(start > end);
        }

        BumpProps {
            start,
            end,
            layout: *layout,
            min_align: S::MIN_ALIGN,
            align_is_const: L::ALIGN_IS_CONST,
            size_is_const: L::SIZE_IS_CONST,
            size_is_multiple_of_align: L::SIZE_IS_MULTIPLE_OF_ALIGN,
        }
    }

    /// Returns the current bump position stored in the chunk header.
    #[inline(always)]
    pub(crate) fn pos(self) -> NonNull<u8> {
        unsafe { self.header.as_ref().pos.get() }
    }

    /// # Safety
    /// This chunk must not be one of the dummy (claimed/unallocated) chunks.
    #[inline(always)]
    pub(crate) unsafe fn as_non_dummy_unchecked(self) -> NonDummyChunk<A, S> {
        debug_assert!(matches!(self.classify(), ChunkClass::NonDummy(_)));
        NonDummyChunk { raw: self }
    }
}
848
849// Methods only available for a non-dummy chunk.
850impl<A, S> NonDummyChunk<A, S>
851where
852    S: BumpAllocatorSettings,
853{
    /// Allocates a chunk of `chunk_size` from `allocator`, writes its header
    /// (placed at the low end for upwards bumping, at the high end otherwise)
    /// and links it to `prev`.
    pub(crate) fn new<E>(
        chunk_size: ChunkSize<A, S>,
        prev: Option<NonDummyChunk<A, S>>,
        allocator: A,
    ) -> Result<NonDummyChunk<A, S>, E>
    where
        A: Allocator,
        E: ErrorBehavior,
    {
        let layout = chunk_size.layout().ok_or_else(E::capacity_overflow)?;

        let allocation = match allocator.allocate(layout) {
            Ok(ok) => ok,
            Err(AllocError) => return Err(E::allocation(layout)),
        };

        let ptr = non_null::as_non_null_ptr(allocation);
        let size = allocation.len();

        // Note that the allocation's size may be larger than
        // the requested layout's size.
        //
        // We could be ignoring the allocation's size and just use
        // our layout's size, but then we would be wasting
        // the extra space the allocator might have given us.
        //
        // This returned size does not satisfy our invariants though
        // so we need to align it first.
        //
        // Follow this method for details.
        let size = ChunkSize::<A, S>::align_allocation_size(size);

        debug_assert!(size >= layout.size());
        debug_assert!(size % MIN_CHUNK_ALIGN == 0);

        let prev = Cell::new(prev.map(|c| c.header));
        let next = Cell::new(None);

        let header = unsafe {
            if S::UP {
                // Upwards bumping: the header sits at the start of the allocation,
                // the content region follows it, `end` marks the allocation's end.
                let header = ptr.cast::<ChunkHeader<A>>();

                header.write(ChunkHeader {
                    pos: Cell::new(header.add(1).cast()),
                    end: ptr.add(size),
                    prev,
                    next,
                    allocator,
                });

                header
            } else {
                // Downwards bumping: the header sits at the end of the allocation,
                // the content region precedes it, `end` marks the allocation's start.
                let header = ptr.add(size).cast::<ChunkHeader<A>>().sub(1);

                header.write(ChunkHeader {
                    pos: Cell::new(header.cast()),
                    end: ptr,
                    prev,
                    next,
                    allocator,
                });

                header
            }
        };

        Ok(NonDummyChunk {
            raw: RawChunk {
                header,
                marker: PhantomData,
            },
        })
    }
927
928    /// # Panic
929    ///
930    /// [`self.next`](RawChunk::next) must return `None`
931    pub(crate) fn append_for<B: ErrorBehavior>(self, layout: Layout) -> Result<Self, B>
932    where
933        A: Allocator + Clone,
934    {
935        debug_assert!(self.next().is_none());
936
937        let required_size = ChunkSizeHint::for_capacity(layout).ok_or_else(B::capacity_overflow)?;
938        let grown_size = self.grow_size()?;
939        let size = required_size.max(grown_size).calc_size().ok_or_else(B::capacity_overflow)?;
940        let allocator = unsafe { self.header.as_ref().allocator.clone() };
941        let new_chunk = Self::new::<B>(size, Some(self), allocator)?;
942
943        unsafe {
944            self.header.as_ref().next.set(Some(new_chunk.header));
945        }
946
947        Ok(new_chunk)
948    }
949
950    #[inline(always)]
951    fn grow_size<B: ErrorBehavior>(self) -> Result<ChunkSizeHint<A, S>, B> {
952        let Some(size) = self.size().get().checked_mul(2) else {
953            return Err(B::capacity_overflow());
954        };
955
956        Ok(ChunkSizeHint::new(size))
957    }
958
959    /// The caller must ensure the returned reference is dead before calling [`deallocate`](Self::deallocate).
960    #[inline(always)]
961    pub(crate) fn allocator<'a>(self) -> &'a A {
962        unsafe { &self.header.as_ref().allocator }
963    }
964
965    #[inline(always)]
966    pub(crate) fn prev(self) -> Option<NonDummyChunk<A, S>> {
967        unsafe {
968            Some(NonDummyChunk {
969                raw: RawChunk {
970                    header: self.header.as_ref().prev.get()?,
971                    marker: PhantomData,
972                },
973            })
974        }
975    }
976
977    #[inline(always)]
978    pub(crate) fn next(self) -> Option<NonDummyChunk<A, S>> {
979        unsafe {
980            Some(NonDummyChunk {
981                raw: RawChunk {
982                    header: self.header.as_ref().next.get()?,
983                    marker: PhantomData,
984                },
985            })
986        }
987    }
988
989    #[inline(always)]
990    pub(crate) fn size(self) -> NonZeroUsize {
991        let start = self.chunk_start().addr().get();
992        let end = self.chunk_end().addr().get();
993        unsafe { NonZeroUsize::new_unchecked(end - start) }
994    }
995
996    #[inline(always)]
997    pub(crate) fn capacity(self) -> usize {
998        let start = self.content_start().addr().get();
999        let end = self.content_end().addr().get();
1000        end - start
1001    }
1002
1003    #[inline(always)]
1004    pub(crate) fn allocated(self) -> usize {
1005        let range = self.allocated_range();
1006        let start = range.start.addr().get();
1007        let end = range.end.addr().get();
1008        end - start
1009    }
1010
1011    #[inline(always)]
1012    pub(crate) fn remaining(self) -> usize {
1013        let range = self.remaining_range();
1014        let start = range.start.addr().get();
1015        let end = range.end.addr().get();
1016        end - start
1017    }
1018
1019    #[inline(always)]
1020    fn reset(self) {
1021        unsafe {
1022            if S::UP {
1023                self.set_pos(self.content_start());
1024            } else {
1025                self.set_pos(self.content_end());
1026            }
1027        }
1028    }
1029
1030    #[inline(always)]
1031    pub(crate) fn chunk_start(self) -> NonNull<u8> {
1032        unsafe { if S::UP { self.header.cast() } else { self.header.as_ref().end } }
1033    }
1034
1035    #[inline(always)]
1036    pub(crate) fn chunk_end(self) -> NonNull<u8> {
1037        unsafe {
1038            if S::UP {
1039                self.header.as_ref().end
1040            } else {
1041                self.after_header()
1042            }
1043        }
1044    }
1045
1046    #[inline(always)]
1047    pub(crate) fn content_start(self) -> NonNull<u8> {
1048        if S::UP { self.after_header() } else { self.chunk_start() }
1049    }
1050
1051    #[inline(always)]
1052    pub(crate) fn content_end(self) -> NonNull<u8> {
1053        if S::UP { self.chunk_end() } else { self.header.cast() }
1054    }
1055
1056    /// # Safety
1057    /// [`contains_addr_or_end`](RawChunk::contains_addr_or_end) must return true
1058    #[inline(always)]
1059    pub(crate) unsafe fn set_pos(self, ptr: NonNull<u8>) {
1060        unsafe { self.set_pos_addr(ptr.addr().get()) };
1061    }
1062
1063    /// # Safety
1064    /// [`contains_addr_or_end`](RawChunk::contains_addr_or_end) must return true
1065    #[inline(always)]
1066    pub(crate) unsafe fn set_pos_addr(self, addr: usize) {
1067        unsafe { self.header.as_ref().pos.set(self.content_ptr_from_addr(addr)) };
1068    }
1069
1070    /// Sets the bump position and aligns it to the required `MIN_ALIGN`.
1071    #[inline(always)]
1072    pub(crate) unsafe fn set_pos_addr_and_align(self, pos: usize) {
1073        unsafe {
1074            let addr = align_pos(S::UP, S::MIN_ALIGN, pos);
1075            self.set_pos_addr(addr);
1076        }
1077    }
1078
1079    /// A version of [`set_pos_addr_and_align`](Self::set_pos_addr_and_align) that only aligns the pointer
1080    /// if it the `pos_align` is smaller than the `MIN_ALIGN`.
1081    ///
1082    /// This should only be called when the `pos_align` is statically known so
1083    /// the branch gets optimized out.
1084    #[inline(always)]
1085    pub(crate) unsafe fn set_pos_addr_and_align_from(self, mut pos: usize, pos_align: usize) {
1086        debug_assert_eq!(pos % pos_align, 0);
1087
1088        if pos_align < S::MIN_ALIGN {
1089            pos = align_pos(S::UP, S::MIN_ALIGN, pos);
1090        }
1091
1092        unsafe { self.set_pos_addr(pos) };
1093    }
1094
1095    /// # Safety
1096    /// [`contains_addr_or_end`](RawChunk::contains_addr_or_end) must return true
1097    #[inline(always)]
1098    unsafe fn content_ptr_from_addr(self, addr: usize) -> NonNull<u8> {
1099        unsafe {
1100            debug_assert!(self.contains_addr_or_end(addr));
1101            let ptr = self.header.cast();
1102            let addr = NonZeroUsize::new_unchecked(addr);
1103            ptr.with_addr(addr)
1104        }
1105    }
1106
1107    #[inline(always)]
1108    pub(crate) unsafe fn content_ptr_from_addr_range(self, range: Range<usize>) -> Range<NonNull<u8>> {
1109        unsafe {
1110            debug_assert!(range.start <= range.end);
1111            let start = self.content_ptr_from_addr(range.start);
1112            let end = self.content_ptr_from_addr(range.end);
1113            start..end
1114        }
1115    }
1116
1117    #[inline(always)]
1118    fn contains_addr_or_end(self, addr: usize) -> bool {
1119        let start = self.content_start().addr().get();
1120        let end = self.content_end().addr().get();
1121        addr >= start && addr <= end
1122    }
1123
1124    #[inline(always)]
1125    fn allocated_range(self) -> Range<NonNull<u8>> {
1126        if S::UP {
1127            self.content_start()..self.pos()
1128        } else {
1129            self.pos()..self.content_end()
1130        }
1131    }
1132
1133    #[inline(always)]
1134    fn remaining_range(self) -> Range<NonNull<u8>> {
1135        if S::UP {
1136            let start = self.pos();
1137            let end = self.content_end();
1138            start..end
1139        } else {
1140            let start = self.content_start();
1141            let end = self.pos();
1142            start..end
1143        }
1144    }
1145
1146    #[inline(always)]
1147    fn after_header(self) -> NonNull<u8> {
1148        unsafe { self.header.add(1).cast() }
1149    }
1150
1151    /// This resolves the next chunk before calling `f`. So calling [`deallocate`](NonDummyChunk::deallocate) on the chunk parameter of `f` is fine.
1152    fn for_each_prev(self, mut f: impl FnMut(NonDummyChunk<A, S>)) {
1153        let mut iter = self.prev();
1154
1155        while let Some(chunk) = iter {
1156            iter = chunk.prev();
1157            f(chunk);
1158        }
1159    }
1160
1161    /// This resolves the next chunk before calling `f`. So calling [`deallocate`](NonDummyChunk::deallocate) on the chunk parameter of `f` is fine.
1162    fn for_each_next(self, mut f: impl FnMut(NonDummyChunk<A, S>)) {
1163        let mut iter = self.next();
1164
1165        while let Some(chunk) = iter {
1166            iter = chunk.next();
1167            f(chunk);
1168        }
1169    }
1170
1171    /// # Safety
1172    /// - self must not be used after calling this.
1173    unsafe fn deallocate(self)
1174    where
1175        A: Allocator,
1176    {
1177        let allocator = unsafe { ptr::read(&raw const self.header.as_ref().allocator) };
1178
1179        let ptr = self.chunk_start();
1180        let layout = self.layout();
1181
1182        unsafe {
1183            allocator.deallocate(ptr, layout);
1184        }
1185    }
1186
1187    #[inline(always)]
1188    pub(crate) fn layout(self) -> Layout {
1189        // SAFETY: this layout fits the one we allocated, which means it must be valid
1190        unsafe { Layout::from_size_align_unchecked(self.size().get(), align_of::<ChunkHeader<A>>()) }
1191    }
1192}
1193
/// The three states a chunk can be in: one of the two dummy chunks
/// (`CLAIMED` or `UNALLOCATED`, see [`RawBump::chunk`]) or a real,
/// allocator-backed chunk.
pub(crate) enum ChunkClass<A, S: BumpAllocatorSettings> {
    /// The `CLAIMED` dummy chunk.
    Claimed,
    /// The `UNALLOCATED` dummy chunk.
    Unallocated,
    /// A real chunk allocated from the base allocator.
    NonDummy(NonDummyChunk<A, S>),
}