bump_scope/traits/bump_allocator_core.rs

1use core::{alloc::Layout, ops::Range, ptr::NonNull};
2
3use crate::{
4    BaseAllocator, Bump, BumpScope, Checkpoint, WithoutDealloc, WithoutShrink,
5    alloc::{AllocError, Allocator},
6    layout::CustomLayout,
7    raw_bump::RawChunk,
8    settings::BumpAllocatorSettings,
9    stats::AnyStats,
10    traits::{assert_dyn_compatible, assert_implements},
11};
12
/// Marker trait that seals [`BumpAllocatorCore`]: since `BumpAllocatorCore`
/// requires `Sealed` as a supertrait, only types with a `Sealed` impl in this
/// crate can implement it.
pub trait Sealed {}

// Sealing propagates through references and the allocator wrappers, mirroring
// the `BumpAllocatorCore` impls below.
impl<B: Sealed + ?Sized> Sealed for &B {}
impl<B: Sealed + ?Sized> Sealed for &mut B {}
impl<B: Sealed> Sealed for WithoutDealloc<B> {}
impl<B: Sealed> Sealed for WithoutShrink<B> {}

impl<A, S> Sealed for Bump<A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
}

impl<A, S> Sealed for BumpScope<'_, A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
}
33
/// A bump allocator.
///
/// This trait provides additional methods and guarantees on top of an [`Allocator`].
///
/// A `BumpAllocatorCore` has laxer safety conditions when using `Allocator` methods:
/// - You can call `grow*`, `shrink` and `deallocate` with pointers that came from a different `BumpAllocatorCore`. In this case:
///   - `grow*` will always allocate a new memory block.
///   - `deallocate` will do nothing.
///   - `shrink` will either do nothing or allocate iff the alignment increases.
/// - Memory blocks can be split.
/// - `shrink` never errors unless the new alignment is greater.
/// - `deallocate` may always be called when the pointer address is less than 16 and the size is 0.
///
/// Those invariants are used here:
/// - Handling of foreign pointers is necessary for implementing [`BumpVec::from_parts`], [`BumpBox::into_box`] and [`Bump(Scope)::dealloc`][Bump::dealloc].
/// - Memory block splitting is necessary for [`split_off`] and [`split_at`].
/// - The non-erroring behavior of `shrink` is necessary for [`BumpAllocatorTyped::shrink_slice`].
/// - `deallocate` with a dangling pointer is used in the drop implementation of [`BumpString`].
///
/// # Safety
///
/// An implementor must support the conditions described above.
///
/// [`BumpVec::from_parts`]: crate::BumpVec::from_parts
/// [`BumpBox::into_box`]: crate::BumpBox::into_box
/// [`split_off`]: crate::BumpVec::split_off
/// [`split_at`]: crate::BumpBox::split_at
/// [`BumpVec`]: crate::BumpVec
/// [`BumpAllocatorTyped::shrink_slice`]: crate::traits::BumpAllocatorTyped::shrink_slice
/// [`BumpString`]: crate::BumpString
pub unsafe trait BumpAllocatorCore: Allocator + Sealed {
    /// Returns a type which provides statistics about the memory usage of the bump allocator.
    #[must_use]
    fn any_stats(&self) -> AnyStats<'_>;

    /// Creates a checkpoint of the current bump position.
    ///
    /// The bump position can be reset to this checkpoint with [`reset_to`].
    ///
    /// [`reset_to`]: BumpAllocatorCore::reset_to
    #[must_use]
    fn checkpoint(&self) -> Checkpoint;

    /// Resets the bump position to a previously created checkpoint.
    /// The memory that has been allocated since then will be reused by future allocations.
    ///
    /// # Safety
    ///
    /// - the checkpoint must have been created by this bump allocator
    /// - the bump allocator must not have been [`reset`] since creation of this checkpoint
    /// - there must be no references to allocations made since creation of this checkpoint
    /// - the checkpoint must not have been created by a `!GUARANTEED_ALLOCATED` bump allocator when `self` is `GUARANTEED_ALLOCATED`
    /// - the bump allocator must be [unclaimed] at the time the checkpoint is created and when this function is called
    ///
    /// [`reset`]: crate::Bump::reset
    /// [unclaimed]: crate::traits::BumpAllocatorScope::claim
    ///
    /// # Examples
    ///
    /// ```
    /// # use bump_scope::Bump;
    /// let bump: Bump = Bump::new();
    /// let checkpoint = bump.checkpoint();
    ///
    /// {
    ///     let hello = bump.alloc_str("hello");
    ///     assert_eq!(bump.stats().allocated(), 5);
    ///     # _ = hello;
    /// }
    ///
    /// unsafe { bump.reset_to(checkpoint); }
    /// assert_eq!(bump.stats().allocated(), 0);
    /// ```
    unsafe fn reset_to(&self, checkpoint: Checkpoint);

    /// Returns true if the bump allocator is currently [claimed].
    ///
    /// [claimed]: crate::traits::BumpAllocatorScope::claim
    #[must_use]
    fn is_claimed(&self) -> bool;

    /// Returns a pointer range of free space in the bump allocator with a size of at least `layout.size()`.
    ///
    /// The start of the range is aligned to `layout.align()`.
    ///
    /// The pointer range takes up as much of the free space of the chunk as possible while satisfying the other conditions.
    ///
    /// # Errors
    /// Errors if the allocation fails.
    fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError>;

    /// Allocate part of the free space returned from a [`prepare_allocation`] call.
    ///
    /// # Safety
    /// - `range` must have been returned from a call to [`prepare_allocation`]
    /// - no allocation, grow, shrink or deallocate must have taken place since then
    /// - no resets must have taken place since then
    /// - `layout` must be less than or equal to the `layout` used when calling
    ///   [`prepare_allocation`], both in size and alignment
    /// - the bump allocator must be [unclaimed] at the time [`prepare_allocation`] was called and when calling this function
    ///
    /// [`prepare_allocation`]: BumpAllocatorCore::prepare_allocation
    /// [unclaimed]: crate::traits::BumpAllocatorScope::claim
    unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8>;

    /// Returns a pointer range of free space in the bump allocator with a size of at least `layout.size()`.
    ///
    /// The end of the range is aligned to `layout.align()`.
    ///
    /// The pointer range takes up as much of the free space of the chunk as possible while satisfying the other conditions.
    ///
    /// # Errors
    /// Errors if the allocation fails.
    fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError>;

    /// Allocate part of the free space returned from a [`prepare_allocation_rev`] call starting at the end.
    ///
    /// # Safety
    /// - `range` must have been returned from a call to [`prepare_allocation_rev`]
    /// - no allocation, grow, shrink or deallocate must have taken place since then
    /// - no resets must have taken place since then
    /// - `layout` must be less than or equal to the `layout` used when calling
    ///   [`prepare_allocation_rev`], both in size and alignment
    /// - the bump allocator must be [unclaimed] at the time [`prepare_allocation_rev`] was called and when calling this function
    ///
    /// [`prepare_allocation_rev`]: BumpAllocatorCore::prepare_allocation_rev
    /// [unclaimed]: crate::traits::BumpAllocatorScope::claim
    unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8>;
}
163
// Compile-time check that `BumpAllocatorCore` can be used as a trait object
// (`dyn BumpAllocatorCore`).
assert_dyn_compatible!(BumpAllocatorCore);

// Compile-time check that the expected family of types implements
// `BumpAllocatorCore`: the concrete allocators, shared and exclusive
// references to them, and the related trait objects.
assert_implements! {
    [BumpAllocatorCore + ?Sized]

    Bump
    &Bump
    &mut Bump

    BumpScope
    &BumpScope
    &mut BumpScope

    dyn BumpAllocatorCore
    &dyn BumpAllocatorCore
    &mut dyn BumpAllocatorCore

    dyn BumpAllocatorCoreScope
    &dyn BumpAllocatorCoreScope
    &mut dyn BumpAllocatorCoreScope

    dyn MutBumpAllocatorCore
    &dyn MutBumpAllocatorCore
    &mut dyn MutBumpAllocatorCore

    dyn MutBumpAllocatorCoreScope
    &dyn MutBumpAllocatorCoreScope
    &mut dyn MutBumpAllocatorCoreScope
}
193
// Generates `BumpAllocatorCore` impls for reference-like types (`&B`,
// `&mut B`) by forwarding every trait method to the pointee `B`.
// Unsafe methods simply re-assert the caller's safety contract and pass
// it through unchanged.
macro_rules! impl_for_ref {
    ($($ty:ty)*) => {
        $(
            unsafe impl<B: BumpAllocatorCore + ?Sized> BumpAllocatorCore for $ty {
                #[inline(always)]
                fn any_stats(&self) -> AnyStats<'_> {
                    B::any_stats(self)
                }

                #[inline(always)]
                fn checkpoint(&self) -> Checkpoint {
                    B::checkpoint(self)
                }

                #[inline(always)]
                unsafe fn reset_to(&self, checkpoint: Checkpoint) {
                    // SAFETY: forwarded; the caller upholds `reset_to`'s contract.
                    unsafe { B::reset_to(self, checkpoint) };
                }

                #[inline(always)]
                fn is_claimed(&self) -> bool {
                    B::is_claimed(self)
                }

                #[inline(always)]
                fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
                    B::prepare_allocation(self, layout)
                }

                #[inline(always)]
                unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
                    // SAFETY: forwarded; the caller upholds `allocate_prepared`'s contract.
                    unsafe { B::allocate_prepared(self, layout, range) }
                }

                #[inline(always)]
                fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
                    B::prepare_allocation_rev(self, layout)
                }

                #[inline(always)]
                unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
                    // SAFETY: forwarded; the caller upholds `allocate_prepared_rev`'s contract.
                    unsafe { B::allocate_prepared_rev(self, layout, range) }
                }
            }
        )*
    };
}

impl_for_ref! {
    &B
    &mut B
}
246
247unsafe impl<B: BumpAllocatorCore> BumpAllocatorCore for WithoutDealloc<B> {
248    #[inline(always)]
249    fn any_stats(&self) -> AnyStats<'_> {
250        B::any_stats(&self.0)
251    }
252
253    #[inline(always)]
254    fn checkpoint(&self) -> Checkpoint {
255        B::checkpoint(&self.0)
256    }
257
258    #[inline(always)]
259    unsafe fn reset_to(&self, checkpoint: Checkpoint) {
260        unsafe { B::reset_to(&self.0, checkpoint) };
261    }
262
263    #[inline(always)]
264    fn is_claimed(&self) -> bool {
265        B::is_claimed(&self.0)
266    }
267
268    #[inline(always)]
269    fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
270        B::prepare_allocation(&self.0, layout)
271    }
272
273    #[inline(always)]
274    unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
275        unsafe { B::allocate_prepared(&self.0, layout, range) }
276    }
277
278    #[inline(always)]
279    fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
280        B::prepare_allocation_rev(&self.0, layout)
281    }
282
283    #[inline(always)]
284    unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
285        unsafe { B::allocate_prepared_rev(&self.0, layout, range) }
286    }
287}
288
289unsafe impl<B: BumpAllocatorCore> BumpAllocatorCore for WithoutShrink<B> {
290    #[inline(always)]
291    fn any_stats(&self) -> AnyStats<'_> {
292        B::any_stats(&self.0)
293    }
294
295    #[inline(always)]
296    fn checkpoint(&self) -> Checkpoint {
297        B::checkpoint(&self.0)
298    }
299
300    #[inline(always)]
301    unsafe fn reset_to(&self, checkpoint: Checkpoint) {
302        unsafe { B::reset_to(&self.0, checkpoint) };
303    }
304
305    #[inline(always)]
306    fn is_claimed(&self) -> bool {
307        B::is_claimed(&self.0)
308    }
309
310    #[inline(always)]
311    fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
312        B::prepare_allocation(&self.0, layout)
313    }
314
315    #[inline(always)]
316    unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
317        unsafe { B::allocate_prepared(&self.0, layout, range) }
318    }
319
320    #[inline(always)]
321    fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
322        B::prepare_allocation_rev(&self.0, layout)
323    }
324
325    #[inline(always)]
326    unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
327        unsafe { B::allocate_prepared_rev(&self.0, layout, range) }
328    }
329}
330
331unsafe impl<A, S> BumpAllocatorCore for Bump<A, S>
332where
333    A: BaseAllocator<S::GuaranteedAllocated>,
334    S: BumpAllocatorSettings,
335{
336    #[inline(always)]
337    fn any_stats(&self) -> AnyStats<'_> {
338        self.as_scope().any_stats()
339    }
340
341    #[inline(always)]
342    fn checkpoint(&self) -> Checkpoint {
343        self.as_scope().checkpoint()
344    }
345
346    #[inline(always)]
347    unsafe fn reset_to(&self, checkpoint: Checkpoint) {
348        unsafe { self.as_scope().reset_to(checkpoint) };
349    }
350
351    #[inline(always)]
352    fn is_claimed(&self) -> bool {
353        self.as_scope().is_claimed()
354    }
355
356    #[inline(always)]
357    fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
358        self.as_scope().prepare_allocation(layout)
359    }
360
361    #[inline(always)]
362    unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
363        unsafe { self.as_scope().allocate_prepared(layout, range) }
364    }
365
366    #[inline(always)]
367    fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
368        self.as_scope().prepare_allocation_rev(layout)
369    }
370
371    #[inline(always)]
372    unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
373        unsafe { self.as_scope().allocate_prepared_rev(layout, range) }
374    }
375}
376
// The actual implementation: `BumpScope` owns the raw chunk state, so this is
// where checkpoints, claims and prepared allocations are really carried out.
unsafe impl<A, S> BumpAllocatorCore for BumpScope<'_, A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    fn any_stats(&self) -> AnyStats<'_> {
        self.stats().into()
    }

    #[inline(always)]
    fn checkpoint(&self) -> Checkpoint {
        self.raw.checkpoint()
    }

    #[inline]
    unsafe fn reset_to(&self, checkpoint: Checkpoint) {
        // SAFETY: the caller upholds the `reset_to` contract documented on the trait.
        unsafe { self.raw.reset_to(checkpoint) }
    }

    #[inline(always)]
    fn is_claimed(&self) -> bool {
        self.raw.is_claimed()
    }

    #[inline(always)]
    fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
        // Cold out-of-line fallback: the current chunk had no fitting free
        // space, so prepare the allocation range in another chunk.
        #[cold]
        #[inline(never)]
        unsafe fn prepare_allocation_in_another_chunk<A, S>(
            this: &BumpScope<'_, A, S>,
            layout: Layout,
        ) -> Result<Range<NonNull<u8>>, AllocError>
        where
            A: BaseAllocator<S::GuaranteedAllocated>,
            S: BumpAllocatorSettings,
        {
            unsafe {
                this.raw
                    .in_another_chunk(CustomLayout(layout), RawChunk::prepare_allocation_range)
            }
        }

        // Fast path: try the current chunk first; only fall back on `None`.
        match self.raw.chunk.get().prepare_allocation_range(CustomLayout(layout)) {
            Some(ptr) => Ok(ptr),
            None => unsafe { prepare_allocation_in_another_chunk(self, layout) },
        }
    }

    #[inline(always)]
    unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
        // Both ends of the prepared range are aligned, and the layout's size
        // is a multiple of its alignment — invariants the caller must uphold.
        debug_assert_eq!(range.start.addr().get() % layout.align(), 0);
        debug_assert_eq!(range.end.addr().get() % layout.align(), 0);
        debug_assert_eq!(layout.size() % layout.align(), 0);

        unsafe {
            // a successful `prepare_allocation` guarantees a non-dummy-chunk
            let chunk = self.raw.chunk.get().as_non_dummy_unchecked();

            if S::UP {
                // Upwards bumping: the allocation keeps the start of the
                // prepared range; the bump position moves just past its end.
                let end = range.start.add(layout.size());
                chunk.set_pos_addr_and_align(end.addr().get());
                range.start
            } else {
                // Downwards bumping: relocate the prepared bytes from the
                // start of the range to its end, then set the bump position
                // to the allocation's new (lower) start address.
                let src = range.start;
                let dst_end = range.end;
                let dst = dst_end.sub(layout.size());
                src.copy_to(dst, layout.size());
                chunk.set_pos_addr_and_align(dst.addr().get());
                dst
            }
        }
    }

    #[inline(always)]
    fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
        // for now the implementation for both methods is the same
        self.prepare_allocation(layout)
    }

    #[inline(always)]
    unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
        // Both ends of the prepared range are aligned, and the layout's size
        // is a multiple of its alignment — invariants the caller must uphold.
        debug_assert_eq!(range.start.addr().get() % layout.align(), 0);
        debug_assert_eq!(range.end.addr().get() % layout.align(), 0);
        debug_assert_eq!(layout.size() % layout.align(), 0);

        unsafe {
            // a successful `prepare_allocation_rev` guarantees a non-dummy-chunk
            let chunk = self.raw.chunk.get().as_non_dummy_unchecked();

            if S::UP {
                // Upwards bumping: the data was written at the end of the
                // prepared range; move it down to the start so the allocation
                // begins at the range's start, then bump past its new end.
                let dst = range.start;
                let dst_end = dst.add(layout.size());

                let src_end = range.end;
                let src = src_end.sub(layout.size());

                src.copy_to(dst, layout.size());

                chunk.set_pos_addr_and_align(dst_end.addr().get());

                dst
            } else {
                // Downwards bumping: the data already sits at the end of the
                // range; just bump the position down to the allocation start.
                let dst_end = range.end;
                let dst = dst_end.sub(layout.size());
                chunk.set_pos_addr_and_align(dst.addr().get());
                dst
            }
        }
    }
}
487}