// bump_scope/traits/bump_allocator_core.rs
1use core::{alloc::Layout, ops::Range, ptr::NonNull};
2
3use crate::{
4    BaseAllocator, Bump, BumpScope, Checkpoint, WithoutDealloc, WithoutShrink,
5    alloc::{AllocError, Allocator},
6    layout::CustomLayout,
7    raw_bump::RawChunk,
8    settings::BumpAllocatorSettings,
9    stats::AnyStats,
10    traits::{assert_dyn_compatible, assert_implements},
11};
12
/// Private sealing trait: `BumpAllocatorCore` requires `Sealed`, so only the
/// types given implementations in this module can implement the trait.
pub trait Sealed {}

// Sealed-ness propagates through references and through the behavior-altering
// wrappers, mirroring the `BumpAllocatorCore` implementations further below.
impl<B: Sealed + ?Sized> Sealed for &B {}
impl<B: Sealed + ?Sized> Sealed for &mut B {}
impl<B: Sealed> Sealed for WithoutDealloc<B> {}
impl<B: Sealed> Sealed for WithoutShrink<B> {}

impl<A, S> Sealed for Bump<A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
}

impl<A, S> Sealed for BumpScope<'_, A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
}
/// A bump allocator.
///
/// This trait provides additional methods and guarantees on top of an [`Allocator`].
///
/// A `BumpAllocatorCore` has laxer safety conditions when using `Allocator` methods:
/// - You can call `grow*`, `shrink` and `deallocate` with pointers that came from a different `BumpAllocatorCore`. In this case:
///   - `grow*` will always allocate a new memory block.
///   - `deallocate` will do nothing.
///   - `shrink` will either do nothing or allocate iff the alignment increases.
/// - Memory blocks can be split.
/// - `shrink` never errors unless the new alignment is greater.
/// - `deallocate` may always be called when the pointer address is less than 16 and the size is 0.
///
/// Those invariants are used here:
/// - Handling of foreign pointers is necessary for implementing [`BumpVec::from_parts`], [`BumpBox::into_box`] and [`Bump(Scope)::dealloc`][Bump::dealloc].
/// - Memory block splitting is necessary for [`split_off`] and [`split_at`].
/// - The non-erroring behavior of `shrink` is necessary for [`BumpAllocatorTyped::shrink_slice`].
/// - `deallocate` with a dangling pointer is used in the drop implementation of [`BumpString`].
///
/// # Safety
///
/// An implementor must support the conditions described above.
///
/// [`BumpVec::from_parts`]: crate::BumpVec::from_parts
/// [`BumpBox::into_box`]: crate::BumpBox::into_box
/// [`split_off`]: crate::BumpVec::split_off
/// [`split_at`]: crate::BumpBox::split_at
/// [`BumpVec`]: crate::BumpVec
/// [`BumpAllocatorTyped::shrink_slice`]: crate::traits::BumpAllocatorTyped::shrink_slice
/// [`BumpString`]: crate::BumpString
pub unsafe trait BumpAllocatorCore: Allocator + Sealed {
    /// Returns a type which provides statistics about the memory usage of the bump allocator.
    #[must_use]
    fn any_stats(&self) -> AnyStats<'_>;

    /// Creates a checkpoint of the current bump position.
    ///
    /// The bump position can be reset to this checkpoint with [`reset_to`].
    ///
    /// [`reset_to`]: BumpAllocatorCore::reset_to
    #[must_use]
    fn checkpoint(&self) -> Checkpoint;

    /// Resets the bump position to a previously created checkpoint.
    ///
    /// The memory that has been allocated since then will be reused by future allocations.
    ///
    /// # Safety
    ///
    /// - the checkpoint must have been created by this bump allocator
    /// - the bump allocator must not have been [`reset`] since creation of this checkpoint
    /// - there must be no references to allocations made since creation of this checkpoint
    /// - the checkpoint must not have been created by a `!GUARANTEED_ALLOCATED` bump allocator when self is `GUARANTEED_ALLOCATED`
    /// - the bump allocator must be [unclaimed] at the time the checkpoint is created and when this function is called
    ///
    /// [`reset`]: crate::Bump::reset
    /// [unclaimed]: crate::traits::BumpAllocatorScope::claim
    ///
    /// # Examples
    ///
    /// ```
    /// # use bump_scope::Bump;
    /// let bump: Bump = Bump::new();
    /// let checkpoint = bump.checkpoint();
    ///
    /// {
    ///     let hello = bump.alloc_str("hello");
    ///     assert_eq!(bump.stats().allocated(), 5);
    ///     # _ = hello;
    /// }
    ///
    /// unsafe { bump.reset_to(checkpoint); }
    /// assert_eq!(bump.stats().allocated(), 0);
    /// ```
    unsafe fn reset_to(&self, checkpoint: Checkpoint);

    /// Returns true if the bump allocator is currently [claimed].
    ///
    /// [claimed]: crate::traits::BumpAllocatorScope::claim
    #[must_use]
    fn is_claimed(&self) -> bool;

    /// Returns a pointer range of free space in the bump allocator with a size of at least `layout.size()`.
    ///
    /// The start of the range is aligned to `layout.align()`.
    ///
    /// The pointer range takes up as much of the free space of the chunk as possible while satisfying the other conditions.
    ///
    /// # Errors
    /// Errors if the allocation fails.
    fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError>;

    /// Allocate part of the free space returned from a [`prepare_allocation`] call.
    ///
    /// # Safety
    /// - `range` must have been returned from a call to [`prepare_allocation`]
    /// - no allocation, grow, shrink or deallocate must have taken place since then
    /// - no resets must have taken place since then
    /// - `layout` must be less than or equal to the `layout` used when calling
    ///   [`prepare_allocation`], both in size and alignment
    /// - the bump allocator must be [unclaimed] at the time [`prepare_allocation`] was called and when calling this function
    ///
    /// [`prepare_allocation`]: BumpAllocatorCore::prepare_allocation
    /// [unclaimed]: crate::traits::BumpAllocatorScope::claim
    unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8>;

    /// Returns a pointer range of free space in the bump allocator with a size of at least `layout.size()`.
    ///
    /// The end of the range is aligned to `layout.align()`.
    ///
    /// The pointer range takes up as much of the free space of the chunk as possible while satisfying the other conditions.
    ///
    /// # Errors
    /// Errors if the allocation fails.
    fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError>;

    /// Allocate part of the free space returned from a [`prepare_allocation_rev`] call starting at the end.
    ///
    /// # Safety
    /// - `range` must have been returned from a call to [`prepare_allocation_rev`]
    /// - no allocation, grow, shrink or deallocate must have taken place since then
    /// - no resets must have taken place since then
    /// - `layout` must be less than or equal to the `layout` used when calling
    ///   [`prepare_allocation_rev`], both in size and alignment
    /// - the bump allocator must be [unclaimed] at the time [`prepare_allocation_rev`] was called and when calling this function
    ///
    /// [`prepare_allocation_rev`]: BumpAllocatorCore::prepare_allocation_rev
    /// [unclaimed]: crate::traits::BumpAllocatorScope::claim
    unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8>;
}
// Compile-time checks (macros from `crate::traits`):
// - `BumpAllocatorCore` must remain dyn-compatible (object safe), since the
//   list below includes `dyn BumpAllocatorCore` and friends;
// - every listed type must implement `BumpAllocatorCore`.
// A regression in either becomes a compile error rather than a downstream break.
assert_dyn_compatible!(BumpAllocatorCore);

assert_implements! {
    [BumpAllocatorCore + ?Sized]

    Bump
    &Bump
    &mut Bump

    BumpScope
    &BumpScope
    &mut BumpScope

    dyn BumpAllocatorCore
    &dyn BumpAllocatorCore
    &mut dyn BumpAllocatorCore

    dyn BumpAllocatorCoreScope
    &dyn BumpAllocatorCoreScope
    &mut dyn BumpAllocatorCoreScope

    dyn MutBumpAllocatorCore
    &dyn MutBumpAllocatorCore
    &mut dyn MutBumpAllocatorCore

    dyn MutBumpAllocatorCoreScope
    &dyn MutBumpAllocatorCoreScope
    &mut dyn MutBumpAllocatorCoreScope
}
// Implements `BumpAllocatorCore` for `&B` and `&mut B` by forwarding every
// method to the referenced allocator. The UFCS calls (`B::any_stats(self)`)
// rely on auto-deref: `self` is `&&B` / `&&mut B` and coerces to `&B`.
//
// SAFETY of the generated `unsafe impl`s: pure forwarding — the referenced
// `B: BumpAllocatorCore` already upholds all of the trait's guarantees.
macro_rules! impl_for_ref {
    ($($ty:ty)*) => {
        $(
            unsafe impl<B: BumpAllocatorCore + ?Sized> BumpAllocatorCore for $ty {
                #[inline(always)]
                fn any_stats(&self) -> AnyStats<'_> {
                    B::any_stats(self)
                }

                #[inline(always)]
                fn checkpoint(&self) -> Checkpoint {
                    B::checkpoint(self)
                }

                #[inline(always)]
                unsafe fn reset_to(&self, checkpoint: Checkpoint) {
                    // SAFETY: the caller upholds `reset_to`'s conditions.
                    unsafe { B::reset_to(self, checkpoint) };
                }

                #[inline(always)]
                fn is_claimed(&self) -> bool {
                    B::is_claimed(self)
                }

                #[inline(always)]
                fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
                    B::prepare_allocation(self, layout)
                }

                #[inline(always)]
                unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
                    // SAFETY: the caller upholds `allocate_prepared`'s conditions.
                    unsafe { B::allocate_prepared(self, layout, range) }
                }

                #[inline(always)]
                fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
                    B::prepare_allocation_rev(self, layout)
                }

                #[inline(always)]
                unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
                    // SAFETY: the caller upholds `allocate_prepared_rev`'s conditions.
                    unsafe { B::allocate_prepared_rev(self, layout, range) }
                }
            }
        )*
    };
}

impl_for_ref! {
    &B
    &mut B
}
248unsafe impl<B: BumpAllocatorCore> BumpAllocatorCore for WithoutDealloc<B> {
249    #[inline(always)]
250    fn any_stats(&self) -> AnyStats<'_> {
251        B::any_stats(&self.0)
252    }
253
254    #[inline(always)]
255    fn checkpoint(&self) -> Checkpoint {
256        B::checkpoint(&self.0)
257    }
258
259    #[inline(always)]
260    unsafe fn reset_to(&self, checkpoint: Checkpoint) {
261        unsafe { B::reset_to(&self.0, checkpoint) };
262    }
263
264    #[inline(always)]
265    fn is_claimed(&self) -> bool {
266        B::is_claimed(&self.0)
267    }
268
269    #[inline(always)]
270    fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
271        B::prepare_allocation(&self.0, layout)
272    }
273
274    #[inline(always)]
275    unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
276        unsafe { B::allocate_prepared(&self.0, layout, range) }
277    }
278
279    #[inline(always)]
280    fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
281        B::prepare_allocation_rev(&self.0, layout)
282    }
283
284    #[inline(always)]
285    unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
286        unsafe { B::allocate_prepared_rev(&self.0, layout, range) }
287    }
288}
289
290unsafe impl<B: BumpAllocatorCore> BumpAllocatorCore for WithoutShrink<B> {
291    #[inline(always)]
292    fn any_stats(&self) -> AnyStats<'_> {
293        B::any_stats(&self.0)
294    }
295
296    #[inline(always)]
297    fn checkpoint(&self) -> Checkpoint {
298        B::checkpoint(&self.0)
299    }
300
301    #[inline(always)]
302    unsafe fn reset_to(&self, checkpoint: Checkpoint) {
303        unsafe { B::reset_to(&self.0, checkpoint) };
304    }
305
306    #[inline(always)]
307    fn is_claimed(&self) -> bool {
308        B::is_claimed(&self.0)
309    }
310
311    #[inline(always)]
312    fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
313        B::prepare_allocation(&self.0, layout)
314    }
315
316    #[inline(always)]
317    unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
318        unsafe { B::allocate_prepared(&self.0, layout, range) }
319    }
320
321    #[inline(always)]
322    fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
323        B::prepare_allocation_rev(&self.0, layout)
324    }
325
326    #[inline(always)]
327    unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
328        unsafe { B::allocate_prepared_rev(&self.0, layout, range) }
329    }
330}
331
332unsafe impl<A, S> BumpAllocatorCore for Bump<A, S>
333where
334    A: BaseAllocator<S::GuaranteedAllocated>,
335    S: BumpAllocatorSettings,
336{
337    #[inline(always)]
338    fn any_stats(&self) -> AnyStats<'_> {
339        self.as_scope().any_stats()
340    }
341
342    #[inline(always)]
343    fn checkpoint(&self) -> Checkpoint {
344        self.as_scope().checkpoint()
345    }
346
347    #[inline(always)]
348    unsafe fn reset_to(&self, checkpoint: Checkpoint) {
349        unsafe { self.as_scope().reset_to(checkpoint) };
350    }
351
352    #[inline(always)]
353    fn is_claimed(&self) -> bool {
354        self.as_scope().is_claimed()
355    }
356
357    #[inline(always)]
358    fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
359        self.as_scope().prepare_allocation(layout)
360    }
361
362    #[inline(always)]
363    unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
364        unsafe { self.as_scope().allocate_prepared(layout, range) }
365    }
366
367    #[inline(always)]
368    fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
369        self.as_scope().prepare_allocation_rev(layout)
370    }
371
372    #[inline(always)]
373    unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
374        unsafe { self.as_scope().allocate_prepared_rev(layout, range) }
375    }
376}
377
// The actual implementation: all other impls in this module ultimately
// delegate here. `S::UP` selects between an upwards- and downwards-growing
// bump direction at compile time.
unsafe impl<A, S> BumpAllocatorCore for BumpScope<'_, A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    fn any_stats(&self) -> AnyStats<'_> {
        self.stats().into()
    }

    #[inline(always)]
    fn checkpoint(&self) -> Checkpoint {
        self.raw.checkpoint()
    }

    #[inline]
    unsafe fn reset_to(&self, checkpoint: Checkpoint) {
        // SAFETY: the caller upholds the trait's `reset_to` conditions.
        unsafe { self.raw.reset_to(checkpoint) }
    }

    #[inline(always)]
    fn is_claimed(&self) -> bool {
        self.raw.is_claimed()
    }

    #[inline(always)]
    fn prepare_allocation(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
        // Slow path: the current chunk cannot provide a range for `layout`,
        // so one is sought in another chunk. Kept out-of-line and `#[cold]`
        // so the common in-chunk fast path below stays small and inlinable.
        #[cold]
        #[inline(never)]
        unsafe fn prepare_allocation_in_another_chunk<A, S>(
            this: &BumpScope<'_, A, S>,
            layout: Layout,
        ) -> Result<Range<NonNull<u8>>, AllocError>
        where
            A: BaseAllocator<S::GuaranteedAllocated>,
            S: BumpAllocatorSettings,
        {
            unsafe {
                this.raw
                    .in_another_chunk(CustomLayout(layout), RawChunk::prepare_allocation_range)
            }
        }

        // Fast path: try to carve the range out of the current chunk.
        match self.raw.chunk.get().prepare_allocation_range(CustomLayout(layout)) {
            Some(ptr) => Ok(ptr),
            None => unsafe { prepare_allocation_in_another_chunk(self, layout) },
        }
    }

    #[inline(always)]
    unsafe fn allocate_prepared(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
        // Debug-check the caller's contract: both range ends aligned to
        // `layout.align()` and the size a multiple of the alignment (so
        // offsetting by `layout.size()` preserves alignment).
        debug_assert_eq!(range.start.addr().get() % layout.align(), 0);
        debug_assert_eq!(range.end.addr().get() % layout.align(), 0);
        debug_assert_eq!(layout.size() % layout.align(), 0);

        unsafe {
            // a successful `prepare_allocation` guarantees a non-dummy-chunk
            let chunk = self.raw.chunk.get().as_non_dummy_unchecked();

            if S::UP {
                // Upwards bumping: the allocation occupies the start of the
                // prepared range; advance the bump position just past it.
                let end = range.start.add(layout.size());
                chunk.set_pos_addr_and_align(end.addr().get());
                range.start
            } else {
                // Downwards bumping: the allocation must end up at the end of
                // the prepared range, so the bytes at `range.start` are moved
                // there and the bump position drops to the allocation's start.
                // NOTE(review): this relocation assumes the prepared contents
                // were written starting at `range.start` — confirm with callers.
                let src = range.start;
                let dst_end = range.end;
                let dst = dst_end.sub(layout.size());
                src.copy_to(dst, layout.size());
                chunk.set_pos_addr_and_align(dst.addr().get());
                dst
            }
        }
    }

    #[inline(always)]
    fn prepare_allocation_rev(&self, layout: Layout) -> Result<Range<NonNull<u8>>, AllocError> {
        // for now the implementation for both methods is the same
        self.prepare_allocation(layout)
    }

    #[inline(always)]
    unsafe fn allocate_prepared_rev(&self, layout: Layout, range: Range<NonNull<u8>>) -> NonNull<u8> {
        // Same contract checks as `allocate_prepared` (see above).
        debug_assert_eq!(range.start.addr().get() % layout.align(), 0);
        debug_assert_eq!(range.end.addr().get() % layout.align(), 0);
        debug_assert_eq!(layout.size() % layout.align(), 0);

        unsafe {
            // a successful `prepare_allocation` guarantees a non-dummy-chunk
            let chunk = self.raw.chunk.get().as_non_dummy_unchecked();

            if S::UP {
                // Upwards bumping: the "rev" allocation was prepared at the end
                // of the range, but the allocation must occupy the start, so
                // the bytes at the range's tail are moved down to `range.start`
                // and the bump position advances past them.
                let dst = range.start;
                let dst_end = dst.add(layout.size());

                let src_end = range.end;
                let src = src_end.sub(layout.size());

                src.copy_to(dst, layout.size());

                chunk.set_pos_addr_and_align(dst_end.addr().get());

                dst
            } else {
                // Downwards bumping: the allocation already sits at the end of
                // the range; just lower the bump position to its start.
                let dst_end = range.end;
                let dst = dst_end.sub(layout.size());
                chunk.set_pos_addr_and_align(dst.addr().get());
                dst
            }
        }
    }
}
488}