bump_scope/allocator_impl.rs
// FIXME: Figure out a way to make coverage report not suck because of `if UP { ... } else { ... }`.
2
3use core::{alloc::Layout, num::NonZeroUsize, ptr::NonNull};
4
5use crate::{
6    BaseAllocator, Bump, BumpScope,
7    alloc::{AllocError, Allocator},
8    bump_down,
9    layout::CustomLayout,
10    polyfill::non_null,
11    raw_bump::RawBump,
12    settings::BumpAllocatorSettings,
13    up_align_usize_unchecked,
14};
15
// `Bump` acts as an `Allocator` by forwarding every method to the free
// functions in this module, all of which operate on the shared `RawBump`
// state in `self.raw`.
unsafe impl<A, S> Allocator for Bump<A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        allocate(&self.raw, layout)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // SAFETY: the caller upholds the `Allocator::deallocate` contract
        // (`ptr` denotes a live block of `layout` from this allocator).
        unsafe { deallocate(&self.raw, ptr, layout) };
    }

    #[inline(always)]
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: the caller upholds the `Allocator::grow` contract.
        unsafe { grow(&self.raw, ptr, old_layout, new_layout) }
    }

    #[inline(always)]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: the caller upholds the `Allocator::grow_zeroed` contract.
        unsafe { grow_zeroed(&self.raw, ptr, old_layout, new_layout) }
    }

    #[inline(always)]
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: the caller upholds the `Allocator::shrink` contract.
        unsafe { shrink(&self.raw, ptr, old_layout, new_layout) }
    }
}
51
// `BumpScope` acts as an `Allocator` exactly like `Bump` above: every
// method forwards to the free functions in this module via `self.raw`.
unsafe impl<A, S> Allocator for BumpScope<'_, A, S>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        allocate(&self.raw, layout)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // SAFETY: the caller upholds the `Allocator::deallocate` contract.
        unsafe { deallocate(&self.raw, ptr, layout) };
    }

    #[inline(always)]
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: the caller upholds the `Allocator::grow` contract.
        unsafe { grow(&self.raw, ptr, old_layout, new_layout) }
    }

    #[inline(always)]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: the caller upholds the `Allocator::grow_zeroed` contract.
        unsafe { grow_zeroed(&self.raw, ptr, old_layout, new_layout) }
    }

    #[inline(always)]
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: the caller upholds the `Allocator::shrink` contract.
        unsafe { shrink(&self.raw, ptr, old_layout, new_layout) }
    }
}
87
88#[inline(always)]
89fn allocate<A, S>(bump: &RawBump<A, S>, layout: Layout) -> Result<NonNull<[u8]>, AllocError>
90where
91    A: BaseAllocator<S::GuaranteedAllocated>,
92    S: BumpAllocatorSettings,
93{
94    Ok(NonNull::slice_from_raw_parts(
95        bump.alloc::<AllocError>(layout)?,
96        layout.size(),
97    ))
98}
99
100#[inline(always)]
101unsafe fn deallocate<A, S>(bump: &RawBump<A, S>, ptr: NonNull<u8>, layout: Layout)
102where
103    A: BaseAllocator<S::GuaranteedAllocated>,
104    S: BumpAllocatorSettings,
105{
106    if !S::DEALLOCATES {
107        return;
108    }
109
110    unsafe {
111        // free allocated space if this is the last allocation
112        if is_last(bump, ptr, layout) {
113            deallocate_assume_last(bump, ptr, layout);
114        }
115    }
116}
117
118#[inline(always)]
119unsafe fn deallocate_assume_last<A, S>(bump: &RawBump<A, S>, ptr: NonNull<u8>, layout: Layout)
120where
121    A: BaseAllocator<S::GuaranteedAllocated>,
122    S: BumpAllocatorSettings,
123{
124    if !S::DEALLOCATES {
125        return;
126    }
127
128    unsafe {
129        let chunk = bump.chunk.get().as_non_dummy_unchecked();
130        if S::UP {
131            chunk.set_pos_addr_and_align(ptr.addr().get());
132        } else {
133            let mut addr = ptr.addr().get();
134            addr += layout.size();
135            chunk.set_pos_addr_and_align(addr);
136        }
137    }
138}
139
140/// Checks if the memory block is the last one in the bump allocator.
141///
142/// The given memory block must have been allocated by some bump allocator,
143/// not necessarily the given one (but if it isn't then it will always return false).
144///
145/// This condition must be true to be able to do grow, shrink or deallocate in place.
146///
147/// If this function returns true then the bump is guaranteed to have a non-dummy
148/// chunk since a memory block is only returned from a bump allocator with a non-dummy
149/// chunk.
150#[inline(always)]
151unsafe fn is_last<A, S>(bump: &RawBump<A, S>, ptr: NonNull<u8>, layout: Layout) -> bool
152where
153    A: BaseAllocator<S::GuaranteedAllocated>,
154    S: BumpAllocatorSettings,
155{
156    if S::UP {
157        (unsafe { ptr.add(layout.size()) }) == bump.chunk.get().pos()
158    } else {
159        ptr == bump.chunk.get().pos()
160    }
161}
162
/// Grows the block at `old_ptr` from `old_layout` to `new_layout`.
///
/// Attempts to grow in place when the block is the most recent allocation
/// (and, for upwards bumping, the pointer already satisfies the new alignment);
/// otherwise allocates fresh memory and copies the old contents over.
///
/// # Safety
///
/// `old_ptr` must denote a live block of `old_layout` allocated by this bump
/// allocator, and `new_layout.size()` must be >= `old_layout.size()`.
#[inline(always)]
unsafe fn grow<A, S>(
    bump: &RawBump<A, S>,
    old_ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    debug_assert!(
        new_layout.size() >= old_layout.size(),
        "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
    );

    unsafe {
        if S::UP {
            // NOTE(review): non-short-circuiting `&` — both operands are cheap;
            // presumably chosen for branchless codegen. Confirm intent.
            if is_last(bump, old_ptr, old_layout) & align_fits(old_ptr, old_layout, new_layout) {
                // We may be able to grow in place! Just need to check if there is enough space.

                // `is_last` returned true, which guarantees a non-dummy
                let chunk = bump.chunk.get().as_non_dummy_unchecked();
                let chunk_end = chunk.content_end();
                let remaining = chunk_end.addr().get() - old_ptr.addr().get();

                if new_layout.size() <= remaining {
                    // There is enough space! We will grow in place. Just need to update the bump pointer.

                    let old_addr = old_ptr.addr();

                    // Up-aligning a pointer inside a chunks content by `MIN_ALIGN` never overflows.
                    let new_pos = up_align_usize_unchecked(old_addr.get() + new_layout.size(), S::MIN_ALIGN);

                    chunk.set_pos_addr(new_pos);

                    Ok(NonNull::slice_from_raw_parts(old_ptr, new_layout.size()))
                } else {
                    // The current chunk doesn't have enough space to allocate this layout. We need to allocate in another chunk.
                    let new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
                    old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                }
            } else {
                // We can't grow in place. We have to make a new allocation.
                let new_ptr = bump.alloc::<AllocError>(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        } else {
            if is_last(bump, old_ptr, old_layout) {
                // `is_last` returned true, which guarantees a non-dummy
                let chunk = bump.chunk.get().as_non_dummy_unchecked();

                // We may be able to reuse the currently allocated space. Just need to check if the current chunk has enough space for that.
                let additional_size = new_layout.size() - old_layout.size();

                // Downwards bumping: the grown block starts lower than the old one.
                let old_addr = old_ptr.addr();
                let new_addr = bump_down(old_addr, additional_size, new_layout.align().max(S::MIN_ALIGN));

                let very_start = chunk.content_start().addr();

                if new_addr >= very_start.get() {
                    // There is enough space in the current chunk! We will reuse the allocated space.

                    let new_addr = NonZeroUsize::new_unchecked(new_addr);
                    let new_addr_end = new_addr.get() + new_layout.size();

                    let new_ptr = old_ptr.with_addr(new_addr);

                    // Check if the regions don't overlap so we may use the faster `copy_nonoverlapping`.
                    if new_addr_end < old_addr.get() {
                        old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    } else {
                        old_ptr.copy_to(new_ptr, old_layout.size());
                    }

                    // `is_last` returned true, which guarantees a non-dummy chunk
                    chunk.set_pos_addr(new_addr.get());

                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                } else {
                    // The current chunk doesn't have enough space to allocate this layout. We need to allocate in another chunk.
                    let new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
                    old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                }
            } else {
                // We can't reuse the allocated space. We have to make a new allocation.
                let new_ptr = bump.alloc::<AllocError>(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        }
    }
}
259
260#[inline(always)]
261unsafe fn grow_zeroed<A, S>(
262    bump: &RawBump<A, S>,
263    old_ptr: NonNull<u8>,
264    old_layout: Layout,
265    new_layout: Layout,
266) -> Result<NonNull<[u8]>, AllocError>
267where
268    A: BaseAllocator<S::GuaranteedAllocated>,
269    S: BumpAllocatorSettings,
270{
271    unsafe {
272        let new_ptr = grow(bump, old_ptr, old_layout, new_layout)?;
273
274        let delta = new_layout.size() - old_layout.size();
275        new_ptr.cast::<u8>().add(old_layout.size()).write_bytes(0, delta);
276
277        Ok(new_ptr)
278    }
279}
280
/// This shrink implementation always tries to reuse old memory if it can.
///
/// That's different to bumpalo's shrink implementation, which only shrinks if it can do so with `copy_nonoverlapping`
/// and doesn't attempt to recover memory if the alignment doesn't fit.
///
/// # Safety
///
/// `old_ptr` must denote a live block of `old_layout` allocated by this bump
/// allocator, and `new_layout.size()` must be <= `old_layout.size()`.
#[inline(always)]
unsafe fn shrink<A, S>(
    bump: &RawBump<A, S>,
    old_ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    /// Called when `new_layout` doesn't fit alignment.
    ///
    /// Tries to recover memory anyway: deallocate the old block, re-allocate
    /// with the new layout (possibly overlapping the old bytes), and copy.
    /// Rolls the bump position back if the in-chunk allocation fails.
    #[cold]
    #[inline(never)]
    unsafe fn shrink_unfit<A, S>(
        bump: &RawBump<A, S>,
        old_ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError>
    where
        A: BaseAllocator<S::GuaranteedAllocated>,
        S: BumpAllocatorSettings,
    {
        unsafe {
            if S::SHRINKS && is_last(bump, old_ptr, old_layout) {
                // Remember the position so we can roll back if re-allocation fails.
                let old_pos = bump.chunk.get().pos();
                deallocate_assume_last(bump, old_ptr, old_layout);

                let overlaps;
                let new_ptr;

                if let Some(in_chunk) = bump.chunk.get().alloc(CustomLayout(new_layout)) {
                    new_ptr = in_chunk;
                    // The fresh block may overlap the old bytes we still need to copy.
                    overlaps = if S::UP {
                        let old_ptr_end = old_ptr.add(new_layout.size());
                        old_ptr_end > new_ptr
                    } else {
                        let new_ptr_end = new_ptr.add(new_layout.size());
                        new_ptr_end > old_ptr
                    }
                } else {
                    // Roll back the deallocation before falling back to another chunk.
                    // `is_last` returned true, which guarantees a non-dummy chunk
                    bump.chunk.get().as_non_dummy_unchecked().set_pos(old_pos);

                    new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
                    overlaps = false;
                }

                if overlaps {
                    old_ptr.copy_to(new_ptr, new_layout.size());
                } else {
                    old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
                }

                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            } else {
                // Can't reuse the old space — make a plain new allocation and copy.
                let new_ptr = bump.alloc::<AllocError>(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        }
    }

    debug_assert!(
        new_layout.size() <= old_layout.size(),
        "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
    );

    unsafe {
        if !align_fits(old_ptr, old_layout, new_layout) {
            return shrink_unfit(bump, old_ptr, old_layout, new_layout);
        }

        // if that's not the last allocation, there is nothing we can do
        if !S::SHRINKS || !is_last(bump, old_ptr, old_layout) {
            // we return the size of the old layout
            return Ok(NonNull::slice_from_raw_parts(old_ptr, old_layout.size()));
        }

        if S::UP {
            // Upwards: keep the block where it is and just pull the bump position down to its new end.
            let end = old_ptr.addr().get() + new_layout.size();

            // Up-aligning a pointer inside a chunk by `MIN_ALIGN` never overflows.
            let new_pos = up_align_usize_unchecked(end, S::MIN_ALIGN);

            // `is_last` returned true, which guarantees a non-dummy
            bump.chunk.get().as_non_dummy_unchecked().set_pos_addr(new_pos);

            Ok(NonNull::slice_from_raw_parts(old_ptr, new_layout.size()))
        } else {
            // Downwards: the shrunk block moves up towards the chunk end to free space below it.
            let old_addr = old_ptr.addr();
            let old_end_addr = NonZeroUsize::new_unchecked(old_addr.get() + old_layout.size());

            let new_addr = bump_down(old_end_addr, new_layout.size(), new_layout.align().max(S::MIN_ALIGN));
            let new_addr = NonZeroUsize::new_unchecked(new_addr);
            let new_ptr = old_ptr.with_addr(new_addr);

            // Use the faster non-overlapping copy only when source and destination can't overlap.
            let copy_src_end = NonZeroUsize::new_unchecked(old_addr.get() + new_layout.size());
            let copy_dst_start = new_addr;
            let overlaps = copy_src_end > copy_dst_start;

            if overlaps {
                old_ptr.copy_to(new_ptr, new_layout.size());
            } else {
                old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
            }

            // `is_last` returned true, which guarantees a non-dummy
            bump.chunk.get().as_non_dummy_unchecked().set_pos(new_ptr);

            Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
        }
    }
}
400
401#[inline(always)]
402fn align_fits(old_ptr: NonNull<u8>, _old_layout: Layout, new_layout: Layout) -> bool {
403    non_null::is_aligned_to(old_ptr, new_layout.align())
404}