bump_scope/allocator_impl.rs

// FIXME: Figure out a way to make coverage report not suck because of `if UP { ... } else { ... }`.

use core::{alloc::Layout, num::NonZeroUsize, ptr::NonNull};

use crate::{
    BaseAllocator, BumpScope, MinimumAlignment, SupportedMinimumAlignment, alloc::AllocError, bump_down, polyfill::non_null,
    up_align_usize_unchecked,
};

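/// Allocates memory for `layout` via `try_alloc_layout` and returns it as a
/// `NonNull<[u8]>` slice of length `layout.size()`.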
#[inline(always)]
pub(crate) fn allocate<
    A,
    const MIN_ALIGN: usize,
    const UP: bool,
    const GUARANTEED_ALLOCATED: bool,
    const DEALLOCATES: bool,
>(
    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
    layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
    Ok(NonNull::slice_from_raw_parts(bump.try_alloc_layout(layout)?, layout.size()))
}

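/// Deallocates `ptr`. This is a no-op unless `DEALLOCATES` is true and `ptr` is
/// the most recent allocation in the current chunk, in which case the bump
/// position is reset to reclaim its space.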
#[inline(always)]
pub(crate) unsafe fn deallocate<
    A,
    const MIN_ALIGN: usize,
    const UP: bool,
    const GUARANTEED_ALLOCATED: bool,
    const DEALLOCATES: bool,
>(
    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
    ptr: NonNull<u8>,
    layout: Layout,
) where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
    if !DEALLOCATES {
        return;
    }

    unsafe {
        // free allocated space if this is the last allocation
        if is_last_and_allocated(bump, ptr, layout) {
            deallocate_assume_last(bump, ptr, layout);
        }
    }
}

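/// Deallocates `ptr` assuming it is the most recent allocation in the current
/// chunk (only checked via `debug_assert!`): the bump position is moved back to
/// the allocation's start when bumping upwards, or to its end when bumping
/// downwards.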
#[inline(always)]
unsafe fn deallocate_assume_last<
    A,
    const MIN_ALIGN: usize,
    const UP: bool,
    const GUARANTEED_ALLOCATED: bool,
    const DEALLOCATES: bool,
>(
    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
    ptr: NonNull<u8>,
    layout: Layout,
) where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
    if !DEALLOCATES {
        return;
    }

    unsafe {
        debug_assert!(is_last_and_allocated(bump, ptr, layout));

        if UP {
            bump.set_pos(ptr.addr());
        } else {
            let mut addr = ptr.addr().get();
            addr += layout.size();
            bump.set_pos(NonZeroUsize::new_unchecked(addr));
        }
    }
}

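/// Returns `true` if the current chunk is allocated and `ptr` is the most
/// recent allocation, i.e. it borders the bump position: when bumping upwards
/// the position sits just past the allocation's end, when bumping downwards it
/// sits at the allocation's start.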
#[inline(always)]
unsafe fn is_last_and_allocated<
    A,
    const MIN_ALIGN: usize,
    const UP: bool,
    const GUARANTEED_ALLOCATED: bool,
    const DEALLOCATES: bool,
>(
    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
    ptr: NonNull<u8>,
    layout: Layout,
) -> bool
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
{
    if bump.chunk.get().is_unallocated() {
        return false;
    }

    unsafe {
        if UP {
            ptr.as_ptr().add(layout.size()) == bump.chunk.get().pos().as_ptr()
        } else {
            ptr == bump.chunk.get().pos()
        }
    }
}

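/// Grows the allocation at `old_ptr` from `old_layout` to `new_layout`.
///
/// If it is the most recent allocation and the current chunk has enough room,
/// the block is grown in place (upwards) or moved within the chunk (downwards);
/// otherwise the layout is allocated anew and the old contents are copied over.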
#[inline(always)]
pub(crate) unsafe fn grow<
    A,
    const MIN_ALIGN: usize,
    const UP: bool,
    const GUARANTEED_ALLOCATED: bool,
    const DEALLOCATES: bool,
>(
    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
    old_ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
    debug_assert!(
        new_layout.size() >= old_layout.size(),
        "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
    );

    unsafe {
        if UP {
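            // `&` rather than `&&`: both cheap checks are evaluated without short-circuiting.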
            if is_last_and_allocated(bump, old_ptr, old_layout) & align_fits(old_ptr, old_layout, new_layout) {
                // We may be able to grow in place! Just need to check if there is enough space.

                let chunk = bump.chunk.get().guaranteed_allocated_unchecked();
                let chunk_end = chunk.content_end();
                let remaining = chunk_end.addr().get() - old_ptr.addr().get();

                if new_layout.size() <= remaining {
                    // There is enough space! We will grow in place. Just need to update the bump pointer.

                    let old_addr = old_ptr.addr();

                    // Up-aligning a pointer inside a chunk's content by `MIN_ALIGN` never overflows.
                    let new_pos = up_align_usize_unchecked(old_addr.get() + new_layout.size(), MIN_ALIGN);

                    // `is_last_and_allocated` returned true
                    chunk.set_pos_addr(new_pos);

                    Ok(NonNull::slice_from_raw_parts(old_ptr, new_layout.size()))
                } else {
                    // The current chunk doesn't have enough space to allocate this layout. We need to allocate in another chunk.
                    let new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
                    old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                }
            } else {
                // We can't grow in place. We have to make a new allocation.
                let new_ptr = bump.try_alloc_layout(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        } else {
            if is_last_and_allocated(bump, old_ptr, old_layout) {
                // We may be able to reuse the currently allocated space. Just need to check if the current chunk has enough space for that.
                let additional_size = new_layout.size() - old_layout.size();

                let old_addr = old_ptr.addr();
                let new_addr = bump_down(old_addr, additional_size, new_layout.align().max(MIN_ALIGN));

                let chunk = bump.chunk.get().guaranteed_allocated_unchecked();
                let very_start = chunk.content_start().addr();

                if new_addr >= very_start.get() {
                    // There is enough space in the current chunk! We will reuse the allocated space.

                    let new_addr = NonZeroUsize::new_unchecked(new_addr);
                    let new_addr_end = new_addr.get() + new_layout.size();

                    let new_ptr = old_ptr.with_addr(new_addr);

                    // Check if the regions don't overlap so we may use the faster `copy_nonoverlapping`.
                    if new_addr_end < old_addr.get() {
                        old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    } else {
                        old_ptr.copy_to(new_ptr, old_layout.size());
                    }

                    // `is_last_and_allocated` returned true
                    chunk.set_pos_addr(new_addr.get());

                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                } else {
                    // The current chunk doesn't have enough space to allocate this layout. We need to allocate in another chunk.
                    let new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
                    old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                }
            } else {
                // We can't reuse the allocated space. We have to make a new allocation.
                let new_ptr = bump.try_alloc_layout(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        }
    }
}

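/// Like `grow`, but the bytes beyond `old_layout.size()` are zeroed.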
#[inline(always)]
pub(crate) unsafe fn grow_zeroed<
    A,
    const MIN_ALIGN: usize,
    const UP: bool,
    const GUARANTEED_ALLOCATED: bool,
    const DEALLOCATES: bool,
>(
    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
    old_ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
    unsafe {
        let new_ptr = grow(bump, old_ptr, old_layout, new_layout)?;

        let delta = new_layout.size() - old_layout.size();
        new_ptr.cast::<u8>().as_ptr().add(old_layout.size()).write_bytes(0, delta);

        Ok(new_ptr)
    }
}

/// This shrink implementation always tries to reuse the old memory if it can.
///
/// That's different from bumpalo's shrink implementation, which only shrinks if it can do so with `copy_nonoverlapping`
/// and doesn't attempt to recover memory if the alignment doesn't fit.
#[inline(always)]
pub(crate) unsafe fn shrink<
    A,
    const MIN_ALIGN: usize,
    const UP: bool,
    const GUARANTEED_ALLOCATED: bool,
    const DEALLOCATES: bool,
>(
    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
    old_ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
    /// Cold path for when `old_ptr` is not aligned to `new_layout`'s alignment.
    #[cold]
    #[inline(never)]
    unsafe fn shrink_unfit<
        A,
        const MIN_ALIGN: usize,
        const UP: bool,
        const GUARANTEED_ALLOCATED: bool,
        const DEALLOCATES: bool,
    >(
        bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
        old_ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError>
    where
        MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
        A: BaseAllocator<GUARANTEED_ALLOCATED>,
    {
        unsafe {
            if DEALLOCATES && is_last_and_allocated(bump, old_ptr, old_layout) {
                let old_pos = bump.chunk.get().pos();
                deallocate_assume_last(bump, old_ptr, old_layout);

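                // The old block has been freed above; now allocate for `new_layout`. If the
                // new allocation lands in the current chunk it may overlap the old bytes, so
                // the copy below must account for that. If a new chunk is needed and that
                // allocation fails, the old bump position is restored before returning.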
                let overlaps;
                let new_ptr;

                if let Some(in_chunk) = bump.alloc_in_current_chunk(new_layout) {
                    new_ptr = in_chunk;
                    overlaps = if UP {
                        let old_ptr_end = old_ptr.add(new_layout.size());
                        old_ptr_end > new_ptr
                    } else {
                        let new_ptr_end = new_ptr.add(new_layout.size());
                        new_ptr_end > old_ptr
                    }
                } else {
                    new_ptr = match bump.alloc_in_another_chunk(new_layout) {
                        Ok(new_ptr) => new_ptr,
                        Err(error) => {
                            // Need to reset the bump pointer to the old position.

                            // `is_last_and_allocated` returned true
                            bump.chunk.get().guaranteed_allocated_unchecked().set_pos(old_pos);
                            return Err(error);
                        }
                    };
                    overlaps = false;
                }

                if overlaps {
                    old_ptr.copy_to(new_ptr, new_layout.size());
                } else {
                    old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
                }

                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            } else {
                let new_ptr = bump.try_alloc_layout(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        }
    }

    debug_assert!(
        new_layout.size() <= old_layout.size(),
        "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
    );

    unsafe {
        if !align_fits(old_ptr, old_layout, new_layout) {
            return shrink_unfit(bump, old_ptr, old_layout, new_layout);
        }

        // if that's not the last allocation, there is nothing we can do
        if !DEALLOCATES || !is_last_and_allocated(bump, old_ptr, old_layout) {
            // we return the size of the old layout
            return Ok(NonNull::slice_from_raw_parts(old_ptr, old_layout.size()));
        }

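        // Shrink in place: move the bump position back to the end of the smaller block
        // (bumping upwards), or re-place the block as high as possible within the old
        // one and move the position to its new start (bumping downwards).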
        if UP {
            let end = old_ptr.addr().get() + new_layout.size();

            // Up-aligning a pointer inside a chunk by `MIN_ALIGN` never overflows.
            let new_pos = up_align_usize_unchecked(end, MIN_ALIGN);

            // `is_last_and_allocated` returned true
            bump.chunk.get().guaranteed_allocated_unchecked().set_pos_addr(new_pos);

            Ok(NonNull::slice_from_raw_parts(old_ptr, new_layout.size()))
        } else {
            let old_addr = old_ptr.addr();
            let old_end_addr = NonZeroUsize::new_unchecked(old_addr.get() + old_layout.size());

            let new_addr = bump_down(old_end_addr, new_layout.size(), new_layout.align().max(MIN_ALIGN));
            let new_addr = NonZeroUsize::new_unchecked(new_addr);
            let new_ptr = old_ptr.with_addr(new_addr);

            let copy_src_end = NonZeroUsize::new_unchecked(old_addr.get() + new_layout.size());
            let copy_dst_start = new_addr;
            let overlaps = copy_src_end > copy_dst_start;

            if overlaps {
                old_ptr.copy_to(new_ptr, new_layout.size());
            } else {
                old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
            }

            // `is_last_and_allocated` returned true
            bump.chunk.get().guaranteed_allocated_unchecked().set_pos(new_ptr);

            Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
        }
    }
}

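/// Returns `true` if `old_ptr` is already aligned to `new_layout`'s alignment,
/// meaning the block can keep its address when changing to `new_layout`.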
#[inline(always)]
fn align_fits(old_ptr: NonNull<u8>, _old_layout: Layout, new_layout: Layout) -> bool {
    non_null::is_aligned_to(old_ptr, new_layout.align())
}