bump_scope/
allocator_impl.rs

1// FIXME: Figure out a way to make coverage report not suck because of `if UP { ... } else { ... }`.
2#![allow(clippy::unnecessary_wraps)]
3
4use core::{alloc::Layout, num::NonZeroUsize, ptr::NonNull};
5
6use crate::{
7    BaseAllocator, BumpScope, MinimumAlignment, SupportedMinimumAlignment, alloc::AllocError, bump_down, polyfill::non_null,
8    up_align_usize_unchecked,
9};
10
11#[inline(always)]
12pub(crate) fn allocate<
13    A,
14    const MIN_ALIGN: usize,
15    const UP: bool,
16    const GUARANTEED_ALLOCATED: bool,
17    const DEALLOCATES: bool,
18>(
19    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
20    layout: Layout,
21) -> Result<NonNull<[u8]>, AllocError>
22where
23    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
24    A: BaseAllocator<GUARANTEED_ALLOCATED>,
25{
26    Ok(NonNull::slice_from_raw_parts(bump.try_alloc_layout(layout)?, layout.size()))
27}
28
29#[inline(always)]
30pub(crate) unsafe fn deallocate<
31    A,
32    const MIN_ALIGN: usize,
33    const UP: bool,
34    const GUARANTEED_ALLOCATED: bool,
35    const DEALLOCATES: bool,
36>(
37    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
38    ptr: NonNull<u8>,
39    layout: Layout,
40) where
41    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
42    A: BaseAllocator<GUARANTEED_ALLOCATED>,
43{
44    if !DEALLOCATES {
45        return;
46    }
47
48    unsafe {
49        // free allocated space if this is the last allocation
50        if is_last_and_allocated(bump, ptr, layout) {
51            deallocate_assume_last(bump, ptr, layout);
52        }
53    }
54}
55
56#[inline(always)]
57unsafe fn deallocate_assume_last<
58    A,
59    const MIN_ALIGN: usize,
60    const UP: bool,
61    const GUARANTEED_ALLOCATED: bool,
62    const DEALLOCATES: bool,
63>(
64    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
65    ptr: NonNull<u8>,
66    layout: Layout,
67) where
68    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
69    A: BaseAllocator<GUARANTEED_ALLOCATED>,
70{
71    if !DEALLOCATES {
72        return;
73    }
74
75    unsafe {
76        debug_assert!(is_last_and_allocated(bump, ptr, layout));
77
78        if UP {
79            bump.set_pos(ptr.addr());
80        } else {
81            let mut addr = ptr.addr().get();
82            addr += layout.size();
83            bump.set_pos(NonZeroUsize::new_unchecked(addr));
84        }
85    }
86}
87
88#[inline(always)]
89unsafe fn is_last_and_allocated<
90    A,
91    const MIN_ALIGN: usize,
92    const UP: bool,
93    const GUARANTEED_ALLOCATED: bool,
94    const DEALLOCATES: bool,
95>(
96    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
97    ptr: NonNull<u8>,
98    layout: Layout,
99) -> bool
100where
101    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
102{
103    if bump.chunk.get().is_unallocated() {
104        return false;
105    }
106
107    unsafe {
108        if UP {
109            ptr.as_ptr().add(layout.size()) == bump.chunk.get().pos().as_ptr()
110        } else {
111            ptr == bump.chunk.get().pos()
112        }
113    }
114}
115
/// Grows the allocation at `old_ptr` from `old_layout` to `new_layout`.
///
/// Tries to grow in place when `old_ptr` is the most recent allocation
/// (and, for upwards bumping, already satisfies the new alignment);
/// otherwise makes a fresh allocation and copies the old contents over.
///
/// # Safety
///
/// `old_ptr` must denote a currently allocated block of this bump allocator
/// that fits `old_layout`, and `new_layout.size()` must be greater than or
/// equal to `old_layout.size()`.
#[inline(always)]
pub(crate) unsafe fn grow<
    A,
    const MIN_ALIGN: usize,
    const UP: bool,
    const GUARANTEED_ALLOCATED: bool,
    const DEALLOCATES: bool,
>(
    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
    old_ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
    debug_assert!(
        new_layout.size() >= old_layout.size(),
        "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
    );

    unsafe {
        if UP {
            // NOTE(review): bitwise `&` (not `&&`) — presumably intentional so both
            // cheap tests evaluate branchlessly; confirm before "fixing" to `&&`.
            if is_last_and_allocated(bump, old_ptr, old_layout) & align_fits(old_ptr, old_layout, new_layout) {
                // We may be able to grow in place! Just need to check if there is enough space.

                let chunk = bump.chunk.get().guaranteed_allocated_unchecked();
                let chunk_end = chunk.content_end();
                // Space from the start of this (last) allocation to the end of the chunk.
                let remaining = chunk_end.addr().get() - old_ptr.addr().get();

                if new_layout.size() <= remaining {
                    // There is enough space! We will grow in place. Just need to update the bump pointer.

                    let old_addr = old_ptr.addr();

                    // Up-aligning a pointer inside a chunks content by `MIN_ALIGN` never overflows.
                    let new_pos = up_align_usize_unchecked(old_addr.get() + new_layout.size(), MIN_ALIGN);

                    // `is_last_and_allocated` returned true
                    chunk.set_pos_addr(new_pos);

                    Ok(NonNull::slice_from_raw_parts(old_ptr, new_layout.size()))
                } else {
                    // The current chunk doesn't have enough space to allocate this layout. We need to allocate in another chunk.
                    let new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
                    // A different chunk can never overlap the old allocation.
                    old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                }
            } else {
                // We can't grow in place. We have to make a new allocation.
                let new_ptr = bump.try_alloc_layout(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        } else {
            // Downwards bumping: growing the last allocation means moving its
            // *start* further down; no `align_fits` check is needed here because
            // `bump_down` presumably produces an address aligned to
            // `new_layout.align().max(MIN_ALIGN)` — TODO confirm against `bump_down`'s docs.
            if is_last_and_allocated(bump, old_ptr, old_layout) {
                // We may be able to reuse the currently allocated space. Just need to check if the current chunk has enough space for that.
                let additional_size = new_layout.size() - old_layout.size();

                let old_addr = old_ptr.addr();
                let new_addr = bump_down(old_addr, additional_size, new_layout.align().max(MIN_ALIGN));

                let chunk = bump.chunk.get().guaranteed_allocated_unchecked();
                let very_start = chunk.content_start().addr();

                if new_addr >= very_start.get() {
                    // There is enough space in the current chunk! We will reuse the allocated space.

                    let new_addr = NonZeroUsize::new_unchecked(new_addr);
                    let new_addr_end = new_addr.get() + new_layout.size();

                    let new_ptr = old_ptr.with_addr(new_addr);

                    // Check if the regions don't overlap so we may use the faster `copy_nonoverlapping`.
                    if new_addr_end < old_addr.get() {
                        old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    } else {
                        old_ptr.copy_to(new_ptr, old_layout.size());
                    }

                    // `is_last_and_allocated` returned true
                    chunk.set_pos_addr(new_addr.get());

                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                } else {
                    // The current chunk doesn't have enough space to allocate this layout. We need to allocate in another chunk.
                    let new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
                    old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                }
            } else {
                // We can't reuse the allocated space. We have to make a new allocation.
                let new_ptr = bump.try_alloc_layout(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        }
    }
}
216
217#[inline(always)]
218pub(crate) unsafe fn grow_zeroed<
219    A,
220    const MIN_ALIGN: usize,
221    const UP: bool,
222    const GUARANTEED_ALLOCATED: bool,
223    const DEALLOCATES: bool,
224>(
225    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
226    old_ptr: NonNull<u8>,
227    old_layout: Layout,
228    new_layout: Layout,
229) -> Result<NonNull<[u8]>, AllocError>
230where
231    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
232    A: BaseAllocator<GUARANTEED_ALLOCATED>,
233{
234    unsafe {
235        let new_ptr = grow(bump, old_ptr, old_layout, new_layout)?;
236
237        let delta = new_layout.size() - old_layout.size();
238        new_ptr.cast::<u8>().as_ptr().add(old_layout.size()).write_bytes(0, delta);
239
240        Ok(new_ptr)
241    }
242}
243
/// This shrink implementation always tries to reuse old memory if it can.
///
/// That's different to bumpalo's shrink implementation, which only shrinks if it can do so with `copy_nonoverlapping`
/// and doesn't attempt to recover memory if the alignment doesn't fit.
///
/// # Safety
///
/// `old_ptr` must denote a currently allocated block of this bump allocator
/// that fits `old_layout`, and `new_layout.size()` must be less than or
/// equal to `old_layout.size()`.
#[inline(always)]
pub(crate) unsafe fn shrink<
    A,
    const MIN_ALIGN: usize,
    const UP: bool,
    const GUARANTEED_ALLOCATED: bool,
    const DEALLOCATES: bool,
>(
    bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
    old_ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
    /// Called when `new_layout` doesn't fit alignment.
    ///
    /// Cold path: deallocates the old block (when possible) and re-allocates
    /// with the new, stricter alignment, copying the contents over.
    #[cold]
    #[inline(never)]
    unsafe fn shrink_unfit<
        A,
        const MIN_ALIGN: usize,
        const UP: bool,
        const GUARANTEED_ALLOCATED: bool,
        const DEALLOCATES: bool,
    >(
        bump: &BumpScope<A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED, DEALLOCATES>,
        old_ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError>
    where
        MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
        A: BaseAllocator<GUARANTEED_ALLOCATED>,
    {
        unsafe {
            if DEALLOCATES && is_last_and_allocated(bump, old_ptr, old_layout) {
                // Remember the bump position so it can be restored if the
                // re-allocation below fails.
                let old_pos = bump.chunk.get().pos();
                // Free the old block first so its space can be reused by the
                // new allocation. The old *contents* stay readable until
                // something is written over them, which is why the copy below
                // is still valid — note the overlap handling.
                deallocate_assume_last(bump, old_ptr, old_layout);

                let overlaps;
                let new_ptr;

                if let Some(in_chunk) = bump.alloc_in_current_chunk(new_layout) {
                    new_ptr = in_chunk;
                    // The new allocation may overlap the old block's bytes
                    // since we just freed them; pick the copy routine accordingly.
                    overlaps = if UP {
                        let old_ptr_end = old_ptr.add(new_layout.size());
                        old_ptr_end > new_ptr
                    } else {
                        let new_ptr_end = new_ptr.add(new_layout.size());
                        new_ptr_end > old_ptr
                    }
                } else {
                    new_ptr = match bump.alloc_in_another_chunk(new_layout) {
                        Ok(new_ptr) => new_ptr,
                        Err(error) => {
                            // Need to reset the bump pointer to the old position.

                            // `is_last_and_allocated` returned true
                            bump.chunk.get().guaranteed_allocated_unchecked().set_pos(old_pos);
                            return Err(error);
                        }
                    };
                    // A different chunk can never overlap the old block.
                    overlaps = false;
                }

                if overlaps {
                    old_ptr.copy_to(new_ptr, new_layout.size());
                } else {
                    old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
                }

                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            } else {
                // Old space can't be reclaimed; just allocate fresh and copy.
                let new_ptr = bump.try_alloc_layout(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        }
    }

    debug_assert!(
        new_layout.size() <= old_layout.size(),
        "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
    );

    unsafe {
        if !align_fits(old_ptr, old_layout, new_layout) {
            return shrink_unfit(bump, old_ptr, old_layout, new_layout);
        }

        // if that's not the last allocation, there is nothing we can do
        if !DEALLOCATES || !is_last_and_allocated(bump, old_ptr, old_layout) {
            // we return the size of the old layout
            return Ok(NonNull::slice_from_raw_parts(old_ptr, old_layout.size()));
        }

        if UP {
            // Shrinking in place: keep the pointer, just pull the bump
            // position back to the new (smaller) end.
            let end = old_ptr.addr().get() + new_layout.size();

            // Up-aligning a pointer inside a chunk by `MIN_ALIGN` never overflows.
            let new_pos = up_align_usize_unchecked(end, MIN_ALIGN);

            // `is_last_and_allocated` returned true
            bump.chunk.get().guaranteed_allocated_unchecked().set_pos_addr(new_pos);

            Ok(NonNull::slice_from_raw_parts(old_ptr, new_layout.size()))
        } else {
            // Downwards bumping: the shrunk block must end where the old one
            // ended, so its start moves *up*; compute the new start and copy.
            let old_addr = old_ptr.addr();
            let old_end_addr = NonZeroUsize::new_unchecked(old_addr.get() + old_layout.size());

            let new_addr = bump_down(old_end_addr, new_layout.size(), new_layout.align().max(MIN_ALIGN));
            let new_addr = NonZeroUsize::new_unchecked(new_addr);
            let new_ptr = old_ptr.with_addr(new_addr);

            // Source range is `old_addr..old_addr + new_size`; it overlaps the
            // destination iff its end lies past the destination's start.
            let copy_src_end = NonZeroUsize::new_unchecked(old_addr.get() + new_layout.size());
            let copy_dst_start = new_addr;
            let overlaps = copy_src_end > copy_dst_start;

            if overlaps {
                old_ptr.copy_to(new_ptr, new_layout.size());
            } else {
                old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
            }

            // `is_last_and_allocated` returned true
            bump.chunk.get().guaranteed_allocated_unchecked().set_pos(new_ptr);

            Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
        }
    }
}
381
382#[inline(always)]
383fn align_fits(old_ptr: NonNull<u8>, _old_layout: Layout, new_layout: Layout) -> bool {
384    non_null::is_aligned_to(old_ptr, new_layout.align())
385}