//! `bump_scope` — `allocator_impl.rs`
//!
//! Implementations of the allocator operations (`allocate`, `deallocate`,
//! `grow`, `grow_zeroed`, `shrink`) in terms of a [`RawBump`].
1use core::{alloc::Layout, num::NonZeroUsize, ptr::NonNull};
2
3use crate::{
4    BaseAllocator, alloc::AllocError, bump_down, layout::CustomLayout, polyfill::non_null, raw_bump::RawBump,
5    settings::BumpAllocatorSettings, up_align_usize_unchecked,
6};
7
8#[inline(always)]
9pub fn allocate<A, S>(bump: &RawBump<A, S>, layout: Layout) -> Result<NonNull<[u8]>, AllocError>
10where
11    A: BaseAllocator<S::GuaranteedAllocated>,
12    S: BumpAllocatorSettings,
13{
14    Ok(NonNull::slice_from_raw_parts(
15        bump.alloc::<AllocError>(layout)?,
16        layout.size(),
17    ))
18}
19
20#[inline(always)]
21pub unsafe fn deallocate<A, S>(bump: &RawBump<A, S>, ptr: NonNull<u8>, layout: Layout)
22where
23    A: BaseAllocator<S::GuaranteedAllocated>,
24    S: BumpAllocatorSettings,
25{
26    if !S::DEALLOCATES {
27        return;
28    }
29
30    unsafe {
31        // free allocated space if this is the last allocation
32        if is_last(bump, ptr, layout) {
33            deallocate_assume_last(bump, ptr, layout);
34        }
35    }
36}
37
38#[inline(always)]
39unsafe fn deallocate_assume_last<A, S>(bump: &RawBump<A, S>, ptr: NonNull<u8>, layout: Layout)
40where
41    A: BaseAllocator<S::GuaranteedAllocated>,
42    S: BumpAllocatorSettings,
43{
44    if !S::DEALLOCATES {
45        return;
46    }
47
48    unsafe {
49        let chunk = bump.chunk.get().as_non_dummy_unchecked();
50        if S::UP {
51            chunk.set_pos_addr_and_align(ptr.addr().get());
52        } else {
53            let mut addr = ptr.addr().get();
54            addr += layout.size();
55            chunk.set_pos_addr_and_align(addr);
56        }
57    }
58}
59
60/// Checks if the memory block is the last one in the bump allocator.
61///
62/// The given memory block must have been allocated by some bump allocator,
63/// not necessarily the given one (but if it isn't then it will always return false).
64///
65/// This condition must be true to be able to do grow, shrink or deallocate in place.
66///
67/// If this function returns true then the bump is guaranteed to have a non-dummy
68/// chunk since a memory block is only returned from a bump allocator with a non-dummy
69/// chunk.
70#[inline(always)]
71unsafe fn is_last<A, S>(bump: &RawBump<A, S>, ptr: NonNull<u8>, layout: Layout) -> bool
72where
73    A: BaseAllocator<S::GuaranteedAllocated>,
74    S: BumpAllocatorSettings,
75{
76    if S::UP {
77        (unsafe { ptr.add(layout.size()) }) == bump.chunk.get().pos()
78    } else {
79        ptr == bump.chunk.get().pos()
80    }
81}
82
/// Grows the memory block denoted by `old_ptr`/`old_layout` to `new_layout`,
/// reusing the existing space in place when possible and falling back to a
/// fresh allocation (with a copy of the old contents) otherwise.
///
/// # Safety
///
/// `old_ptr` must denote a block currently allocated with `old_layout`, and
/// `new_layout.size()` must be greater than or equal to `old_layout.size()`.
#[inline(always)]
pub unsafe fn grow<A, S>(
    bump: &RawBump<A, S>,
    old_ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    debug_assert!(
        new_layout.size() >= old_layout.size(),
        "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
    );

    unsafe {
        if S::UP {
            // NOTE(review): the non-short-circuiting `&` looks intentional (both
            // operands are cheap, avoids a branch) — confirm before "fixing" to `&&`.
            if is_last(bump, old_ptr, old_layout) & align_fits(old_ptr, old_layout, new_layout) {
                // We may be able to grow in place! Just need to check if there is enough space.

                // `is_last` returned true, which guarantees a non-dummy
                let chunk = bump.chunk.get().as_non_dummy_unchecked();
                let chunk_end = chunk.content_end();
                let remaining = chunk_end.addr().get() - old_ptr.addr().get();

                if new_layout.size() <= remaining {
                    // There is enough space! We will grow in place. Just need to update the bump pointer.

                    let old_addr = old_ptr.addr();

                    // Up-aligning a pointer inside a chunks content by `MIN_ALIGN` never overflows.
                    let new_pos = up_align_usize_unchecked(old_addr.get() + new_layout.size(), S::MIN_ALIGN);

                    chunk.set_pos_addr(new_pos);

                    Ok(NonNull::slice_from_raw_parts(old_ptr, new_layout.size()))
                } else {
                    // The current chunk doesn't have enough space to allocate this layout. We need to allocate in another chunk.
                    let new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
                    old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                }
            } else {
                // We can't grow in place. We have to make a new allocation.
                let new_ptr = bump.alloc::<AllocError>(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        } else {
            if is_last(bump, old_ptr, old_layout) {
                // `is_last` returned true, which guarantees a non-dummy
                let chunk = bump.chunk.get().as_non_dummy_unchecked();

                // We may be able to reuse the currently allocated space. Just need to check if the current chunk has enough space for that.
                let additional_size = new_layout.size() - old_layout.size();

                // When bumping downwards the block grows towards lower addresses, so
                // the candidate start address is bumped *down* by the extra size and
                // re-aligned for the new layout.
                let old_addr = old_ptr.addr();
                let new_addr = bump_down(old_addr, additional_size, new_layout.align().max(S::MIN_ALIGN));

                let very_start = chunk.content_start().addr();

                if new_addr >= very_start.get() {
                    // There is enough space in the current chunk! We will reuse the allocated space.

                    let new_addr = NonZeroUsize::new_unchecked(new_addr);
                    let new_addr_end = new_addr.get() + new_layout.size();

                    let new_ptr = old_ptr.with_addr(new_addr);

                    // Check if the regions don't overlap so we may use the faster `copy_nonoverlapping`.
                    if new_addr_end < old_addr.get() {
                        old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    } else {
                        old_ptr.copy_to(new_ptr, old_layout.size());
                    }

                    // `is_last_and_allocated` returned true
                    chunk.set_pos_addr(new_addr.get());

                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                } else {
                    // The current chunk doesn't have enough space to allocate this layout. We need to allocate in another chunk.
                    let new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
                    old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                    Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
                }
            } else {
                // We can't reuse the allocated space. We have to make a new allocation.
                let new_ptr = bump.alloc::<AllocError>(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        }
    }
}
179
180#[inline(always)]
181pub unsafe fn grow_zeroed<A, S>(
182    bump: &RawBump<A, S>,
183    old_ptr: NonNull<u8>,
184    old_layout: Layout,
185    new_layout: Layout,
186) -> Result<NonNull<[u8]>, AllocError>
187where
188    A: BaseAllocator<S::GuaranteedAllocated>,
189    S: BumpAllocatorSettings,
190{
191    unsafe {
192        let new_ptr = grow(bump, old_ptr, old_layout, new_layout)?;
193
194        let delta = new_layout.size() - old_layout.size();
195        new_ptr.cast::<u8>().add(old_layout.size()).write_bytes(0, delta);
196
197        Ok(new_ptr)
198    }
199}
200
/// This shrink implementation always tries to reuse old memory if it can.
///
/// That's different to bumpalo's shrink implementation, which only shrinks if it can do so with `copy_nonoverlapping`
/// and doesn't attempt to recover memory if the alignment doesn't fit.
///
/// # Safety
///
/// `old_ptr` must denote a block currently allocated with `old_layout`, and
/// `new_layout.size()` must be smaller than or equal to `old_layout.size()`.
#[inline(always)]
pub unsafe fn shrink<A, S>(
    bump: &RawBump<A, S>,
    old_ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    /// Called when `new_layout` doesn't fit alignment.
    #[cold]
    #[inline(never)]
    unsafe fn shrink_unfit<A, S>(
        bump: &RawBump<A, S>,
        old_ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError>
    where
        A: BaseAllocator<S::GuaranteedAllocated>,
        S: BumpAllocatorSettings,
    {
        unsafe {
            if S::SHRINKS && is_last(bump, old_ptr, old_layout) {
                // Save the bump position so it can be restored if the in-chunk
                // reallocation below fails.
                let old_pos = bump.chunk.get().pos();

                // Release the old block first so the new allocation can reuse its space.
                // The old contents stay readable until they are copied below.
                deallocate_assume_last(bump, old_ptr, old_layout);

                let overlaps;
                let new_ptr;

                if let Some(in_chunk) = bump.chunk.get().alloc(CustomLayout(new_layout)) {
                    new_ptr = in_chunk;
                    // The new block may overlap the old one since the old space was
                    // just released; the overlap direction depends on bump direction.
                    overlaps = if S::UP {
                        let old_ptr_end = old_ptr.add(new_layout.size());
                        old_ptr_end > new_ptr
                    } else {
                        let new_ptr_end = new_ptr.add(new_layout.size());
                        new_ptr_end > old_ptr
                    }
                } else {
                    // In-chunk allocation failed: undo the deallocation before
                    // falling back to another chunk, so the old block stays valid.
                    // `is_last` returned true, which guarantees a non-dummy chunk
                    bump.chunk.get().as_non_dummy_unchecked().set_pos(old_pos);

                    new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
                    overlaps = false;
                }

                if overlaps {
                    old_ptr.copy_to(new_ptr, new_layout.size());
                } else {
                    old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
                }

                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            } else {
                // Not the last allocation (or shrinking disabled): no memory can be
                // recovered, so just allocate fresh and copy.
                let new_ptr = bump.alloc::<AllocError>(new_layout)?;
                old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
                Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
            }
        }
    }

    debug_assert!(
        new_layout.size() <= old_layout.size(),
        "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
    );

    unsafe {
        if !align_fits(old_ptr, old_layout, new_layout) {
            return shrink_unfit(bump, old_ptr, old_layout, new_layout);
        }

        // if that's not the last allocation, there is nothing we can do
        if !S::SHRINKS || !is_last(bump, old_ptr, old_layout) {
            // we return the size of the old layout
            return Ok(NonNull::slice_from_raw_parts(old_ptr, old_layout.size()));
        }

        if S::UP {
            // Bumping upwards: the block keeps its start address; only the bump
            // position is pulled back to the new (smaller) end.
            let end = old_ptr.addr().get() + new_layout.size();

            // Up-aligning a pointer inside a chunk by `MIN_ALIGN` never overflows.
            let new_pos = up_align_usize_unchecked(end, S::MIN_ALIGN);

            // `is_last` returned true, which guarantees a non-dummy
            bump.chunk.get().as_non_dummy_unchecked().set_pos_addr(new_pos);

            Ok(NonNull::slice_from_raw_parts(old_ptr, new_layout.size()))
        } else {
            // Bumping downwards: the block's *end* moves down, so the contents may
            // need to be shifted to the new, higher start address.
            let old_addr = old_ptr.addr();
            let old_end_addr = NonZeroUsize::new_unchecked(old_addr.get() + old_layout.size());

            let new_addr = bump_down(old_end_addr, new_layout.size(), new_layout.align().max(S::MIN_ALIGN));
            let new_addr = NonZeroUsize::new_unchecked(new_addr);
            let new_ptr = old_ptr.with_addr(new_addr);

            let copy_src_end = NonZeroUsize::new_unchecked(old_addr.get() + new_layout.size());
            let copy_dst_start = new_addr;
            let overlaps = copy_src_end > copy_dst_start;

            if overlaps {
                old_ptr.copy_to(new_ptr, new_layout.size());
            } else {
                old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
            }

            // `is_last` returned true, which guarantees a non-dummy
            bump.chunk.get().as_non_dummy_unchecked().set_pos(new_ptr);

            Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
        }
    }
}
320
321#[inline(always)]
322fn align_fits(old_ptr: NonNull<u8>, _old_layout: Layout, new_layout: Layout) -> bool {
323    non_null::is_aligned_to(old_ptr, new_layout.align())
324}