static_alloc/bump.rs

//! The bump allocator.
//!
//! Basics of usage and the connection between the structs are discussed in the documentation of
//! [`Bump`] itself.
//!
//! [`Bump`]: struct.Bump.html
use core::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;
use core::mem::{self, MaybeUninit};
use core::ptr::{NonNull, null_mut};

#[cfg(not(feature = "polyfill"))]
use core::sync::atomic::{AtomicUsize, Ordering};

#[cfg(feature = "polyfill")]
use atomic_polyfill::{AtomicUsize, Ordering};

use crate::leaked::LeakBox;
use alloc_traits::{AllocTime, LocalAlloc, NonZeroLayout};

/// Allocator drawing from an inner, statically sized memory resource.
///
/// The type parameter `T` is used only to annotate the required size and alignment of the region
/// and has no further use. Note that in particular there is no safe way to retrieve or unwrap an
/// inner instance even if the `Bump` was not constructed as a shared global static. Nevertheless,
/// the choice of type makes it easier to reason about potentially required extra space due to
/// alignment padding.
///
/// This type is *always* `Sync` to allow creating `static` instances. This works only because
/// there is no actual instance of `T` contained inside.
///
/// ## Usage as global allocator
///
/// You can use the stable rust attribute to use an instance of this type as the global allocator.
///
/// ```rust,no_run
/// use static_alloc::Bump;
///
/// #[global_allocator]
/// static A: Bump<[u8; 1 << 16]> = Bump::uninit();
///
/// fn main() { }
/// ```
///
/// Take care, some runtime features of Rust will allocate some memory before or after your own
/// code. In particular, it was found to be tricky to predict the usage of the built-in test
/// framework which seemingly allocates some structures per test.
///
/// ## Usage as a non-dropping local allocator
///
/// It is also possible to use a `Bump` as a stack local allocator or a specialized allocator. The
/// interface offers some utilities for allocating values from references to shared or unshared
/// instances directly. **Note**: this will never call the `Drop` implementation of the allocated
/// type. In particular, it would almost surely not be safe to `Pin` the values, except if there is
/// a guarantee for the `Bump` itself to not be deallocated either.
///
/// ```rust
/// use static_alloc::Bump;
///
/// let local: Bump<[u64; 3]> = Bump::uninit();
///
/// let one = local.leak(0_u64).unwrap();
/// let two = local.leak(1_u64).unwrap();
/// let three = local.leak(2_u64).unwrap();
///
/// // Exhausted the space.
/// assert!(local.leak(3_u64).is_err());
/// ```
///
/// Mind that the supplied type parameter influences *both* size and alignment, so a `[u8; 24]`
/// does not guarantee being able to allocate three `u64` values. It happens to work on most
/// targets, which place the storage at a minimum alignment of 16, but that is not guaranteed.
///
/// ```rust
/// # use static_alloc::Bump;
/// // Just enough space for `u128` but no alignment requirement.
/// let local: Bump<[u8; 16]> = Bump::uninit();
///
/// // May or may not return an err.
/// let _ = local.leak(0_u128);
/// ```
///
/// Instead use the type parameter to `Bump` as a hint for the best alignment.
///
/// ```rust
/// # use static_alloc::Bump;
/// // Enough space and align for `u128`.
/// let local: Bump<[u128; 1]> = Bump::uninit();
///
/// assert!(local.leak(0_u128).is_ok());
/// ```
///
/// ## Usage as a (local) bag of bits
///
/// It is of course entirely possible to use a local instance instead of a single global allocator.
/// For example you could utilize the pointer interface directly to build a `#[no_std]` dynamic
/// data structure in an environment without `extern crate alloc`. This feature was the original
/// motivation behind the crate but no such data structures are provided here so a quick sketch of
/// the idea must do:
///
/// ```
/// use core::alloc;
/// use static_alloc::Bump;
///
/// #[repr(align(4096))]
/// struct PageTable {
///     // some non-trivial type.
/// #   _private: [u8; 4096],
/// }
///
/// impl PageTable {
///     /// Avoid stack allocation of the full struct.
///     pub unsafe fn new(into: *mut u8) -> &'static mut Self {
///         // ...
/// #       &mut *(into as *mut Self)
///     }
/// }
///
/// // Allocator for pages for page tables. Provides 64 pages. When the
/// // program/kernel is provided as an ELF the bootloader reserves
/// // memory for us as part of the loading process that we can use
/// // purely for page tables. Replaces asm `paging: .BYTE <size>;`
/// static Paging: Bump<[u8; 1 << 18]> = Bump::uninit();
///
/// fn main() {
///     let layout = alloc::Layout::new::<PageTable>();
///     let memory = Paging.alloc(layout).unwrap();
///     let table = unsafe {
///         PageTable::new(memory.as_ptr())
///     };
/// }
/// ```
///
/// A similar structure would of course work to allocate some non-`'static` objects from a
/// temporary `Bump`.
///
/// ## More insights
///
/// The ordering used is currently `SeqCst`. This enforces a single global sequence of observed
/// effects on the slab level. The author is fully aware that this is not strictly necessary. In
/// fact, even `AcqRel` may not be required as the monotonic bump allocator does not synchronize
/// other memory itself. If you bring forward a PR with a formalized reasoning for relaxing the
/// requirements to `Relaxed` (llvm `Monotonic`) it will be greatly appreciated (even more if you
/// demonstrate performance gains).
///
/// WIP: slices.
pub struct Bump<T> {
    /// While in shared state, a monotonic, atomic counter of consumed bytes.
    ///
    /// While shared it is only mutated in `bump` which guarantees its invariants. In the mutable
    /// reference state it is modified arbitrarily.
    consumed: AtomicUsize,

    /// Outer unsafe cell due to thread safety.
    /// Inner `MaybeUninit` because padding may destroy the initialization invariant
    /// on the bytes themselves, and hence drop etc. must not assume they are initialized.
    storage: UnsafeCell<MaybeUninit<T>>,
}

/// A value could not be moved into a slab allocation.
///
/// The error contains the value for which the allocation failed. Storing the value in the error
/// keeps it alive in all cases. This prevents the `Drop` implementation from running and preserves
/// resources which may otherwise not be trivial to restore.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LeakError<T> {
    val: T,
    failure: Failure,
}

/// Specifies an amount of consumed space of a slab.
///
/// Each allocation of the `Bump` increases the current level, as allocations must not be empty. By
/// ensuring that an allocation is performed at a specific level it is thus possible to check that
/// multiple allocations happened in succession without other intermediate allocations. This
/// ability in turn makes it possible to group allocations together, for example to initialize a
/// `#[repr(C)]` struct member-by-member or to extend a slice.
///
/// ## Usage
///
/// The main use is successively allocating a slice without requiring all data to be present at
/// once. Other similar interfaces often require an internal locking mechanism but `Level` leaves
/// the choice to the user. This is not yet encapsulated in a safe API, but `Level` makes it easy to
/// reason about.
///
/// See [`MemBump::get_unchecked`][crate::unsync::MemBump] for redeeming a value.
///
/// ## Unsound usage.
///
/// FIXME: the below is UB because we don't gain provenance over the complete array, only each
/// individual element. Instead, we must derive a new pointer from the allocator!
///
/// ```
/// # use core::slice;
/// # use static_alloc::bump::{Level, Bump};
/// static BUMP: Bump<[u64; 4]> = Bump::uninit();
///
/// /// Gathers as much data as possible.
/// ///
/// /// An arbitrary amount of data, can't stack allocate!
/// fn gather_data(mut iter: impl Iterator<Item=u64>) -> &'static mut [u64] {
///     let first = match iter.next() {
///         Some(item) => item,
///         None => return &mut [],
///     };
///
///     let mut level: Level = BUMP.level();
///     let mut begin: *mut u64;
///     let mut count;
///
///     match BUMP.leak_at(first, level) {
///         Ok((first, first_level)) => {
///             begin = first;
///             level = first_level;
///             count = 1;
///         },
///         _ => return &mut [],
///     }
///
///     let _ = iter.try_for_each(|value: u64| {
///         match BUMP.leak_at(value, level) {
///             Err(err) => return Err(err),
///             Ok((_, new_level)) => level = new_level,
///         };
///         count += 1;
///         Ok(())
///     });
///
///     unsafe {
///         // SAFETY: all `count` allocations are contiguous, begin is well aligned and no
///         // reference is currently pointing at any of the values. The lifetime is `'static` as
///         // the BUMP itself is static.
///         slice::from_raw_parts_mut(begin, count)
///     }
/// }
///
/// fn main() {
///     // There is no other thread running, so this succeeds.
///     let slice = gather_data(0..=3);
///     assert_eq!(slice, [0, 1, 2, 3]);
/// }
/// ```
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Level(pub(crate) usize);

/// A successful allocation and current [`Level`].
///
/// [`Level`]: struct.Level.html
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Allocation<'a, T=u8> {
    /// Pointer to the uninitialized region with specified layout.
    pub ptr: NonNull<T>,

    /// The lifetime of the allocation.
    pub lifetime: AllocTime<'a>,

    /// The observed amount of consumed bytes after the allocation.
    pub level: Level,
}

/// Reason for a failed allocation at an exact [`Level`].
///
/// [`Level`]: struct.Level.html
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Failure {
    /// No space left for that allocation.
    Exhausted,

    /// The allocation would not have used the expected base location.
    ///
    /// Reports the location that was observed. When only levels from the same slab are used (which
    /// should normally be the case) then the observed level is monotonically increasing.
    Mismatch {
        /// The observed level that was different from the requested one.
        observed: Level,
    },
}

impl<T> Bump<T> {
    /// Make a new allocatable slab of a certain byte size and alignment.
    ///
    /// The storage will contain uninitialized bytes.
    pub const fn uninit() -> Self {
        Bump {
            consumed: AtomicUsize::new(0),
            storage: UnsafeCell::new(MaybeUninit::uninit()),
        }
    }

    /// Make a new allocatable slab of a certain byte size and alignment.
    ///
    /// The storage will contain zeroed bytes. This is not *yet* available
    /// as a `const fn`, which currently limits its potential usefulness,
    /// but there is no good reason not to provide it regardless.
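    ///
    /// A minimal usage sketch; the concrete buffer size here is arbitrary:
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// // The backing bytes start out zeroed instead of uninitialized.
    /// let local: Bump<[u8; 64]> = Bump::zeroed();
    ///
    /// let one = local.leak(1u8).unwrap();
    /// assert_eq!(*one, 1);
    /// ```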
    pub fn zeroed() -> Self {
        Bump {
            consumed: AtomicUsize::new(0),
            storage: UnsafeCell::new(MaybeUninit::zeroed()),
        }
    }

    /// Make a new allocatable slab provided with some bytes it can hand out.
    ///
    /// Note that `storage` will never be dropped and there is no way to get it back.
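    ///
    /// A short sketch of one way to use this constructor; the initial array contents only serve
    /// as raw storage:
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// let local = Bump::new([0u32; 4]);
    ///
    /// let one = local.leak(1u32).unwrap();
    /// assert_eq!(*one, 1);
    /// ```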
    pub const fn new(storage: T) -> Self {
        Bump {
            consumed: AtomicUsize::new(0),
            storage: UnsafeCell::new(MaybeUninit::new(storage)),
        }
    }

    /// Reset the bump allocator.
    ///
    /// Requires a mutable reference, as no allocations can be active when doing it. This behaves
    /// as if a fresh instance was assigned but it does not overwrite the bytes in the backing
    /// storage. (You can unsafely rely on this).
    ///
    /// ## Usage
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// let mut stack_buf = Bump::<usize>::uninit();
    ///
    /// let bytes = stack_buf.leak(0usize.to_be_bytes()).unwrap();
    /// // Now the bump allocator is full.
    /// assert!(stack_buf.leak(0u8).is_err());
    ///
    /// // We can reuse if we are okay with forgetting the previous value.
    /// stack_buf.reset();
    /// let val = stack_buf.leak(0usize).unwrap();
    /// ```
    ///
    /// Trying to use the previous value does not work, as the `Bump` is still borrowed. Note that
    /// any user unsafely tracking the lifetime must also ensure this through proper lifetimes that
    /// guarantee that borrows are alive for appropriate times.
    ///
    /// ```compile_fail
    /// // error[E0502]: cannot borrow `stack_buf` as mutable because it is also borrowed as immutable
    /// # use static_alloc::Bump;
    /// let mut stack_buf = Bump::<usize>::uninit();
    ///
    /// let bytes = stack_buf.leak(0usize).unwrap();
    /// //          --------- immutably borrow occurs here
    /// stack_buf.reset();
    /// // ^^^^^^^ mutable borrow occurs here.
    /// let other = stack_buf.leak(0usize).unwrap();
    ///
    /// *bytes += *other;
    /// // ------------- immutable borrow later used here
    /// ```
    pub fn reset(&mut self) {
        *self.consumed.get_mut() = 0;
    }

    /// Allocate a region of memory.
    ///
    /// This is a safe alternative to [GlobalAlloc::alloc](#impl-GlobalAlloc).
    ///
    /// # Panics
    /// This function will panic if the requested layout has a size of `0`. For a
    /// `GlobalAlloc`, requesting such a layout is explicitly forbidden and would permit any
    /// behaviour, but we strictly check it here instead.
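    ///
    /// A sketch of the raw interface, with an arbitrarily sized buffer. The returned pointer
    /// refers to memory that the caller still has to initialize:
    ///
    /// ```
    /// # use core::alloc::Layout;
    /// # use static_alloc::Bump;
    /// let local: Bump<[u8; 64]> = Bump::uninit();
    ///
    /// let layout = Layout::new::<u32>();
    /// let ptr = local.alloc(layout).unwrap();
    /// // The pointer is aligned and valid for `layout` but not yet initialized.
    /// unsafe { ptr.cast::<u32>().as_ptr().write(42) };
    /// ```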
    pub fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
        Some(self.try_alloc(layout)?.ptr)
    }

    /// Try to allocate some layout with a precise base location.
    ///
    /// The base location is the currently consumed byte count, without correction for the
    /// alignment of the allocation. This will succeed if it can be allocated exactly at the
    /// expected location.
    ///
    /// # Panics
    /// This function may panic if the provided `level` is from a different slab.
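    ///
    /// A sketch of how a stale level is reported; the buffer size is arbitrary:
    ///
    /// ```
    /// # use core::alloc::Layout;
    /// # use static_alloc::bump::{Bump, Failure};
    /// let local: Bump<[u8; 64]> = Bump::uninit();
    /// let layout = Layout::new::<u32>();
    ///
    /// let base = local.level();
    /// let first = local.alloc_at(layout, base).unwrap();
    ///
    /// // Reusing the old level fails and reports the level actually observed.
    /// match local.alloc_at(layout, base) {
    ///     Err(Failure::Mismatch { observed }) => assert_eq!(observed, first.level),
    ///     _ => unreachable!("an allocation happened since `base` was observed"),
    /// }
    /// ```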
    pub fn alloc_at(&self, layout: Layout, level: Level)
        -> Result<Allocation, Failure>
    {
        let Allocation { ptr, lifetime, level } = self.try_alloc_at(layout, level.0)?;

        Ok(Allocation {
            ptr: ptr.cast(),
            lifetime,
            level,
        })
    }

    /// Get an allocation with detailed layout.
    ///
    /// Provides an [`Allocation`] wrapping several aspects of initialization in a safe interface,
    /// bound by the lifetime of the reference to the allocator.
    ///
    /// [`Allocation`]: struct.Allocation.html
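    ///
    /// A small sketch of the interface; the buffer size is arbitrary:
    ///
    /// ```
    /// # use core::alloc::Layout;
    /// # use static_alloc::Bump;
    /// let local: Bump<[u8; 32]> = Bump::uninit();
    ///
    /// let alloc = local.get_layout(Layout::new::<u16>()).unwrap();
    /// // The allocation records the level observed right after it was made.
    /// assert_eq!(alloc.level, local.level());
    /// ```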
    pub fn get_layout(&self, layout: Layout)
        -> Option<Allocation<'_>>
    {
        self.try_alloc(layout)
    }

    /// Get an allocation with detailed layout at a specific level.
    ///
    /// Provides an [`Allocation`] wrapping several aspects of initialization in a safe interface,
    /// bound by the lifetime of the reference to the allocator.
    ///
    /// Since the underlying allocation is the same, it would be `unsafe` but justified to fuse
    /// this allocation with the preceding or succeeding one.
    ///
    /// [`Allocation`]: struct.Allocation.html
    pub fn get_layout_at(&self, layout: Layout, at: Level)
        -> Result<Allocation<'_>, Failure>
    {
        self.try_alloc_at(layout, at.0)
    }

    /// Get an allocation for a specific type.
    ///
    /// It is not yet initialized but provides a safe interface for that initialization.
    ///
    /// ## Usage
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// use core::cell::{Ref, RefCell};
    ///
    /// let slab: Bump<[Ref<'static, usize>; 1]> = Bump::uninit();
    /// let data = RefCell::new(0xff);
    ///
    /// // We can place a `Ref` here but we did not yet.
    /// let alloc = slab.get::<Ref<usize>>().unwrap();
    /// let cell_ref = unsafe {
    ///     alloc.leak(data.borrow())
    /// };
    ///
    /// assert_eq!(**cell_ref, 0xff);
    /// ```
    pub fn get<V>(&self) -> Option<Allocation<V>> {
        if mem::size_of::<V>() == 0 {
            return Some(self.zst_fake_alloc());
        }

        let layout = Layout::new::<V>();
        let Allocation { ptr, lifetime, level, } = self.try_alloc(layout)?;

        Some(Allocation {
            ptr: ptr.cast(),
            lifetime,
            level,
        })
    }

    /// Get an allocation for a specific type at a specific level.
    ///
    /// See [`get`] for usage.
    ///
    /// [`get`]: #method.get
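    ///
    /// A sketch of chaining two typed allocations through their levels:
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// let local: Bump<[u16; 2]> = Bump::uninit();
    ///
    /// let level = local.level();
    /// let first = local.get_at::<u16>(level).unwrap();
    /// // The follow-up allocation is placed directly behind the first one.
    /// let second = local.get_at::<u16>(first.level).unwrap();
    /// # let _ = second;
    /// ```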
    pub fn get_at<V>(&self, level: Level) -> Result<Allocation<V>, Failure> {
        if mem::size_of::<V>() == 0 {
            let fake = self.zst_fake_alloc();
            // Note: zst_fake_alloc is a noop on the level, we may as well check after.
            if fake.level != level {
                return Err(Failure::Mismatch {
                    observed: fake.level,
                });
            }
            return Ok(fake);
        }

        let layout = Layout::new::<V>();
        let Allocation { ptr, lifetime, level, } = self.try_alloc_at(layout, level.0)?;

        Ok(Allocation {
            // It has exactly size and alignment for `V` as requested.
            ptr: ptr.cast(),
            lifetime,
            level,
        })
    }

    /// Move a value into an owned allocation.
    ///
    /// For safely initializing a value _after_ a successful allocation, see [`LeakBox::write`].
    ///
    /// [`LeakBox::write`]: ../leaked/struct.LeakBox.html#method.write
    ///
    /// ## Usage
    ///
    /// This can be used to push the value into a caller provided stack buffer where it lives
    /// longer than the current stack frame. For example, you might create a linked list with a
    /// dynamic number of values living in the frame below while still being dropped properly. This
    /// is impossible to do with a return value.
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// # use static_alloc::leaked::LeakBox;
    /// fn rand() -> usize { 4 }
    ///
    /// enum Chain<'buf, T> {
    ///    Tail,
    ///    Link(T, LeakBox<'buf, Self>),
    /// }
    ///
    /// fn make_chain<Buf, T>(buf: &Bump<Buf>, mut new_node: impl FnMut() -> T)
    ///     -> Option<Chain<'_, T>>
    /// {
    ///     let count = rand();
    ///     let mut chain = Chain::Tail;
    ///     for _ in 0..count {
    ///         let node = new_node();
    ///         chain = Chain::Link(node, buf.leak_box(chain)?);
    ///     }
    ///     Some(chain)
    /// }
    ///
    /// struct Node (usize);
    /// impl Drop for Node {
    ///     fn drop(&mut self) {
    ///         println!("Dropped {}", self.0);
    ///     }
    /// }
    /// let mut counter = 0..;
    /// let new_node = || Node(counter.next().unwrap());
    ///
    /// let buffer: Bump<[u8; 128]> = Bump::uninit();
    /// let head = make_chain(&buffer, new_node).unwrap();
    ///
    /// // Prints the messages in reverse order.
    /// // Dropped 3
    /// // Dropped 2
    /// // Dropped 1
    /// // Dropped 0
    /// drop(head);
    /// ```
    pub fn leak_box<V>(&self, val: V) -> Option<LeakBox<'_, V>> {
        let Allocation { ptr, lifetime, .. } = self.get::<V>()?;
        Some(unsafe {
            LeakBox::new_from_raw_non_null(ptr, val, lifetime)
        })
    }

    /// Move a value into an owned allocation.
    ///
    /// See [`leak_box`] for usage.
    ///
    /// [`leak_box`]: #method.leak_box
    pub fn leak_box_at<V>(&self, val: V, level: Level) -> Result<LeakBox<'_, V>, Failure> {
        let Allocation { ptr, lifetime, .. } = self.get_at::<V>(level)?;
        Ok(unsafe {
            LeakBox::new_from_raw_non_null(ptr, val, lifetime)
        })
    }

    /// Observe the current level.
    ///
    /// Keep in mind that concurrent usage of the same slab may modify the level before you are
    /// able to use it in `alloc_at`. Calling this method also provides no other guarantees on
    /// synchronization of memory accesses, only that the values observed by the caller are a
    /// monotonically increasing sequence while a shared reference exists.
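    ///
    /// A small sketch; each successful allocation strictly increases the observed level:
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// let local: Bump<[u8; 8]> = Bump::uninit();
    ///
    /// let before = local.level();
    /// local.leak(1u8).unwrap();
    /// assert!(local.level() > before);
    /// ```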
    pub fn level(&self) -> Level {
        Level(self.consumed.load(Ordering::SeqCst))
    }

    fn try_alloc(&self, layout: Layout)
        -> Option<Allocation<'_>>
    {
        // Guess zero; if the real value differs we learn it from the `Mismatch` error and retry.
        let mut consumed = 0;
        loop {
            match self.try_alloc_at(layout, consumed) {
                Ok(alloc) => return Some(alloc),
                Err(Failure::Exhausted) => return None,
                Err(Failure::Mismatch{ observed }) => consumed = observed.0,
            }
        }
    }

    /// Try to allocate some layout with a precise base location.
    ///
    /// The base location is the currently consumed byte count, without correction for the
    /// alignment of the allocation. This will succeed if it can be allocated exactly at the
    /// expected location.
    ///
    /// # Panics
    /// This function panics if `expect_consumed` is larger than `length`.
    fn try_alloc_at(&self, layout: Layout, expect_consumed: usize)
        -> Result<Allocation<'_>, Failure>
    {
        assert!(layout.size() > 0);
        let length = mem::size_of::<T>();
        let base_ptr = self.storage.get()
            as *mut T
            as *mut u8;

        let alignment = layout.align();
        let requested = layout.size();

        // Ensure no overflows when calculating offsets within.
        assert!(expect_consumed <= length);

        let available = length.checked_sub(expect_consumed).unwrap();
        let ptr_to = base_ptr.wrapping_add(expect_consumed);
        let offset = ptr_to.align_offset(alignment);

        if requested > available.saturating_sub(offset) {
            return Err(Failure::Exhausted); // exhausted
        }

        // `size` can not be zero, saturation will thus always make this true.
        assert!(offset < available);
        let at_aligned = expect_consumed.checked_add(offset).unwrap();
        let new_consumed = at_aligned.checked_add(requested).unwrap();
        // new_consumed
        //    = expect_consumed + offset + requested  [lines above]
        //   <= expect_consumed + available  [otherwise we returned Exhausted above]
        //   <= length  [checked by the assert at the top]
        // So it's ok to store `new_consumed` into `consumed`.
        assert!(new_consumed <= length);
        assert!(at_aligned < length);

        // Try to actually allocate.
        match self.bump(expect_consumed, new_consumed) {
            Ok(()) => (),
            Err(observed) => {
                // Someone else was faster, if you want it then recalculate again.
                return Err(Failure::Mismatch { observed: Level(observed) });
            },
        }

        let aligned = unsafe {
            // SAFETY:
            // * `0 <= at_aligned < length` in bounds as checked above.
            (base_ptr as *mut u8).add(at_aligned)
        };

        Ok(Allocation {
            ptr: NonNull::new(aligned).unwrap(),
            lifetime: AllocTime::default(),
            level: Level(new_consumed),
        })
    }

    /// Allocate a value for the lifetime of the allocator.
    ///
    /// The value is leaked in the sense that
    ///
    /// 1. the drop implementation of the allocated value is never called;
    /// 2. reusing the memory for another allocation in the same `Bump` requires manual unsafe code
    ///    to handle dropping and reinitialization.
    ///
    /// However, it does not mean that the underlying memory used for the allocated value is never
    /// reclaimed. If the `Bump` itself is a stack value then it will get reclaimed together with
    /// it.
    ///
    /// ## Safety notice
    ///
    /// It is important to understand that the allocation must not be reused for the *whole
    /// lifetime* of the returned reference, otherwise the behaviour is undefined. In particular,
    /// dropping the allocation in-place while the reference is still within its lifetime comes
    /// with the exact same unsafety caveats as [`ManuallyDrop::drop`].
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// #[derive(Debug, Default)]
    /// struct FooBar {
    ///     // ...
    /// # _private: [u8; 1],
    /// }
    ///
    /// let local: Bump<[FooBar; 3]> = Bump::uninit();
    /// let one = local.leak(FooBar::default()).unwrap();
    ///
    /// // Dangerous but justifiable.
    /// let one = unsafe {
    ///     // Ensures there is no current mutable borrow.
    ///     core::ptr::drop_in_place(&mut *one);
    /// };
    /// ```
    ///
    /// ## Usage
    ///
    /// ```
    /// use static_alloc::Bump;
    ///
    /// let local: Bump<[u64; 3]> = Bump::uninit();
    ///
    /// let one = local.leak(0_u64).unwrap();
    /// assert_eq!(*one, 0);
    /// *one = 42;
    /// ```
    ///
    /// ## Limitations
    ///
    /// Only sized values can be allocated in this manner for now; unsized values are blocked on
    /// stabilization of [`ptr::slice_from_raw_parts`]. We cannot otherwise get a fat pointer to
    /// the allocated region.
    ///
    /// [`ptr::slice_from_raw_parts`]: https://github.com/rust-lang/rust/issues/36925
    /// [`ManuallyDrop::drop`]: https://doc.rust-lang.org/beta/std/mem/struct.ManuallyDrop.html#method.drop
    ///
    /// TODO: will be deprecated sooner or later in favor of a method that does not move the
    /// resource on failure.
    // #[deprecated = "Use leak_box and initialize it with the value. This does not move the value in the failure case."]
    pub fn leak<V>(&self, val: V) -> Result<&mut V, LeakError<V>> {
        match self.get::<V>() {
            // SAFETY: Just allocated this for a `V`.
            Some(alloc) => Ok(unsafe { alloc.leak(val) }),
            None => Err(LeakError::new(val, Failure::Exhausted)),
        }
    }

    /// Allocate a value with a precise location.
    ///
    /// See [`leak`] for basics on allocation of values.
    ///
    /// The level is an identifier for a base location (more at [`level`]). This will succeed if it
    /// can be allocated exactly at the expected location.
    ///
    /// This method will return the new level of the slab allocator. A next allocation at the
    /// returned level will be placed next to this allocation, only separated by necessary padding
    /// from alignment. In particular, this is the same strategy as applied for the placement of
    /// `#[repr(C)]` struct members. (Except for the final padding at the last member to the full
    /// struct alignment.)
    ///
    /// ## Usage
    ///
    /// ```
    /// use static_alloc::Bump;
    ///
    /// let local: Bump<[u64; 3]> = Bump::uninit();
    ///
    /// let base = local.level();
    /// let (one, level) = local.leak_at(1_u64, base).unwrap();
    /// // Will panic when an allocation happens in between.
    /// let (two, _) = local.leak_at(2_u64, level).unwrap();
    ///
    /// assert_eq!((one as *const u64).wrapping_offset(1), two as *const u64);
    /// ```
    ///
    /// [`leak`]: #method.leak
    /// [`level`]: #method.level
    ///
    /// TODO: will be deprecated sooner or later in favor of a method that does not move the
    /// resource on failure.
    ///
    // #[deprecated = "Use leak_box_at and initialize it with the value. This does not move the value in the failure case."]
    pub fn leak_at<V>(&self, val: V, level: Level)
        -> Result<(&mut V, Level), LeakError<V>>
    {
        let alloc = match self.get_at::<V>(level) {
            Ok(alloc) => alloc,
            Err(err) => return Err(LeakError::new(val, err)),
        };

        // SAFETY: Just allocated this for a `V`.
        let level = alloc.level;
        let mutref = unsafe { alloc.leak(val) };
        Ok((mutref, level))
    }

    /// 'Allocate' a ZST.
    fn zst_fake_alloc<Z>(&self) -> Allocation<'_, Z> {
        Allocation::for_zst(self.level())
    }

    /// Try to bump the monotonic, atomic consume counter.
    ///
    /// This is the only place doing shared modification to `self.consumed`.
    ///
    /// Returns `Ok` if the consume counter was as expected. Monotonicity and atomicity guarantee
    /// to the caller that no overlapping range can succeed as well. This allocates the range to
    /// the caller.
    ///
    /// Returns the observed consume counter in an `Err` if it was not as expected.
    ///
    /// ## Panics
    /// This function panics if either argument exceeds the byte length of the underlying memory.
    /// It also panics if the expected value is larger than the new value.
    fn bump(&self, expect_consumed: usize, new_consumed: usize) -> Result<(), usize> {
        assert!(expect_consumed <= new_consumed);
        assert!(new_consumed <= mem::size_of::<T>());

        self.consumed.compare_exchange(
            expect_consumed,
            new_consumed,
            Ordering::SeqCst,
            Ordering::SeqCst,
        ).map(drop)
    }
}

impl<'alloc, T> Allocation<'alloc, T> {
    /// Write a value into the allocation and leak it.
    ///
    /// ## Safety
    ///
    /// Must have been allocated for a layout that fits the layout of T previously. The pointer
    /// must not be aliased.
    ///
    /// ## Usage
    ///
    /// Consider the alternative [`Bump::leak`] to safely allocate and directly leak a value.
    ///
    /// [`Bump::leak`]: struct.Bump.html#method.leak
    pub unsafe fn leak(self, val: T) -> &'alloc mut T {
        // The pointer is not borrowed and valid as guaranteed by the caller.
        core::ptr::write(self.ptr.as_ptr(), val);
        &mut *self.ptr.as_ptr()
    }

    /// Write a value into the allocation and own it.
    ///
    /// ## Safety
    ///
    /// Must have been allocated for a layout that fits the layout of T previously. The pointer
    /// must not be aliased.
    ///
    /// ## Usage
    ///
    /// Consider the alternative [`Bump::leak`] to safely allocate and directly leak a value.
    ///
    /// [`Bump::leak`]: struct.Bump.html#method.leak
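    ///
    /// A sketch of the intended call sequence, assuming the allocation was freshly made for a
    /// `String`:
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// let local: Bump<[usize; 4]> = Bump::uninit();
    /// let alloc = local.get::<String>().unwrap();
    ///
    /// // SAFETY: freshly allocated for a `String` and not aliased.
    /// let boxed = unsafe { alloc.boxed(String::from("hello")) };
    /// // In contrast to `leak`, dropping the `LeakBox` runs the destructor.
    /// drop(boxed);
    /// ```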
    pub unsafe fn boxed(self, val: T) -> LeakBox<'alloc, T> {
        // The pointer is not aliased and valid as guaranteed by the caller.
        core::ptr::write(self.ptr.as_ptr(), val);
        // Safety: the instance is valid, was just initialized.
        LeakBox::from_raw(self.ptr.as_ptr())
    }

    /// Convert this into a mutable reference to an uninitialized slot.
    ///
    /// ## Safety
    ///
    /// Must have been allocated for a layout that fits the layout of T previously.
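    ///
    /// A sketch of the intended use, assuming the allocation was made for a matching `u32`:
    ///
    /// ```
    /// # use core::mem::MaybeUninit;
    /// # use static_alloc::Bump;
    /// let local: Bump<[u32; 1]> = Bump::uninit();
    /// let alloc = local.get::<u32>().unwrap();
    ///
    /// // SAFETY: the allocation was made for exactly one `u32`.
    /// let slot = unsafe { alloc.uninit() };
    /// *slot = MaybeUninit::new(7);
    /// ```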
    pub unsafe fn uninit(self) -> &'alloc mut MaybeUninit<T> {
        &mut *self.ptr.cast().as_ptr()
    }

    /// An 'allocation' for an arbitrary ZST, at some arbitrary level.
    pub(crate) fn for_zst(level: Level) -> Self {
        assert!(mem::size_of::<T>() == 0);
        // If `T` is a ZST, then the stride of any array is equal to 0. Thus, all arrays and slices
        // have the same layout which only depends on the alignment. If we need a storage for this
        // ZST we just take one of those as our base 'allocation' which can also never be aliased.
        let alloc: &[T; 0] = &[];

        Allocation {
            ptr: NonNull::from(alloc).cast(),
            lifetime: AllocTime::default(),
            level,
        }
    }
}

impl<T> LeakError<T> {
    fn new(val: T, failure: Failure) -> Self {
        LeakError { val, failure, }
    }

    /// Inspect the cause of this error.
    pub fn kind(&self) -> Failure {
        self.failure
    }

    /// Retrieve the value that could not be allocated.
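    ///
    /// A small sketch of recovering the value from a failed allocation:
    ///
    /// ```
    /// # use static_alloc::bump::{Bump, Failure};
    /// let tiny: Bump<[u8; 1]> = Bump::uninit();
    ///
    /// let err = tiny.leak(0u32).unwrap_err();
    /// assert_eq!(err.kind(), Failure::Exhausted);
    /// // The value that could not be moved in is handed back unchanged.
    /// assert_eq!(err.into_inner(), 0u32);
    /// ```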
    pub fn into_inner(self) -> T {
        self.val
    }
}

// SAFETY: at most one thread gets a pointer to each chunk of data.
unsafe impl<T> Sync for Bump<T> { }

unsafe impl<T> GlobalAlloc for Bump<T> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        Bump::alloc(self, layout)
            .map(NonNull::as_ptr)
            .unwrap_or_else(null_mut)
    }

    unsafe fn realloc(
        &self,
        ptr: *mut u8,
        current: Layout,
        new_size: usize,
    ) -> *mut u8 {
        let current = NonZeroLayout::from_layout(current.into()).unwrap();
        // As guaranteed, `new_size` is greater than 0.
        let new_size = core::num::NonZeroUsize::new_unchecked(new_size);

        let target = match layout_reallocated(current, new_size) {
            Some(target) => target,
            None => return core::ptr::null_mut(),
        };

        // Construct an allocation. This is not safe in general but the lifetime is not important.
        let fake = alloc_traits::Allocation {
            ptr: NonNull::new_unchecked(ptr),
            layout: current,
            lifetime: AllocTime::default(),
        };

        alloc_traits::LocalAlloc::realloc(self, fake, target)
            .map(|alloc| alloc.ptr.as_ptr())
            .unwrap_or_else(core::ptr::null_mut)
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // We are a slab allocator and do not deallocate.
    }
}

fn layout_reallocated(layout: NonZeroLayout, target: core::num::NonZeroUsize)
    -> Option<NonZeroLayout>
{
    // This may not be a valid layout.
    let layout = Layout::from_size_align(target.get(), layout.align()).ok()?;
    // This must succeed though, as the size was non-zero.
    Some(NonZeroLayout::from_layout(layout.into()).unwrap())
}

unsafe impl<'alloc, T> LocalAlloc<'alloc> for Bump<T> {
    fn alloc(&'alloc self, layout: NonZeroLayout)
        -> Option<alloc_traits::Allocation<'alloc>>
    {
        let raw_alloc = Bump::get_layout(self, layout.into())?;
        Some(alloc_traits::Allocation {
            ptr: raw_alloc.ptr,
            layout,
            lifetime: AllocTime::default(),
        })
    }

    // TODO: alloc zeroed if the constructor was `Self::zeroed()`

    /// Reallocates if the layout is strictly smaller and the allocation aligned.
    ///
    /// Note that this may succeed spuriously if the previous allocation is incidentally aligned to
    /// a larger alignment than had been requested.
    ///
    /// Also note, reallocating to a smaller layout is NOT useless.
    ///
    /// It confirms that this allocator does not need the allocated layout to re/deallocate.
    /// Otherwise, even reallocating to a strictly smaller layout would be impossible without
    /// storing the prior layout.
    unsafe fn realloc(
        &'alloc self,
        alloc: alloc_traits::Allocation<'alloc>,
        layout: NonZeroLayout,
    ) -> Option<alloc_traits::Allocation<'alloc>> {
        if alloc.ptr.as_ptr() as usize % layout.align() == 0
            && alloc.layout.size() >= layout.size()
        {
            // Obvious fit, nothing to do.
            return Some(alloc_traits::Allocation {
                ptr: alloc.ptr,
                layout,
                lifetime: alloc.lifetime,
            });
        }

        // TODO: we could try to allocate at the exact level that the allocation ends. If this
        // succeeds, there is no copying necessary. This was the point of `Level` anyways.

        let new_alloc = LocalAlloc::alloc(self, layout)?;
        core::ptr::copy_nonoverlapping(
            alloc.ptr.as_ptr(),
            new_alloc.ptr.as_ptr(),
            layout.size().min(alloc.layout.size()).into());
        // No dealloc.
        return Some(new_alloc);
    }

    unsafe fn dealloc(&'alloc self, _: alloc_traits::Allocation<'alloc>) {
        // We are a slab allocator and do not deallocate.
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn zst_no_drop() {
        #[derive(Debug)]
        struct PanicOnDrop;

        impl Drop for PanicOnDrop {
            fn drop(&mut self) {
                panic!("No instance of this should ever get dropped");
            }
        }

        let alloc = Bump::<()>::uninit();
        let _ = alloc.leak(PanicOnDrop).unwrap();
    }
}