nalloc/
lib.rs

//! This crate contains [an allocator](struct.NAlloc.html) that wraps another
//! allocator and can turn allocation on and off. This is meant to be used in unit tests.
//!
//! To use it, declare a static variable with the `#[global_allocator]`
//! attribute. It can wrap any allocator implementing
//! [`GlobalAlloc`](https://doc.rust-lang.org/std/alloc/trait.GlobalAlloc.html).
//!
//! ```rust
//! # extern crate std;
//! #[global_allocator]
//! static ALLOCATOR: nalloc::NAlloc<std::alloc::System> = {
//!     nalloc::NAlloc::new(std::alloc::System)
//! };
//! ```
//!
//! Allocation is allowed by default. To prevent it, call the `deny` method
//! on the allocator. When allocation is attempted while a lock is alive,
//! the process will abort.
//!
//! ```rust,should_panic
//! # extern crate std;
//! # #[global_allocator]
//! # static ALLOCATOR: nalloc::NAlloc<std::alloc::System> = {
//! #     nalloc::NAlloc::new(std::alloc::System)
//! # };
//! let this_is_allowed = vec![1, 2, 3];
//!
//! let _lock = ALLOCATOR.deny();
//! let this_will_abort = vec![4, 5, 6];
//! ```
//!
//! # Limitations
//!
//! ## Parallel tests
//!
//! Note that, by its nature, the default test harness will also use this
//! allocator once it is installed as the global allocator. This causes issues
//! because the test harness itself allocates memory, as do other tests running
//! in parallel while a lock is held. You can circumvent this by running
//! `cargo test -- --test-threads=1`.
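//!
//! For example, a test might follow the pattern sketched below. This is only a
//! sketch: `ALLOCATOR` is the static declared as above, and the test body is
//! illustrative.
//!
//! ```rust,ignore
//! #[global_allocator]
//! static ALLOCATOR: nalloc::NAlloc<std::alloc::System> =
//!     nalloc::NAlloc::new(std::alloc::System);
//!
//! #[test]
//! fn hot_path_does_not_allocate() {
//!     // Allocate everything the test needs up front…
//!     let mut buffer: Vec<i32> = Vec::with_capacity(16);
//!
//!     // …then forbid allocations while exercising the code under test.
//!     let lock = ALLOCATOR.deny();
//!     buffer.push(1); // no allocation: the capacity was reserved above
//!     drop(lock); // allocation is allowed again from here on
//! }
//! ```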
//!
//! ## Aborting
//!
//! If allocation is attempted while a lock is alive, the process will abort.
//! This means the entire process will be killed, rather than a single thread,
//! and it is not catchable with
//! [`catch_unwind`](https://doc.rust-lang.org/std/panic/fn.catch_unwind.html).

#![no_std]
#![forbid(warnings)]
#![forbid(missing_docs)]

extern crate alloc;

/// A wrapper around an allocator to turn allocation on and off.
#[derive(Debug)]
pub struct NAlloc<T> {
    wrapped: T,
    /// A counter for locks. Allocation is only allowed when the counter is 0.
    state: core::sync::atomic::AtomicU64,
}

impl<T> NAlloc<T> {
    /// Wraps an allocator.
    pub const fn new(wrapped: T) -> NAlloc<T> {
        Self {
            wrapped,
            state: core::sync::atomic::AtomicU64::new(0),
        }
    }

    /// Forbids allocations.
    ///
    /// This function returns a lock that must be kept alive for as long as
    /// allocations should stay forbidden.
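    ///
    /// A minimal sketch of the intended use (it assumes an `ALLOCATOR` static
    /// registered with `#[global_allocator]`, as in the crate-level docs):
    ///
    /// ```rust,no_run
    /// # extern crate std;
    /// # #[global_allocator]
    /// # static ALLOCATOR: nalloc::NAlloc<std::alloc::System> = {
    /// #     nalloc::NAlloc::new(std::alloc::System)
    /// # };
    /// let lock = ALLOCATOR.deny();
    /// // No allocation may happen while `lock` is alive…
    /// drop(lock);
    /// // …and it is allowed again once the lock is dropped.
    /// let allowed_again = vec![1, 2, 3];
    /// ```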
    #[must_use = "The returned lock must be kept alive for as long as allocations should stay forbidden."]
    pub fn deny<'a>(&'a self) -> AllocationLocker<'a, T> {
        if self
            .state
            .fetch_add(1, core::sync::atomic::Ordering::Release)
            == u64::MAX
        {
            panic!("Allocation counter wrapped around");
        }

        AllocationLocker { allocator: self }
    }

    fn unlock(&self) {
        if self
            .state
            .fetch_sub(1, core::sync::atomic::Ordering::Release)
            == 0
        {
            panic!("Allocation counter wrapped around");
        }
    }
}

unsafe impl<T: alloc::alloc::GlobalAlloc> alloc::alloc::GlobalAlloc for NAlloc<T> {
    unsafe fn alloc(&self, layout: alloc::alloc::Layout) -> *mut u8 {
        // Allocation is only forwarded to the wrapped allocator when no lock
        // is held; otherwise the process aborts via the alloc error handler.
        if self.state.load(core::sync::atomic::Ordering::Relaxed) == 0 {
            self.wrapped.alloc(layout)
        } else {
            alloc::alloc::handle_alloc_error(layout)
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: alloc::alloc::Layout) {
        // Deallocation is always allowed, even while a lock is held.
        self.wrapped.dealloc(ptr, layout)
    }

    unsafe fn alloc_zeroed(&self, layout: alloc::alloc::Layout) -> *mut u8 {
        if self.state.load(core::sync::atomic::Ordering::Relaxed) == 0 {
            self.wrapped.alloc_zeroed(layout)
        } else {
            alloc::alloc::handle_alloc_error(layout)
        }
    }

    unsafe fn realloc(
        &self,
        ptr: *mut u8,
        layout: alloc::alloc::Layout,
        new_size: usize,
    ) -> *mut u8 {
        if self.state.load(core::sync::atomic::Ordering::Relaxed) == 0 {
            self.wrapped.realloc(ptr, layout, new_size)
        } else {
            alloc::alloc::handle_alloc_error(layout)
        }
    }
}

/// A lock that keeps allocation forbidden for as long as it is alive.
pub struct AllocationLocker<'a, T> {
    allocator: &'a NAlloc<T>,
}

impl<'a, T> Drop for AllocationLocker<'a, T> {
    fn drop(&mut self) {
        // Decrement the lock counter; allocation is allowed again once every
        // lock has been dropped and the counter is back to 0.
        self.allocator.unlock()
    }
}
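
// A minimal sketch of an in-crate test for the lock counter. It is only a
// sketch: the module, test name, and assertions are illustrative, and it
// assumes `std` is linked when building tests.
#[cfg(test)]
mod tests {
    extern crate std;

    use core::sync::atomic::Ordering;

    use super::NAlloc;

    #[test]
    fn counter_tracks_nested_locks() {
        // This allocator is a plain value, not the global allocator, so the
        // test itself may allocate freely.
        let allocator = NAlloc::new(std::alloc::System);
        assert_eq!(allocator.state.load(Ordering::Relaxed), 0);

        let outer = allocator.deny();
        let inner = allocator.deny();
        assert_eq!(allocator.state.load(Ordering::Relaxed), 2);

        drop(inner);
        drop(outer);
        assert_eq!(allocator.state.load(Ordering::Relaxed), 0);
    }
}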