faux_alloc/
lib.rs

1//! Simplified allocations for no-std on stable Rust.
2//!
3//! All allocations require a `*Store` which gives them a `'static` lifetime.
4//!
5//! You can swap out alloc for faux_alloc and as long as the APIs are supported
6//! it will work.
7
8#![no_std]
9
10use core::mem::ManuallyDrop;
11use core::task::{RawWaker, RawWakerVTable, Waker};
12use core::{
13    borrow::{Borrow, BorrowMut},
14    cell::UnsafeCell,
15    mem::MaybeUninit,
16    ops::{Deref, DerefMut},
17    pin::Pin,
18    sync::atomic::{AtomicBool, AtomicUsize, Ordering},
19};
20
/// A pointer type for heap allocation.
///
/// A thin wrapper around a raw pointer; the backing storage comes from a
/// [`BoxStore`] rather than a global allocator, which is why [`Box::new`]
/// is unimplemented here.
pub struct Box<T: ?Sized>(*mut T);
23
impl<T> Box<T> {
    /// Unimplemented, use [`BoxStore`].
    ///
    /// Present only for API parity with `alloc::boxed::Box::new`; always
    /// panics via `unimplemented!`.
    #[inline]
    pub fn new(_: T) -> Self {
        unimplemented!()
    }

    /// Unimplemented, use [`BoxStore`].
    ///
    /// Present only for API parity with `alloc::boxed::Box::pin`; always
    /// panics via `unimplemented!`.
    #[inline]
    pub fn pin(_: T) -> Pin<Self> {
        unimplemented!()
    }
}
37
impl<T: ?Sized> Box<T> {
    /// Construct `Box` from pointer
    ///
    /// # Safety
    ///
    /// `raw` must point to a live, uniquely-owned `T` (e.g. one produced by
    /// [`Box::into_raw`] or handed out by [`BoxStore::alloc`]); the new
    /// `Box` takes over that pointer.
    #[inline]
    pub const unsafe fn from_raw(raw: *mut T) -> Self {
        Box(raw)
    }

    /// Convert box to a pointer
    ///
    /// The caller becomes responsible for the pointed-to value; note this
    /// `Box` has no `Drop` impl, so nothing is freed either way.
    #[inline]
    pub const fn into_raw(b: Self) -> *mut T {
        b.0
    }

    /// Leak the `Box`
    ///
    /// Returns a reference of arbitrary lifetime; sound because storage
    /// comes from a `'static` [`BoxStore`] slot.
    #[inline]
    pub fn leak<'a>(b: Self) -> &'a mut T {
        unsafe { &mut *b.0 }
    }

    /// Convert the `Box` into a pinned `Box`
    #[inline]
    pub fn into_pin(boxed: Self) -> Pin<Self> {
        // SAFETY: the pointee lives in a store slot and this wrapper gives
        // no safe way to move it out, so pinning the wrapper is sound.
        unsafe { Pin::new_unchecked(boxed) }
    }
}
63
64impl<F: ?Sized + core::future::Future + Unpin> core::future::Future for Box<F> {
65    type Output = F::Output;
66
67    fn poll(mut self: Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> core::task::Poll<Self::Output> {
68        F::poll(Pin::new(&mut *self), cx)
69    }
70}
71
72impl<T: ?Sized> Borrow<T> for Box<T> {
73    #[inline]
74    fn borrow(&self) -> &T {
75        &*self
76    }
77}
78
79impl<T: ?Sized> BorrowMut<T> for Box<T> {
80    #[inline]
81    fn borrow_mut(&mut self) -> &mut T {
82        &mut *self
83    }
84}
85
86impl<T: ?Sized> AsRef<T> for Box<T> {
87    #[inline]
88    fn as_ref(&self) -> &T {
89        &*self
90    }
91}
92
93impl<T: ?Sized> AsMut<T> for Box<T> {
94    #[inline]
95    fn as_mut(&mut self) -> &mut T {
96        &mut *self
97    }
98}
99
impl<T: ?Sized> Deref for Box<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: the pointer came from `BoxStore::alloc` or from
        // `Box::from_raw`, whose contract requires a live `T`, so it is
        // valid for as long as this `Box` exists.
        unsafe { &*self.0 }
    }
}
108
impl<T: ?Sized> DerefMut for Box<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: same validity argument as `deref`; `&mut self` gives
        // exclusive access to the (uniquely-owned) pointee.
        unsafe { &mut *self.0 }
    }
}
115
/// A place to store a fake "heap" that can allocate for one `Box`.
///
/// Field 0 is the (possibly uninitialized) slot; field 1 is the
/// "allocated" flag that guards it.
pub struct BoxStore<T>(UnsafeCell<MaybeUninit<T>>, AtomicBool);

// NOTE(review): this marks the store `Sync` for every `T`, even `T` that is
// not `Send`/`Sync`, yet `alloc` hands out cross-thread access to the slot.
// A `T: Send` bound here would be safer — confirm intent.
unsafe impl<T> Sync for BoxStore<T> {}
120
impl<T> BoxStore<T> {
    /// Create a new `BoxStore`
    #[inline]
    pub const fn new() -> Self {
        Self(
            UnsafeCell::new(MaybeUninit::uninit()),
            AtomicBool::new(false),
        )
    }

    /// Allocate memory.
    ///
    /// Returns `None` if the single slot is already in use. Requires
    /// `&'static self` so the returned `Box` can never outlive its storage.
    #[inline]
    pub fn alloc(&'static self, value: T) -> Option<Box<T>> {
        // `fetch_or` returns the previous flag: `true` means another caller
        // already owns the slot.
        if self.1.fetch_or(true, Ordering::SeqCst) {
            None
        } else {
            unsafe {
                // SAFETY: the atomic swap above gave us exclusive access to
                // the cell until `dealloc` clears the flag.
                let maybe_uninit = &mut *self.0.get();
                let pointer = maybe_uninit.write(value);
                Some(Box::from_raw(pointer))
            }
        }
    }

    /// De-allocate memory and drop.
    ///
    /// After this is called, the memory may be allocated again.
    ///
    /// May error if:
    ///  - Not the same `Box` that was allocated on this store.
    #[inline]
    pub fn dealloc(&'static self, ptr: Box<T>) -> Result<(), ()> {
        unsafe {
            let ptr = Box::into_raw(ptr);
            if (*self.0.get()).as_mut_ptr() == ptr {
                // SAFETY: pointer identity proves this is our slot and the
                // value in it is initialized; drop in place, then release
                // the slot for reuse.
                core::ptr::drop_in_place(ptr);
                self.1.store(false, Ordering::SeqCst);
                Ok(())
            } else {
                // Foreign `Box`: refuse to touch another store's slot.
                // NOTE(review): the mismatched Box is forgotten here (no
                // Drop impl), so its value is never dropped — confirm.
                Err(())
            }
        }
    }
}
165
166impl<T> Drop for BoxStore<T> {
167    fn drop(&mut self) {
168        if self.1.load(Ordering::SeqCst) {
169            unsafe { core::ptr::drop_in_place(self.0.get()) };
170        }
171    }
172}
173
/// A place to store a fake "heap" that can allocate for one `Arc`.
///
/// Field 0 is the (possibly uninitialized) slot holding the refcounted
/// payload; field 1 is the "allocated" flag that guards it.
pub struct ArcStore<T>(UnsafeCell<MaybeUninit<ArcInner<T>>>, AtomicBool);

// NOTE(review): `Sync` is asserted for every `T`, even non-`Send`/`Sync`
// ones, yet `alloc` shares the payload across threads. A `T: Send + Sync`
// bound would be safer — confirm intent.
unsafe impl<T> Sync for ArcStore<T> {}
178
impl<T> ArcStore<T> {
    /// Create a new `ArcStore`
    #[inline]
    pub const fn new() -> Self {
        Self(
            UnsafeCell::new(MaybeUninit::uninit()),
            AtomicBool::new(false),
        )
    }

    /// Allocate memory.
    ///
    /// Returns `None` if the single slot is already in use. Requires
    /// `&'static self` so the returned `Arc` can never outlive its storage.
    #[inline]
    pub fn alloc(&'static self, value: T) -> Option<Arc<T>> {
        // `fetch_or` returns the previous flag: `true` means the slot is taken.
        if self.1.fetch_or(true, Ordering::SeqCst) {
            None
        } else {
            unsafe {
                // SAFETY: the atomic swap above gave us exclusive access to
                // the cell until `dealloc` clears the flag.
                let maybe_uninit = &mut *self.0.get();
                let pointer = maybe_uninit.write(ArcInner {
                    // The returned handle is the first and only reference.
                    count: AtomicUsize::new(1),
                    data: UnsafeCell::new(value),
                });
                Some(Arc(pointer))
            }
        }
    }

    /// De-allocate memory and drop.
    ///
    /// After this is called, the memory may be allocated again.
    ///
    /// May error if:
    ///  - Not the same `Arc` that was allocated on this store.
    ///  - Reference count is not 1
    #[inline]
    pub fn dealloc(&'static self, ptr: Arc<T>) -> Result<(), ()> {
        unsafe {
            // Snapshot the count before consuming the handle; `into_raw`
            // forgets `ptr`, so it does NOT decrement the count.
            let count = Arc::count(&ptr);
            let ptr = Arc::into_raw(ptr);
            let ptr: *const ArcInner<T> = ptr.cast();
            if count == 1 && ptr == (*self.0.get()).as_ptr() {
                // SAFETY: we held the only handle and it points at our slot,
                // so dropping the payload in place is exclusive. Only `data`
                // is dropped; `count` (an AtomicUsize) needs no destructor.
                core::ptr::drop_in_place((*ptr).data.get());
                self.1.store(false, Ordering::SeqCst);
                Ok(())
            } else {
                // Wrong store or still shared: undo `into_raw` by dropping
                // exactly one reference, leaving any other handles intact.
                Arc::<T>::decrement_count(ptr.cast());
                Err(())
            }
        }
    }
}
230
231impl<T: core::fmt::Debug + ?Sized> core::fmt::Debug for Box<T> {
232    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
233        core::fmt::Debug::fmt(&**self, f)
234    }
235}
236
237impl<T> Drop for ArcStore<T> {
238    fn drop(&mut self) {
239        if self.1.load(Ordering::SeqCst) {
240            unsafe { core::ptr::drop_in_place(self.0.get()) };
241        }
242    }
243}
244
/// Heap payload shared by all `Arc` handles to one allocation.
struct ArcInner<T: ?Sized> {
    /// Number of live `Arc`s pointing at this inner.
    count: AtomicUsize,
    /// The shared value; `UnsafeCell` because `ArcStore::dealloc` drops it
    /// in place through a shared pointer.
    data: UnsafeCell<T>,
}
249
/// Atomically reference-counted pointer
///
/// Points at an [`ArcInner`] living in an [`ArcStore`] slot rather than on
/// a real heap.
pub struct Arc<T: ?Sized>(*const ArcInner<T>);
252
253impl<T: ?Sized> Arc<T> {
254    /// Get the number of `Arc`s
255    #[inline]
256    #[must_use]
257    pub fn count(this: &Self) -> usize {
258        unsafe { (*this.0).count.load(Ordering::SeqCst) }
259    }
260
261    /// Check if two arcs point to the same location
262    #[inline]
263    #[must_use]
264    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
265        this.0 == other.0
266    }
267
268    /// Leak the `Arc`
269    #[inline]
270    pub fn leak<'a>(this: Self) -> &'a T {
271        unsafe { &*(*this.0).data.get() }
272    }
273}
274
impl<T> Arc<T> {
    /// Unimplemented, use [`ArcStore`].
    ///
    /// Present only for API parity with `alloc::sync::Arc::new`; always
    /// panics via `unimplemented!`.
    #[inline]
    pub fn new(_: T) -> Self {
        unimplemented!()
    }

    /// Fake a `Clone` on a raw arc
    ///
    /// # Safety
    ///
    /// `ptr` must have come from [`Arc::into_raw`] on an `Arc<T>` whose
    /// allocation is still alive.
    #[inline]
    pub unsafe fn increment_count(ptr: *const ()) {
        // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
        let arc = ManuallyDrop::new(Arc::<T>::from_raw(ptr));
        // Now increase refcount, but don't drop new refcount either
        let _arc_clone: ManuallyDrop<_> = arc.clone();
    }

    /// Fake a `Drop` on raw arc
    ///
    /// # Safety
    ///
    /// `ptr` must have come from [`Arc::into_raw`] and each such pointer
    /// may be passed here at most once.
    #[inline]
    pub unsafe fn decrement_count(ptr: *const ()) {
        // Rebuild the Arc and let its Drop impl do the decrement.
        core::mem::drop(Arc::<T>::from_raw(ptr));
    }

    /// Create arc from raw arc
    ///
    /// # Safety
    ///
    /// `ptr` must have come from [`Arc::into_raw`]; the reconstructed `Arc`
    /// takes over that reference's ownership.
    #[inline]
    pub unsafe fn from_raw(ptr: *const ()) -> Self {
        Self(ptr.cast())
    }

    /// Create raw arc from arc
    ///
    /// Transfers ownership of one reference into the returned pointer;
    /// `forget` prevents the count from being decremented here.
    #[inline]
    pub fn into_raw(this: Self) -> *const () {
        let ptr = this.0;
        core::mem::forget(this);
        ptr.cast()
    }
}
311
312impl<T: ?Sized> Borrow<T> for Arc<T> {
313    #[inline]
314    fn borrow(&self) -> &T {
315        &*self
316    }
317}
318
319impl<T: ?Sized> AsRef<T> for Arc<T> {
320    #[inline]
321    fn as_ref(&self) -> &T {
322        &*self
323    }
324}
325
impl<T: ?Sized> Deref for Arc<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: a live `Arc` guarantees the inner allocation (and its
        // `data`) is alive.
        unsafe { &*(*self.0).data.get() }
    }
}
334
335impl<T: ?Sized> Clone for Arc<T> {
336    #[inline]
337    fn clone(&self) -> Arc<T> {
338        const MAX: usize = (isize::MAX) as usize;
339
340        if unsafe { (*self.0).count.fetch_add(1, Ordering::Relaxed) } > MAX {
341            panic!();
342        }
343
344        Self(self.0)
345    }
346}
347
348impl<T: ?Sized + core::fmt::Debug> core::fmt::Debug for Arc<T> {
349    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
350        core::fmt::Debug::fmt(&**self, f)
351    }
352}
353
impl<T: ?Sized> Drop for Arc<T> {
    /// Decrement the reference count.
    ///
    /// NOTE(review): unlike `alloc::sync::Arc`, reaching a count of zero
    /// does NOT drop the inner value — it lives until `ArcStore::dealloc`
    /// (which needs a live handle with count 1) or the store itself is
    /// dropped. Confirm this leak-on-last-drop is intended; it also means
    /// no Acquire fence is needed here since no data is freed.
    #[inline]
    fn drop(&mut self) {
        unsafe { (*self.0).count.fetch_sub(1, Ordering::Release) };
    }
}
360
361/// Trait for safely implementing a [`Waker`]
362pub trait Wake {
363    /// Wake this task.
364    fn wake(this: Arc<Self>);
365
366    /// Wake this task without consuming the waker.
367    fn wake_by_ref(this: &Arc<Self>) {
368        Self::wake(this.clone());
369    }
370}
371
372impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for Waker {
373    /// Create waker from `Arc<Wake>`
374    fn from(waker: Arc<W>) -> Waker {
375        unsafe { Waker::from_raw(raw_waker(waker)) }
376    }
377}
378
impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for RawWaker {
    /// Create a `RawWaker` from `Arc<Wake>`; ownership of the handle moves
    /// into the raw waker's data pointer.
    fn from(waker: Arc<W>) -> RawWaker {
        raw_waker(waker)
    }
}
384
/// Build a [`RawWaker`] whose vtable manages the refcount of `waker`.
///
/// Ownership of one reference moves into the returned `RawWaker`'s data
/// pointer; the vtable functions below restore `Arc` semantics on top of
/// the raw `*const ()`. Mirrors `alloc::task`'s waker implementation.
#[inline(always)]
fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
    // Increment the reference count of the arc to clone it.
    //
    // SAFETY (contract shared by all vtable fns): `waker` originates from
    // `Arc::into_raw` on an `Arc<W>` whose allocation is still alive.
    unsafe fn clone_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) -> RawWaker {
        Arc::<W>::increment_count(waker);
        RawWaker::new(
            waker as *const (),
            // `&RawWakerVTable::new(..)` is promoted to a `'static`
            // constant, so returning the reference is sound.
            &RawWakerVTable::new(
                clone_waker::<W>,
                wake::<W>,
                wake_by_ref::<W>,
                drop_waker::<W>,
            ),
        )
    }

    // Wake by value, moving the Arc into the Wake::wake function
    unsafe fn wake<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        // Reconstructing the Arc transfers this reference's ownership;
        // `Wake::wake` consumes it.
        let waker = Arc::<W>::from_raw(waker);
        <W as Wake>::wake(waker);
    }

    // Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it
    unsafe fn wake_by_ref<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        let waker = ManuallyDrop::new(Arc::from_raw(waker));
        <W as Wake>::wake_by_ref(&waker);
    }

    // Decrement the reference count of the Arc on drop
    unsafe fn drop_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        Arc::<W>::decrement_count(waker);
    }

    RawWaker::new(
        // Transfer ownership of one reference into the data pointer.
        Arc::into_raw(waker) as *const (),
        &RawWakerVTable::new(
            clone_waker::<W>,
            wake::<W>,
            wake_by_ref::<W>,
            drop_waker::<W>,
        ),
    )
}
428
pub mod boxed {
    //! Emulating <https://doc.rust-lang.org/alloc/boxed/index.html>
    //!
    //! Re-export only, so `alloc::boxed::Box` call sites can switch to
    //! `faux_alloc::boxed::Box` unchanged.

    pub use crate::Box;
}
434
pub mod sync {
    //! Emulating <https://doc.rust-lang.org/alloc/sync/index.html>
    //!
    //! Re-export only, so `alloc::sync::Arc` call sites can switch to
    //! `faux_alloc::sync::Arc` unchanged.

    pub use crate::Arc;
}
440
pub mod task {
    //! Emulating <https://doc.rust-lang.org/alloc/task/index.html>
    //!
    //! Re-export only, so `alloc::task::Wake` call sites can switch to
    //! `faux_alloc::task::Wake` unchanged.

    pub use crate::Wake;
}
446
#[cfg(test)]
mod tests {
    /// Smoke test: the harness itself runs.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}