musli_allocator/system.rs

use core::cell::UnsafeCell;
use core::fmt::{self, Arguments};
use core::ptr::NonNull;

use alloc::boxed::Box;
use alloc::vec::Vec;

use musli::buf::Error;
use musli::{Allocator, Buf};

/// An [`Allocator`] which allocates buffers on the heap and recycles them
/// through an internal free list of regions.
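///
/// # Examples
///
/// A minimal usage sketch. The crate path `musli_allocator` is assumed from
/// this file's location; only the [`Allocator`] and [`Buf`] methods
/// implemented in this file are used.
///
/// ```
/// use musli::{Allocator, Buf};
/// use musli_allocator::System;
///
/// let alloc = System::new();
/// let mut buf = alloc.alloc().expect("allocation failed");
/// buf.write(b"Hello");
/// buf.write(b", World!");
/// assert_eq!(buf.as_slice(), b"Hello, World!");
/// ```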
pub struct System {
    internal: UnsafeCell<Internal>,
}

impl System {
    /// Construct a new allocator.
    #[inline]
    pub const fn new() -> Self {
        Self {
            internal: UnsafeCell::new(Internal { head: None }),
        }
    }
}

impl Default for System {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl Allocator for System {
    type Buf<'this> = SystemBuf<'this> where Self: 'this;

    #[inline(always)]
    fn alloc(&self) -> Option<Self::Buf<'_>> {
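        // Pop a region from the free list if one is available; otherwise
        // `Internal::alloc` heap-allocates a fresh one.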
        Some(SystemBuf {
            region: Internal::alloc(&self.internal),
            internal: &self.internal,
        })
    }
}

impl Drop for System {
    fn drop(&mut self) {
        let internal = unsafe { &mut *self.internal.get() };

        while let Some(mut head) = internal.head.take() {
            // SAFETY: This collection has exclusive access to any heads it
            // contains.
            unsafe {
                internal.head = head.as_mut().next.take();
                drop(Box::from_raw(head.as_ptr()));
            }
        }
    }
}

/// A vector-backed allocation.
pub struct SystemBuf<'a> {
    region: &'a mut Region,
    internal: &'a UnsafeCell<Internal>,
}

impl<'a> Buf for SystemBuf<'a> {
    #[inline]
    fn write(&mut self, bytes: &[u8]) -> bool {
        self.region.data.extend_from_slice(bytes);
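        // The backing `Vec` grows as needed, so writes are infallible.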
        true
    }

    #[inline(always)]
    fn len(&self) -> usize {
        self.region.data.len()
    }

    #[inline(always)]
    fn as_slice(&self) -> &[u8] {
        &self.region.data
    }

    #[inline(always)]
    fn write_fmt(&mut self, arguments: Arguments<'_>) -> Result<(), Error> {
        fmt::write(self, arguments).map_err(|_| Error)
    }
}

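// `fmt::Write` is implemented so that `write_fmt` above can delegate to
// `fmt::write`.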
impl fmt::Write for SystemBuf<'_> {
    #[inline(always)]
    fn write_str(&mut self, s: &str) -> fmt::Result {
        self.region.data.extend_from_slice(s.as_bytes());
        Ok(())
    }
}

impl<'a> Drop for SystemBuf<'a> {
    fn drop(&mut self) {
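        // Return the region to the allocator's free list so that its storage
        // can be reused by a later `alloc` call.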
        Internal::free(self.internal, self.region);
    }
}

/// An allocated region.
#[repr(C)]
struct Region {
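    // The bytes written to this region. Capacity is retained when the region
    // is returned to the free list.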
    data: Vec<u8>,
    // Pointer to the next free region.
    next: Option<NonNull<Region>>,
}

/// Internals of the allocator.
struct Internal {
    // Regions of re-usable allocations we can hand out.
    head: Option<NonNull<Region>>,
}

impl Internal {
    /// Allocate a region, reusing one from the free list when available.
    ///
    /// Note that this will return a leaked memory region, so the unbound
    /// lifetime is intentional.
    fn alloc<'a>(this: &UnsafeCell<Self>) -> &'a mut Region {
        // SAFETY: We take care to only access internals in a single-threaded
        // mutable fashion.
        let internal = unsafe { &mut *this.get() };

        if let Some(mut head) = internal.head.take() {
            // SAFETY: This collection has exclusive access to any heads it contains.
            unsafe {
                let head = head.as_mut();
                internal.head = head.next.take();
                head
            }
        } else {
            Box::leak(Box::new(Region {
                data: Vec::new(),
                next: None,
            }))
        }
    }

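    /// Return a region to the free list so it can be reused by a later call
    /// to `alloc`. The region's contents are cleared, but its capacity is
    /// retained.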
    fn free(this: &UnsafeCell<Self>, region: &mut Region) {
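        // SAFETY: As in `alloc` above, internals are only accessed in a
        // single-threaded, mutable fashion.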
        unsafe {
            let this = &mut *this.get();
            region.data.clear();
            region.next = this.head;
            this.head = Some(NonNull::from(region));
        }
    }
}