composable_allocators/
freelist.rs

use crate::base::*;
use const_default::ConstDefault;
use const_default_derive::ConstDefault;
use core::alloc::{self, AllocError, Allocator};
use core::mem::{align_of, size_of};
use core::ptr::{self, NonNull, null_mut};
use core::sync::atomic::AtomicPtr;
use sync_no_std::mutex::Mutex;

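/// The smallest block size this freelist can manage: a freed block is reused
/// in place to store a list node, so it must be able to hold one.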
pub const MIN_LAYOUT_SIZE: usize = size_of::<Node>();
11
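/// The smallest block alignment this freelist can manage, for the same
/// reason as [`MIN_LAYOUT_SIZE`].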
pub const MIN_LAYOUT_ALIGN: usize = align_of::<Node>();

/// # Safety
///
/// This trait cannot be implemented outside of this module.
pub unsafe trait LimitParam {
    #[doc(hidden)]
    type ListLen: ConstDefault + Copy;

    #[doc(hidden)]
    #[allow(clippy::missing_safety_doc)]
    unsafe fn limit_reached(&self, list_len: Self::ListLen) -> bool;

    #[doc(hidden)]
    #[allow(clippy::missing_safety_doc)]
    unsafe fn dec_list_len(&self, list_len: Self::ListLen) -> Self::ListLen;

    #[doc(hidden)]
    #[allow(clippy::missing_safety_doc)]
    unsafe fn inc_list_len(&self, list_len: Self::ListLen) -> Self::ListLen;
}

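/// A [`LimitParam`] that never caps the free list: every deallocated block
/// in the managed layout range is cached for reuse.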
#[derive(ConstDefault)]
pub struct NoLimit;

unsafe impl LimitParam for NoLimit {
    type ListLen = ();

    unsafe fn limit_reached(&self, (): Self::ListLen) -> bool { false }

    unsafe fn dec_list_len(&self, (): Self::ListLen) -> Self::ListLen { }

    unsafe fn inc_list_len(&self, (): Self::ListLen) -> Self::ListLen { }
}

#[doc(hidden)]
#[derive(ConstDefault, Clone, Copy)]
pub struct FixedLimitListLen(usize);

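/// A [`LimitParam`] that caps the free list at `limit` cached blocks; once
/// the cap is reached, further deallocations go straight to the base
/// allocator.
///
/// A minimal sketch (assuming the `limit` field is public, as declared below):
///
/// ```
/// # use composable_allocators::freelist::FixedLimit;
/// let _limit = FixedLimit { limit: 32 }; // cache at most 32 freed blocks
/// ```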
pub struct FixedLimit {
    pub limit: usize,
}

unsafe impl LimitParam for FixedLimit {
    type ListLen = FixedLimitListLen;

    unsafe fn limit_reached(&self, list_len: Self::ListLen) -> bool {
        list_len.0 == self.limit
    }

    unsafe fn dec_list_len(&self, list_len: Self::ListLen) -> Self::ListLen {
        FixedLimitListLen(list_len.0 - 1)
    }

    unsafe fn inc_list_len(&self, list_len: Self::ListLen) -> Self::ListLen {
        FixedLimitListLen(list_len.0 + 1)
    }
}

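// A freed block is reused in place as a singly linked list node: the pointer
// to the next free block is stored in the block's own memory. This is why
// managed layouts must be at least as large and aligned as `Node`.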
struct Node {
    next: AtomicPtr<u8>,
}

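// The list head plus whatever length bookkeeping the limit policy requires
// (nothing at all for `NoLimit`).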
struct List<Limit: LimitParam> {
    head: Node,
    len: <Limit as LimitParam>::ListLen,
}

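/// An allocator adapter that caches deallocated blocks on an intrusive free
/// list and serves later allocations of a matching layout from that list,
/// falling back to the base allocator `A` otherwise.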
pub struct Freelist<Limit: LimitParam, A: Allocator + Clone> {
    list: Mutex<List<Limit>, A>,
    layout: alloc::Layout,
    tolerance: alloc::Layout,
    limit: Limit,
}

unsafe impl<Limit: LimitParam, A: NonUnwinding + Clone> NonUnwinding for Freelist<Limit, A> { }

impl<Limit: LimitParam, A: Allocator + Clone> Freelist<Limit, A> {
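    /// Creates a freelist that recycles blocks of `layout`, serving any
    /// request whose size and alignment fall between `tolerance` and `layout`.
    ///
    /// # Panics
    ///
    /// Panics if `tolerance` exceeds `layout` in size or alignment, or if
    /// `layout` is smaller than [`MIN_LAYOUT_SIZE`] or less aligned than
    /// [`MIN_LAYOUT_ALIGN`].
    ///
    /// # Examples
    ///
    /// A minimal usage sketch, assuming a nightly toolchain with
    /// `allocator_api`, [`std::alloc::Global`] as the base allocator, and
    /// this module being reachable as `composable_allocators::freelist`:
    ///
    /// ```
    /// # #![feature(allocator_api)]
    /// use core::alloc::{Allocator, Layout};
    /// use std::alloc::Global;
    /// use composable_allocators::freelist::{Freelist, NoLimit};
    ///
    /// let layout = Layout::from_size_align(64, 8).unwrap();
    /// let freelist = Freelist::new(layout, layout, NoLimit, Global);
    ///
    /// let a = freelist.allocate(layout).unwrap();
    /// // A freed block goes onto the list and is handed out again later.
    /// unsafe { freelist.deallocate(a.cast(), layout); }
    /// let b = freelist.allocate(layout).unwrap();
    /// # unsafe { freelist.deallocate(b.cast(), layout); }
    /// ```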
    pub const fn new(layout: alloc::Layout, tolerance: alloc::Layout, limit: Limit, base: A) -> Self {
        assert!(tolerance.size() <= layout.size() && tolerance.align() <= layout.align());
        assert!(layout.size() >= MIN_LAYOUT_SIZE && layout.align() >= MIN_LAYOUT_ALIGN);
        unsafe { Self::new_unchecked(layout, tolerance, limit, base) }
    }

    /// # Safety
    ///
    /// Arguments must satisfy
    /// `tolerance.size() <= layout.size() && tolerance.align() <= layout.align()`,
    /// and
    /// `layout.size() >= MIN_LAYOUT_SIZE && layout.align() >= MIN_LAYOUT_ALIGN`.
    pub const unsafe fn new_unchecked(layout: alloc::Layout, tolerance: alloc::Layout, limit: Limit, base: A) -> Self {
        Freelist {
            list: Mutex::new_in(List {
                head: Node { next: AtomicPtr::new(null_mut()) },
                len: ConstDefault::DEFAULT,
            }, base),
            layout,
            tolerance,
            limit,
        }
    }

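    // A request is managed iff both its size and its alignment fall within
    // the window between `tolerance` and `layout`.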
    fn manages(&self, layout: alloc::Layout) -> bool {
        (self.tolerance.size() ..= self.layout.size()).contains(&layout.size()) &&
        (self.tolerance.align() ..= self.layout.align()).contains(&layout.align())
    }

    fn base(&self) -> &A { self.list.allocator() }
}

unsafe impl<Limit: LimitParam, A: Fallbackable + Clone> Fallbackable for Freelist<Limit, A> {
    unsafe fn has_allocated(&self, ptr: NonNull<u8>, layout: alloc::Layout) -> bool {
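        // Managed requests are actually satisfied with blocks of
        // `self.layout`, so queries about them must use that layout.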
        let layout = if self.manages(layout) { self.layout } else { layout };
        self.base().has_allocated(ptr, layout)
    }

    fn allows_fallback(&self, layout: alloc::Layout) -> bool {
        let layout = if self.manages(layout) { self.layout } else { layout };
        self.base().allows_fallback(layout)
    }
}

unsafe impl<Limit: LimitParam, A: Allocator + Clone> Allocator for Freelist<Limit, A> {
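    // Serves a managed request by popping a block off the free list, falling
    // back to the base allocator when the list is empty.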
    fn allocate(&self, layout: alloc::Layout) -> Result<NonNull<[u8]>, AllocError> {
        if !self.manages(layout) {
            return self.base().allocate(layout);
        }
        let mut list = self.list.lock().unwrap();
        if let Some(next_ptr) = NonNull::new(*list.head.next.get_mut()) {
            let next = unsafe { ptr::read(next_ptr.as_ptr() as *const Node) }.next;
            list.head = Node { next };
            list.len = unsafe { self.limit.dec_list_len(list.len) };
            Ok(NonNull::slice_from_raw_parts(next_ptr, self.layout.size()))
        } else {
            self.base().allocate(self.layout)
        }
    }

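    // Like `allocate`, but a recycled block must be zeroed by hand; only
    // freshly delegated allocations are zeroed by the base allocator.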
    fn allocate_zeroed(&self, layout: alloc::Layout) -> Result<NonNull<[u8]>, AllocError> {
        if !self.manages(layout) {
            return self.base().allocate_zeroed(layout);
        }
        let mut list = self.list.lock().unwrap();
        if let Some(next_ptr) = NonNull::new(*list.head.next.get_mut()) {
            let next = unsafe { ptr::read(next_ptr.as_ptr() as *const Node) }.next;
            list.head = Node { next };
            list.len = unsafe { self.limit.dec_list_len(list.len) };
            let ptr = NonNull::slice_from_raw_parts(next_ptr, self.layout.size());
            unsafe { ptr.as_mut_ptr().write_bytes(0, ptr.len()); }
            Ok(ptr)
        } else {
            self.base().allocate_zeroed(self.layout)
        }
    }

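    // Pushes a managed block onto the free list; once the limit is reached,
    // managed blocks are returned to the base allocator instead.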
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: alloc::Layout) {
        if !self.manages(layout) {
            return self.base().deallocate(ptr, layout);
        }
        let mut list = self.list.lock().unwrap();
        if self.limit.limit_reached(list.len) {
            // The block was allocated with `self.layout`, not `layout`.
            return self.base().deallocate(ptr, self.layout);
        }
        ptr::write(ptr.as_ptr() as *mut Node, Node { next: AtomicPtr::new(*list.head.next.get_mut()) });
        *list.head.next.get_mut() = ptr.as_ptr();
        list.len = unsafe { self.limit.inc_list_len(list.len) };
    }

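    // A managed block already spans the full `self.layout`, so growing within
    // the managed window is a no-op; anything else is delegated with the
    // layout the block was really allocated with.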
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        let old_layout = if self.manages(old_layout) {
            if self.manages(new_layout) {
                return Ok(NonNull::slice_from_raw_parts(ptr, self.layout.size()));
            }
            self.layout
        } else {
            old_layout
        };
        self.base().grow(ptr, old_layout, new_layout)
    }

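    // Like `grow`, except the bytes beyond the caller's old size must be
    // zeroed before the block is handed back.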
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        let old_layout = if self.manages(old_layout) {
            if self.manages(new_layout) {
                ptr.as_ptr().add(old_layout.size()).write_bytes(0, self.layout.size() - old_layout.size());
                return Ok(NonNull::slice_from_raw_parts(ptr, self.layout.size()));
            }
            self.layout
        } else {
            old_layout
        };
        self.base().grow_zeroed(ptr, old_layout, new_layout)
    }

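    // Shrinking within the managed window keeps the block as is: the caller
    // still gets at least `new_layout.size()` usable bytes.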
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        let old_layout = if self.manages(old_layout) {
            if self.manages(new_layout) {
                return Ok(NonNull::slice_from_raw_parts(ptr, self.layout.size()));
            }
            self.layout
        } else {
            old_layout
        };
        self.base().shrink(ptr, old_layout, new_layout)
    }
}