1use core::alloc::{GlobalAlloc, Layout};
2use core::fmt::{self, Debug, Formatter};
3use core::marker::PhantomData;
4use core::ops::Deref;
5use core::ptr::NonNull;
6
7extern crate std;
8use std::sync::{Mutex, MutexGuard};
9
10use crate::align::{Align, Alignment};
11use crate::{AllocChain, AllocError, ChainableAlloc, UnsafeStalloc};
12
/// A thread-safe wrapper around [`UnsafeStalloc`]: every operation first
/// acquires the internal `Mutex<()>`, so only one thread touches the
/// allocator at a time. The mutex guards no data of its own — it exists
/// purely to serialize access to the adjacent `UnsafeStalloc`.
///
/// `L` is the number of blocks and `B` the block size in bytes, constrained
/// by the `Align<B>: Alignment` bound.
// NOTE(review): `#[repr(C)]` pins field order (lock before allocator) —
// presumably some other code relies on this layout; confirm before removing.
#[repr(C)]
pub struct SyncStalloc<const L: usize, const B: usize>(Mutex<()>, UnsafeStalloc<L, B>)
where
    Align<B>: Alignment;
19
/// RAII guard returned by [`SyncStalloc::acquire_locked`].
///
/// While this guard is alive, the owning `SyncStalloc`'s mutex is held, so
/// the access to the inner [`UnsafeStalloc`] provided via `Deref` is
/// exclusive. Dropping the guard releases the lock.
pub struct StallocGuard<'a, const L: usize, const B: usize>
where
    Align<B>: Alignment,
{
    // Held only for its Drop impl: releases the mutex when the guard dies.
    _guard: MutexGuard<'a, ()>,
    // The allocator this guard grants access to (see the Deref impl below).
    inner: &'a UnsafeStalloc<L, B>,
    // `*const ()` is neither Send nor Sync, so the guard cannot be moved to
    // or shared with another thread.
    _not_sync: PhantomData<*const ()>,
}
32
33impl<const L: usize, const B: usize> Deref for StallocGuard<'_, L, B>
34where
35 Align<B>: Alignment,
36{
37 type Target = UnsafeStalloc<L, B>;
38
39 fn deref(&self) -> &Self::Target {
40 self.inner
41 }
42}
43
44impl<const L: usize, const B: usize> SyncStalloc<L, B>
45where
46 Align<B>: Alignment,
47{
48 #[must_use]
57 pub const fn new() -> Self {
58 Self(Mutex::new(()), unsafe { UnsafeStalloc::<L, B>::new() })
61 }
62
63 pub fn is_oom(&self) -> bool {
68 self.acquire_locked().is_oom()
69 }
70
71 pub fn is_empty(&self) -> bool {
77 self.acquire_locked().is_empty()
78 }
79
80 pub unsafe fn clear(&self) {
85 unsafe { self.acquire_locked().clear() }
87 }
88
89 pub unsafe fn allocate_blocks(
100 &self,
101 size: usize,
102 align: usize,
103 ) -> Result<NonNull<u8>, AllocError> {
104 unsafe { self.acquire_locked().allocate_blocks(size, align) }
106 }
107
108 pub unsafe fn deallocate_blocks(&self, ptr: NonNull<u8>, size: usize) {
115 unsafe { self.acquire_locked().deallocate_blocks(ptr, size) }
117 }
118
119 pub unsafe fn shrink_in_place(&self, ptr: NonNull<u8>, old_size: usize, new_size: usize) {
125 unsafe {
127 self.acquire_locked()
128 .shrink_in_place(ptr, old_size, new_size);
129 }
130 }
131
132 pub unsafe fn grow_in_place(
142 &self,
143 ptr: NonNull<u8>,
144 old_size: usize,
145 new_size: usize,
146 ) -> Result<(), AllocError> {
147 unsafe { self.acquire_locked().grow_in_place(ptr, old_size, new_size) }
149 }
150
151 pub unsafe fn grow_up_to(&self, ptr: NonNull<u8>, old_size: usize, new_size: usize) -> usize {
158 unsafe { self.acquire_locked().grow_up_to(ptr, old_size, new_size) }
160 }
161
162 pub fn acquire_locked(&self) -> StallocGuard<'_, L, B> {
181 StallocGuard {
184 _guard: unsafe { self.0.lock().unwrap_unchecked() },
185 inner: &self.1,
186 _not_sync: PhantomData,
187 }
188 }
189}
190
191impl<const L: usize, const B: usize> Default for SyncStalloc<L, B>
192where
193 Align<B>: Alignment,
194{
195 fn default() -> Self {
196 Self::new()
197 }
198}
199
200impl<const L: usize, const B: usize> Debug for SyncStalloc<L, B>
201where
202 Align<B>: Alignment,
203{
204 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
205 write!(f, "{:?}", self.acquire_locked().inner)
206 }
207}
208
209unsafe impl<const L: usize, const B: usize> GlobalAlloc for SyncStalloc<L, B>
210where
211 Align<B>: Alignment,
212{
213 unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
214 unsafe { self.acquire_locked().alloc(layout) }
216 }
217
218 unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
219 unsafe { self.acquire_locked().alloc_zeroed(layout) }
221 }
222
223 unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
224 unsafe { self.acquire_locked().dealloc(ptr, layout) }
226 }
227
228 unsafe fn realloc(&self, ptr: *mut u8, old_layout: Layout, new_size: usize) -> *mut u8 {
229 unsafe { self.acquire_locked().realloc(ptr, old_layout, new_size) }
231 }
232}
233
234#[cfg(any(feature = "allocator-api", feature = "allocator-api2"))]
235use crate::Allocator;
236
// Feature-gated `Allocator` impl on `&SyncStalloc` (reference, not the type
// itself), mirroring how the allocator-api traits are usually implemented
// for lock-guarded allocators. Each method takes the lock via
// `acquire_locked()` and delegates through `&*guard`, i.e. to the
// `Allocator` impl available on (a reference to) `UnsafeStalloc`.
// NOTE(review): the `(&*self.acquire_locked())` form relies on exact
// auto-ref/deref trait resolution against whichever trait the enabled
// feature selects ("allocator-api" or "allocator-api2") — do not simplify
// without checking which impl gets picked.
#[cfg(any(feature = "allocator-api", feature = "allocator-api2"))]
unsafe impl<const L: usize, const B: usize> Allocator for &SyncStalloc<L, B>
where
    Align<B>: Alignment,
{
    // Locks, then delegates; the guard (and lock) lives for the whole call.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (&*self.acquire_locked()).allocate(layout)
    }

    // # Safety: forwarded — the caller upholds the `Allocator::deallocate`
    // contract (ptr/layout must match a live allocation from this allocator).
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe {
            (&*self.acquire_locked()).deallocate(ptr, layout);
        }
    }

    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (&*self.acquire_locked()).allocate_zeroed(layout)
    }

    // # Safety: forwarded — caller upholds the `Allocator::grow` contract.
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (&*self.acquire_locked()).grow(ptr, old_layout, new_layout) }
    }

    // # Safety: forwarded — caller upholds the `Allocator::grow_zeroed` contract.
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (&*self.acquire_locked()).grow_zeroed(ptr, old_layout, new_layout) }
    }

    // # Safety: forwarded — caller upholds the `Allocator::shrink` contract.
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (&*self.acquire_locked()).shrink(ptr, old_layout, new_layout) }
    }

    // Identity: `Self` is already a reference type here.
    fn by_ref(&self) -> &Self
    where
        Self: Sized,
    {
        self
    }
}
294
295unsafe impl<const L: usize, const B: usize> ChainableAlloc for SyncStalloc<L, B>
296where
297 Align<B>: Alignment,
298{
299 fn claims(&self, ptr: *mut u8, layout: Layout) -> bool {
300 self.1.claims(ptr, layout)
301 }
302}
303
impl<const L: usize, const B: usize> SyncStalloc<L, B>
where
    Align<B>: Alignment,
{
    /// Combines this allocator with `next` into an [`AllocChain`].
    // NOTE(review): argument order suggests requests are tried on `self`
    // first with `next` as the fallback — confirm against `AllocChain::new`.
    pub const fn chain<T>(self, next: &T) -> AllocChain<'_, Self, T>
    where
        Self: Sized,
    {
        AllocChain::new(self, next)
    }
}