use crate::stack_req::StackReq;
use alloc::alloc::handle_alloc_error;
use core::alloc::Layout;
use core::mem::{ManuallyDrop, MaybeUninit};
use core::ptr::NonNull;

use crate::alloc::*;
extern crate alloc;

impl core::fmt::Display for AllocError {
    fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
        fmt.write_str("memory allocation failed")
    }
}

#[cfg(any(feature = "std", feature = "core-error"))]
impl crate::Error for AllocError {}

use super::*;

#[inline]
fn to_layout(req: StackReq) -> Result<Layout, AllocError> {
    req.layout().ok().ok_or(AllocError)
}

#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
impl MemBuffer {
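    /// Allocates a memory buffer that satisfies `req`, using the global allocator.
    ///
    /// Calls `handle_alloc_error` if the allocation fails.
    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the crate is consumed as `dyn_stack` and that
    /// `StackReq::new::<T>(n)` requests storage for `n` values of type `T`.
    ///
    /// ```
    /// use dyn_stack::{MemBuffer, StackReq};
    ///
    /// let req = StackReq::new::<u64>(32);
    /// let buf = MemBuffer::new(req);
    /// // the buffer is at least as large as requested
    /// assert!(buf.len() >= 32 * core::mem::size_of::<u64>());
    /// ```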
    pub fn new(req: StackReq) -> Self {
        Self::new_in(req, Global)
    }

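    /// Attempts to allocate a memory buffer that satisfies `req`, using the global
    /// allocator, and returns an [`AllocError`] if the allocation fails.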
    pub fn try_new(req: StackReq) -> Result<Self, AllocError> {
        Self::try_new_in(req, Global)
    }

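    /// Reconstructs a buffer from the raw parts returned by [`MemBuffer::into_raw_parts`].
    ///
    /// # Safety
    ///
    /// `(ptr, len, align)` must describe a live allocation in the global allocator,
    /// such as one previously released with [`MemBuffer::into_raw_parts`], and the
    /// allocation must not be used or freed through any other handle afterwards.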
    #[inline]
    pub unsafe fn from_raw_parts(ptr: *mut u8, len: usize, align: usize) -> Self {
        Self {
            ptr: NonNull::new_unchecked(ptr),
            len,
            align,
            alloc: Global,
        }
    }

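    /// Decomposes the buffer into its raw parts `(ptr, len, align)` without deallocating.
    /// The caller becomes responsible for the memory, e.g. by rebuilding the buffer with
    /// [`MemBuffer::from_raw_parts`].
    ///
    /// # Example
    ///
    /// An illustrative round trip, assuming the crate is consumed as `dyn_stack`.
    ///
    /// ```
    /// use dyn_stack::{MemBuffer, StackReq};
    ///
    /// let buf = MemBuffer::new(StackReq::new::<u8>(16));
    /// let (ptr, len, align) = buf.into_raw_parts();
    /// // SAFETY: the parts come from `into_raw_parts` and are reused exactly once.
    /// let buf = unsafe { MemBuffer::from_raw_parts(ptr, len, align) };
    /// assert_eq!(buf.len(), len);
    /// ```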
    #[inline]
    pub fn into_raw_parts(self) -> (*mut u8, usize, usize) {
        let no_drop = ManuallyDrop::new(self);
        (no_drop.ptr.as_ptr(), no_drop.len, no_drop.align)
    }
}

#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
impl PodBuffer {
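    /// Allocates a buffer of initialized bytes that satisfies `req`, using the global
    /// allocator.
    ///
    /// Calls `handle_alloc_error` if the allocation fails.
    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the crate is consumed as `dyn_stack` and that
    /// `StackReq::new::<T>(n)` requests storage for `n` values of type `T`.
    ///
    /// ```
    /// use dyn_stack::{PodBuffer, StackReq};
    ///
    /// let mut buf = PodBuffer::new(StackReq::new::<u32>(8));
    /// // `PodBuffer` dereferences to `[u8]`, so the bytes can be read and written directly
    /// buf[0] = 1;
    /// assert!(buf.len() >= 8 * core::mem::size_of::<u32>());
    /// ```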
    pub fn new(req: StackReq) -> Self {
        Self::new_in(req, Global)
    }

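    /// Attempts to allocate a buffer of initialized bytes that satisfies `req`, using the
    /// global allocator, and returns an [`AllocError`] if the allocation fails.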
    pub fn try_new(req: StackReq) -> Result<Self, AllocError> {
        Self::try_new_in(req, Global)
    }

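    /// Reconstructs a buffer from the raw parts returned by [`PodBuffer::into_raw_parts`].
    ///
    /// # Safety
    ///
    /// `(ptr, len, align)` must describe a live allocation in the global allocator,
    /// such as one previously released with [`PodBuffer::into_raw_parts`], and the
    /// allocation must not be used or freed through any other handle afterwards.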
    #[inline]
    pub unsafe fn from_raw_parts(ptr: *mut u8, len: usize, align: usize) -> Self {
        Self {
            ptr: NonNull::new_unchecked(ptr),
            len,
            align,
            alloc: Global,
        }
    }

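    /// Decomposes the buffer into its raw parts `(ptr, len, align)` without deallocating.
    /// The caller becomes responsible for the memory, e.g. by rebuilding the buffer with
    /// [`PodBuffer::from_raw_parts`].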
    #[inline]
    pub fn into_raw_parts(self) -> (*mut u8, usize, usize) {
        let no_drop = ManuallyDrop::new(self);
        (no_drop.ptr.as_ptr(), no_drop.len, no_drop.align)
    }
}

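/// Memory buffer of uninitialized bytes, allocated with the allocator `A`.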
#[cfg(feature = "alloc")]
pub struct MemBuffer<A: Allocator = Global> {
    ptr: NonNull<u8>,
    len: usize,
    align: usize,
    alloc: A,
}

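/// Memory buffer of initialized bytes, allocated with the allocator `A`.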
#[cfg(feature = "alloc")]
pub struct PodBuffer<A: Allocator = Global> {
    ptr: NonNull<u8>,
    len: usize,
    align: usize,
    alloc: A,
}

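/// Memory buffer of uninitialized bytes, allocated with the allocator `A`.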
#[cfg(not(feature = "alloc"))]
pub struct MemBuffer<A: Allocator> {
    ptr: NonNull<u8>,
    len: usize,
    align: usize,
    alloc: A,
}

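/// Memory buffer of initialized bytes, allocated with the allocator `A`.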
#[cfg(not(feature = "alloc"))]
pub struct PodBuffer<A: Allocator> {
    ptr: NonNull<u8>,
    len: usize,
    align: usize,
    alloc: A,
}

unsafe impl<A: Allocator + Sync> Sync for MemBuffer<A> {}
unsafe impl<A: Allocator + Send> Send for MemBuffer<A> {}

unsafe impl<A: Allocator + Sync> Sync for PodBuffer<A> {}
unsafe impl<A: Allocator + Send> Send for PodBuffer<A> {}

impl<A: Allocator> Drop for MemBuffer<A> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            self.alloc.deallocate(
                self.ptr,
                Layout::from_size_align_unchecked(self.len, self.align),
            )
        }
    }
}

impl<A: Allocator> Drop for PodBuffer<A> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            self.alloc.deallocate(
                self.ptr,
                Layout::from_size_align_unchecked(self.len, self.align),
            )
        }
    }
}

impl<A: Allocator> PodBuffer<A> {
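    /// Allocates a buffer of initialized bytes that satisfies `req`, using the provided
    /// allocator.
    ///
    /// Calls `handle_alloc_error` if the allocation fails.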
    pub fn new_in(req: StackReq, alloc: A) -> Self {
        Self::try_new_in(req, alloc)
            .unwrap_or_else(|_| handle_alloc_error(to_layout(req).unwrap()))
    }

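    /// Attempts to allocate a buffer of initialized bytes that satisfies `req`, using the
    /// provided allocator, and returns an [`AllocError`] if the allocation fails.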
    pub fn try_new_in(req: StackReq, alloc: A) -> Result<Self, AllocError> {
        unsafe {
            let ptr = &mut *(alloc
                .allocate_zeroed(to_layout(req)?)
                .map_err(|_| AllocError)?
                .as_ptr() as *mut [MaybeUninit<u8>]);
            // In debug builds, overwrite the zeroed allocation with a 0xCD sentinel so that
            // callers cannot rely on the buffer being zero-initialized.
            #[cfg(debug_assertions)]
            ptr.fill(MaybeUninit::new(0xCD));

            let len = ptr.len();
            let ptr = NonNull::new_unchecked(ptr.as_mut_ptr() as *mut u8);
            Ok(PodBuffer {
                alloc,
                ptr,
                len,
                align: req.align_bytes(),
            })
        }
    }

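    /// Reconstructs a buffer from the raw parts returned by
    /// [`PodBuffer::into_raw_parts_with_alloc`].
    ///
    /// # Safety
    ///
    /// `(ptr, len, align)` must describe a live allocation owned by `alloc`, such as one
    /// previously released with [`PodBuffer::into_raw_parts_with_alloc`], and the
    /// allocation must not be used or freed through any other handle afterwards.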
    #[inline]
    pub unsafe fn from_raw_parts_in(ptr: *mut u8, len: usize, align: usize, alloc: A) -> Self {
        Self {
            ptr: NonNull::new_unchecked(ptr),
            len,
            align,
            alloc,
        }
    }

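    /// Decomposes the buffer into its raw parts `(ptr, len, align, alloc)` without
    /// deallocating. The caller becomes responsible for the memory, e.g. by rebuilding
    /// the buffer with [`PodBuffer::from_raw_parts_in`].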
    #[inline]
    pub fn into_raw_parts_with_alloc(self) -> (*mut u8, usize, usize, A) {
        let me = ManuallyDrop::new(self);
        (me.ptr.as_ptr(), me.len, me.align, unsafe {
            core::ptr::read(core::ptr::addr_of!(me.alloc))
        })
    }
}

impl<A: Allocator> MemBuffer<A> {
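    /// Allocates a memory buffer that satisfies `req`, using the provided allocator.
    ///
    /// Calls `handle_alloc_error` if the allocation fails.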
    pub fn new_in(req: StackReq, alloc: A) -> Self {
        Self::try_new_in(req, alloc)
            .unwrap_or_else(|_| handle_alloc_error(to_layout(req).unwrap()))
    }

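    /// Attempts to allocate a memory buffer that satisfies `req`, using the provided
    /// allocator, and returns an [`AllocError`] if the allocation fails.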
    pub fn try_new_in(req: StackReq, alloc: A) -> Result<Self, AllocError> {
        unsafe {
            let ptr = &mut *(alloc
                .allocate(to_layout(req)?)
                .map_err(|_| AllocError)?
                .as_ptr() as *mut [MaybeUninit<u8>]);
            let len = ptr.len();
            let ptr = NonNull::new_unchecked(ptr.as_mut_ptr() as *mut u8);
            Ok(MemBuffer {
                alloc,
                ptr,
                len,
                align: req.align_bytes(),
            })
        }
    }

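    /// Reconstructs a buffer from the raw parts returned by
    /// [`MemBuffer::into_raw_parts_with_alloc`].
    ///
    /// # Safety
    ///
    /// `(ptr, len, align)` must describe a live allocation owned by `alloc`, such as one
    /// previously released with [`MemBuffer::into_raw_parts_with_alloc`], and the
    /// allocation must not be used or freed through any other handle afterwards.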
    #[inline]
    pub unsafe fn from_raw_parts_in(ptr: *mut u8, len: usize, align: usize, alloc: A) -> Self {
        Self {
            ptr: NonNull::new_unchecked(ptr),
            len,
            align,
            alloc,
        }
    }

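    /// Decomposes the buffer into its raw parts `(ptr, len, align, alloc)` without
    /// deallocating. The caller becomes responsible for the memory, e.g. by rebuilding
    /// the buffer with [`MemBuffer::from_raw_parts_in`].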
    #[inline]
    pub fn into_raw_parts_with_alloc(self) -> (*mut u8, usize, usize, A) {
        let me = ManuallyDrop::new(self);
        (me.ptr.as_ptr(), me.len, me.align, unsafe {
            core::ptr::read(core::ptr::addr_of!(me.alloc))
        })
    }
}

impl<A: Allocator> core::ops::Deref for MemBuffer<A> {
    type Target = [MaybeUninit<u8>];

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe { core::slice::from_raw_parts(self.ptr.as_ptr() as *const MaybeUninit<u8>, self.len) }
    }
}

impl<A: Allocator> core::ops::DerefMut for MemBuffer<A> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { core::slice::from_raw_parts_mut(self.ptr.as_ptr() as *mut MaybeUninit<u8>, self.len) }
    }
}

impl<A: Allocator> core::ops::Deref for PodBuffer<A> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe { core::slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
    }
}

impl<A: Allocator> core::ops::DerefMut for PodBuffer<A> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { core::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
    }
}

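/// Error value returned when a memory allocation fails.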
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct AllocError;