dyn_stack/mem.rs

use crate::stack_req::StackReq;
use alloc::alloc::handle_alloc_error;
use core::alloc::Layout;
use core::mem::{ManuallyDrop, MaybeUninit};
use core::ptr::NonNull;

use crate::alloc::*;
extern crate alloc;

impl core::fmt::Display for AllocError {
	fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
		fmt.write_str("memory allocation failed")
	}
}

#[cfg(any(feature = "std", feature = "core-error"))]
impl crate::Error for AllocError {}

use super::*;

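// Convert a `StackReq` into a `Layout`, mapping a failed conversion to `AllocError`.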
#[inline]
fn to_layout(req: StackReq) -> Result<Layout, AllocError> {
	req.layout().ok().ok_or(AllocError)
}

#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
impl MemBuffer {
	/// Allocate a memory buffer with sufficient storage for the given stack requirements, using the
	/// global allocator.
	///
	/// Calls [`alloc::alloc::handle_alloc_error`] in the case of failure.
	///
	/// # Example
	/// ```
	/// use dyn_stack::{MemBuffer, MemStack, StackReq};
	///
	/// let req = StackReq::new::<i32>(3);
	/// let mut buf = MemBuffer::new(req);
	/// let stack = MemStack::new(&mut buf);
	///
	/// // use the stack
	/// let (arr, _) = stack.make_with::<i32>(3, |i| i as i32);
	/// ```
	pub fn new(req: StackReq) -> Self {
		Self::new_in(req, Global)
	}

	/// Allocate a memory buffer with sufficient storage for the given stack requirements, using the
	/// global allocator, returning an error if the allocation fails.
	///
	/// # Example
	/// ```
	/// use dyn_stack::{MemBuffer, MemStack, StackReq};
	///
	/// let req = StackReq::new::<i32>(3);
	/// let mut buf = MemBuffer::try_new(req).unwrap();
	/// let stack = MemStack::new(&mut buf);
	///
	/// // use the stack
	/// let (arr, _) = stack.make_with::<i32>(3, |i| i as i32);
	/// ```
	pub fn try_new(req: StackReq) -> Result<Self, AllocError> {
		Self::try_new_in(req, Global)
	}

	/// Creates a `MemBuffer` from its raw components.
	///
	/// # Safety
	///
	/// The arguments to this function must have been acquired from a call to
	/// [`MemBuffer::into_raw_parts`].
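	///
	/// # Example
	///
	/// A minimal sketch of the intended round trip through [`MemBuffer::into_raw_parts`]:
	/// ```
	/// use dyn_stack::{MemBuffer, StackReq};
	///
	/// let buf = MemBuffer::new(StackReq::new::<i32>(3));
	/// // decompose the buffer, then rebuild it from the same parts
	/// let (ptr, len, align) = buf.into_raw_parts();
	/// let _buf = unsafe { MemBuffer::from_raw_parts(ptr, len, align) };
	/// ```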
	#[inline]
	pub unsafe fn from_raw_parts(ptr: *mut u8, len: usize, align: usize) -> Self {
		Self {
			ptr: NonNull::new_unchecked(ptr),
			len,
			align,
			alloc: Global,
		}
	}

	/// Decomposes a `MemBuffer` into its raw components in this order: ptr, length and
	/// alignment.
	#[inline]
	pub fn into_raw_parts(self) -> (*mut u8, usize, usize) {
		let no_drop = ManuallyDrop::new(self);
		(no_drop.ptr.as_ptr(), no_drop.len, no_drop.align)
	}
}

#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
impl PodBuffer {
	/// Allocate a memory buffer with sufficient storage for the given stack requirements, using the
	/// global allocator.
	///
	/// Calls [`alloc::alloc::handle_alloc_error`] in the case of failure.
	///
	/// # Example
	/// ```
	/// use dyn_stack::{PodBuffer, PodStack, StackReq};
	///
	/// let req = StackReq::new::<i32>(3);
	/// let mut buf = PodBuffer::new(req);
	/// let stack = PodStack::new(&mut buf);
	///
	/// // use the stack
	/// let (arr, _) = stack.make_with::<i32>(3, |i| i as i32);
	/// ```
	pub fn new(req: StackReq) -> Self {
		Self::new_in(req, Global)
	}

	/// Allocate a memory buffer with sufficient storage for the given stack requirements, using the
	/// global allocator, returning an error if the allocation fails.
	///
	/// # Example
	/// ```
	/// use dyn_stack::{PodBuffer, PodStack, StackReq};
	///
	/// let req = StackReq::new::<i32>(3);
	/// let mut buf = PodBuffer::try_new(req).unwrap();
	/// let stack = PodStack::new(&mut buf);
	///
	/// // use the stack
	/// let (arr, _) = stack.make_with::<i32>(3, |i| i as i32);
	/// ```
	pub fn try_new(req: StackReq) -> Result<Self, AllocError> {
		Self::try_new_in(req, Global)
	}

	/// Creates a `PodBuffer` from its raw components.
	///
	/// # Safety
	///
	/// The arguments to this function must have been acquired from a call to
	/// [`PodBuffer::into_raw_parts`].
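	///
	/// # Example
	///
	/// A minimal sketch of the intended round trip through [`PodBuffer::into_raw_parts`]:
	/// ```
	/// use dyn_stack::{PodBuffer, StackReq};
	///
	/// let buf = PodBuffer::new(StackReq::new::<i32>(3));
	/// // decompose the buffer, then rebuild it from the same parts
	/// let (ptr, len, align) = buf.into_raw_parts();
	/// let _buf = unsafe { PodBuffer::from_raw_parts(ptr, len, align) };
	/// ```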
	#[inline]
	pub unsafe fn from_raw_parts(ptr: *mut u8, len: usize, align: usize) -> Self {
		Self {
			ptr: NonNull::new_unchecked(ptr),
			len,
			align,
			alloc: Global,
		}
	}

	/// Decomposes a `PodBuffer` into its raw components in this order: ptr, length and
	/// alignment.
	#[inline]
	pub fn into_raw_parts(self) -> (*mut u8, usize, usize) {
		let no_drop = ManuallyDrop::new(self);
		(no_drop.ptr.as_ptr(), no_drop.len, no_drop.align)
	}
}

#[cfg(feature = "alloc")]
/// Buffer of uninitialized bytes to serve as workspace for dynamic arrays.
pub struct MemBuffer<A: Allocator = Global> {
	ptr: NonNull<u8>,
	len: usize,
	align: usize,
	alloc: A,
}

#[cfg(feature = "alloc")]
/// Buffer of initialized bytes to serve as workspace for dynamic arrays.
pub struct PodBuffer<A: Allocator = Global> {
	ptr: NonNull<u8>,
	len: usize,
	align: usize,
	alloc: A,
}

#[cfg(not(feature = "alloc"))]
/// Buffer of uninitialized bytes to serve as workspace for dynamic arrays.
pub struct MemBuffer<A: Allocator> {
	ptr: NonNull<u8>,
	len: usize,
	align: usize,
	alloc: A,
}

#[cfg(not(feature = "alloc"))]
/// Buffer of initialized bytes to serve as workspace for dynamic arrays.
pub struct PodBuffer<A: Allocator> {
	ptr: NonNull<u8>,
	len: usize,
	align: usize,
	alloc: A,
}

unsafe impl<A: Allocator + Sync> Sync for MemBuffer<A> {}
unsafe impl<A: Allocator + Send> Send for MemBuffer<A> {}

unsafe impl<A: Allocator + Sync> Sync for PodBuffer<A> {}
unsafe impl<A: Allocator + Send> Send for PodBuffer<A> {}

impl<A: Allocator> Drop for MemBuffer<A> {
	#[inline]
	fn drop(&mut self) {
		// SAFETY: `self.ptr` was allocated by `self.alloc` with a layout of this size and alignment
		unsafe { self.alloc.deallocate(self.ptr, Layout::from_size_align_unchecked(self.len, self.align)) }
	}
}

impl<A: Allocator> Drop for PodBuffer<A> {
	#[inline]
	fn drop(&mut self) {
		// SAFETY: `self.ptr` was allocated by `self.alloc` with a layout of this size and alignment
		unsafe { self.alloc.deallocate(self.ptr, Layout::from_size_align_unchecked(self.len, self.align)) }
	}
}

impl<A: Allocator> PodBuffer<A> {
	/// Allocate a memory buffer with sufficient storage for the given stack requirements, using the
	/// provided allocator.
	///
	/// Calls [`alloc::alloc::handle_alloc_error`] in the case of failure.
	///
	/// # Example
	/// ```
	/// use dyn_stack::alloc::Global;
	/// use dyn_stack::{PodBuffer, PodStack, StackReq};
	///
	/// let req = StackReq::new::<i32>(3);
	/// let mut buf = PodBuffer::new_in(req, Global);
	/// let stack = PodStack::new(&mut buf);
	///
	/// // use the stack
	/// let (arr, _) = stack.make_with::<i32>(3, |i| i as i32);
	/// ```
	pub fn new_in(req: StackReq, alloc: A) -> Self {
		Self::try_new_in(req, alloc).unwrap_or_else(|_| handle_alloc_error(to_layout(req).unwrap()))
	}

	/// Allocate a memory buffer with sufficient storage for the given stack requirements, using the
	/// provided allocator, returning an `AllocError` if the allocation fails.
	///
	/// # Example
	/// ```
	/// use dyn_stack::alloc::Global;
	/// use dyn_stack::{PodBuffer, PodStack, StackReq};
	///
	/// let req = StackReq::new::<i32>(3);
	/// let mut buf = PodBuffer::try_new_in(req, Global).unwrap();
	/// let stack = PodStack::new(&mut buf);
	///
	/// // use the stack
	/// let (arr, _) = stack.make_with::<i32>(3, |i| i as i32);
	/// ```
	pub fn try_new_in(req: StackReq, alloc: A) -> Result<Self, AllocError> {
		unsafe {
			let ptr = &mut *(alloc.allocate_zeroed(to_layout(req)?).map_err(|_| AllocError)?.as_ptr() as *mut [MaybeUninit<u8>]);
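			// in debug builds, overwrite the zeroed allocation with 0xCD so that callers
			// cannot accidentally rely on the memory being zero-initialized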
			#[cfg(debug_assertions)]
			ptr.fill(MaybeUninit::new(0xCD));

			let len = ptr.len();
			let ptr = NonNull::new_unchecked(ptr.as_mut_ptr() as *mut u8);
			Ok(PodBuffer {
				alloc,
				ptr,
				len,
				align: req.align_bytes(),
			})
		}
	}

	/// Creates a `PodBuffer` from its raw components.
	///
	/// # Safety
	///
	/// The arguments to this function must have been acquired from a call to
	/// [`PodBuffer::into_raw_parts_with_alloc`].
	#[inline]
	pub unsafe fn from_raw_parts_in(ptr: *mut u8, len: usize, align: usize, alloc: A) -> Self {
		Self {
			ptr: NonNull::new_unchecked(ptr),
			len,
			align,
			alloc,
		}
	}

	/// Decomposes a `PodBuffer` into its raw components in this order: ptr, length, alignment
	/// and allocator.
	#[inline]
	pub fn into_raw_parts_with_alloc(self) -> (*mut u8, usize, usize, A) {
		let me = ManuallyDrop::new(self);
		(me.ptr.as_ptr(), me.len, me.align, unsafe {
			core::ptr::read(core::ptr::addr_of!(me.alloc))
		})
	}
}

impl<A: Allocator> MemBuffer<A> {
	/// Allocate a memory buffer with sufficient storage for the given stack requirements, using the
	/// provided allocator.
	///
	/// Calls [`alloc::alloc::handle_alloc_error`] in the case of failure.
	///
	/// # Example
	/// ```
	/// use dyn_stack::alloc::Global;
	/// use dyn_stack::{MemBuffer, MemStack, StackReq};
	///
	/// let req = StackReq::new::<i32>(3);
	/// let mut buf = MemBuffer::new_in(req, Global);
	/// let stack = MemStack::new(&mut buf);
	///
	/// // use the stack
	/// let (arr, _) = stack.make_with::<i32>(3, |i| i as i32);
	/// ```
	pub fn new_in(req: StackReq, alloc: A) -> Self {
		Self::try_new_in(req, alloc).unwrap_or_else(|_| handle_alloc_error(to_layout(req).unwrap()))
	}

	/// Allocate a memory buffer with sufficient storage for the given stack requirements, using the
	/// provided allocator, returning an `AllocError` if the allocation fails.
	///
	/// # Example
	/// ```
	/// use dyn_stack::alloc::Global;
	/// use dyn_stack::{MemBuffer, MemStack, StackReq};
	///
	/// let req = StackReq::new::<i32>(3);
	/// let mut buf = MemBuffer::try_new_in(req, Global).unwrap();
	/// let stack = MemStack::new(&mut buf);
	///
	/// // use the stack
	/// let (arr, _) = stack.make_with::<i32>(3, |i| i as i32);
	/// ```
	pub fn try_new_in(req: StackReq, alloc: A) -> Result<Self, AllocError> {
		unsafe {
			let ptr = &mut *(alloc.allocate(to_layout(req)?).map_err(|_| AllocError)?.as_ptr() as *mut [MaybeUninit<u8>]);
			let len = ptr.len();
			let ptr = NonNull::new_unchecked(ptr.as_mut_ptr() as *mut u8);
			Ok(MemBuffer {
				alloc,
				ptr,
				len,
				align: req.align_bytes(),
			})
		}
	}

	/// Creates a `MemBuffer` from its raw components.
	///
	/// # Safety
	///
	/// The arguments to this function must have been acquired from a call to
	/// [`MemBuffer::into_raw_parts_with_alloc`].
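	///
	/// # Example
	///
	/// An illustrative round trip through [`MemBuffer::into_raw_parts_with_alloc`], using the
	/// `Global` allocator:
	/// ```
	/// use dyn_stack::alloc::Global;
	/// use dyn_stack::{MemBuffer, StackReq};
	///
	/// let buf = MemBuffer::new_in(StackReq::new::<i32>(3), Global);
	/// // decompose the buffer, then rebuild it from the same parts and allocator
	/// let (ptr, len, align, alloc) = buf.into_raw_parts_with_alloc();
	/// let _buf = unsafe { MemBuffer::from_raw_parts_in(ptr, len, align, alloc) };
	/// ```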
	#[inline]
	pub unsafe fn from_raw_parts_in(ptr: *mut u8, len: usize, align: usize, alloc: A) -> Self {
		Self {
			ptr: NonNull::new_unchecked(ptr),
			len,
			align,
			alloc,
		}
	}

	/// Decomposes a `MemBuffer` into its raw components in this order: ptr, length, alignment
	/// and allocator.
	#[inline]
	pub fn into_raw_parts_with_alloc(self) -> (*mut u8, usize, usize, A) {
		let me = ManuallyDrop::new(self);
		(me.ptr.as_ptr(), me.len, me.align, unsafe {
			core::ptr::read(core::ptr::addr_of!(me.alloc))
		})
	}
}

impl<A: Allocator> core::ops::Deref for MemBuffer<A> {
	type Target = [MaybeUninit<u8>];

	#[inline]
	fn deref(&self) -> &Self::Target {
		unsafe { core::slice::from_raw_parts(self.ptr.as_ptr() as *const MaybeUninit<u8>, self.len) }
	}
}

impl<A: Allocator> core::ops::DerefMut for MemBuffer<A> {
	#[inline]
	fn deref_mut(&mut self) -> &mut Self::Target {
		unsafe { core::slice::from_raw_parts_mut(self.ptr.as_ptr() as *mut MaybeUninit<u8>, self.len) }
	}
}

impl<A: Allocator> core::ops::Deref for PodBuffer<A> {
	type Target = [u8];

	#[inline]
	fn deref(&self) -> &Self::Target {
		unsafe { core::slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
	}
}

impl<A: Allocator> core::ops::DerefMut for PodBuffer<A> {
	#[inline]
	fn deref_mut(&mut self) -> &mut Self::Target {
		unsafe { core::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
	}
}

/// Error during memory allocation.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct AllocError;