#![no_std]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![deny(elided_lifetimes_in_paths)]
#![allow(clippy::missing_transmute_annotations, clippy::type_complexity)]

#[doc(hidden)]
pub use dyn_stack_macros::alloc_impl_proc;

#[doc(hidden)]
#[macro_export]
macro_rules! alloc_impl_rules {
    (
        $stack:ident
        let $pat:pat = $($mac:ident)::+!($($input:tt)*)
    ) => {
        $($mac)::*!(@ alloc($stack)($pat)($($input)*) );
    };
    (
        $stack:ident
        let $pat:pat = $($mac:ident)::+![$($input:tt)*]
    ) => {
        $($mac)::*!(@ alloc($stack)($pat)($($input)*) );
    };
    (
        $stack:ident
        let $pat:pat = $($mac:ident)::+!{$($input:tt)*}
    ) => {
        $($mac)::*!(@ alloc($stack)($pat)($($input)*) );
    };
    (
        $stack:ident
        let $pat:pat = unsafe {$($mac:ident)::+!($($input:tt)*)}
    ) => {
        $($mac)::*!(@ alloc unsafe ($stack)($pat)($($input)*) );
    };
    (
        $stack:ident
        let $pat:pat = unsafe {$($mac:ident)::+![$($input:tt)*]}
    ) => {
        $($mac)::*!(@ alloc unsafe ($stack)($pat)($($input)*) );
    };
    (
        $stack:ident
        let $pat:pat = unsafe {$($mac:ident)::+!{$($input:tt)*}}
    ) => {
        $($mac)::*!(@ alloc unsafe ($stack)($pat)($($input)*) );
    };
}

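/// Allocates storage on a stack for each `let` binding in the block whose right-hand side is a
/// supported allocation macro such as [`slice!`], threading the shrinking stack through the
/// bindings.
///
/// A hedged sketch of the intended shape (illustrative only: the surface syntax is defined by
/// the hidden `alloc_impl_proc` proc macro, and the stack and variable names here are
/// hypothetical):
///
/// ```rust,ignore
/// use dyn_stack::{MemBuffer, MemStack, StackReq};
///
/// let mut buf = MemBuffer::new(StackReq::new::<i32>(4));
/// let stack = MemStack::new(&mut buf);
/// dyn_stack::alloc!('s: {
///     // `slice!` forwards its arguments to `core::iter::repeat_n`
///     let xs = slice![0_i32, 4];
///     assert_eq!(xs.len(), 4);
/// });
/// ```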
#[macro_export]
macro_rules! alloc {
    ($stack:lifetime : {
        $($tt:tt)*
    }) => {
        $crate::alloc_impl_proc!(($crate) $stack {$($tt)*});
    };
}

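/// Allocation helper recognized inside [`alloc!`] blocks.
///
/// Judging from its `@ alloc` expansion below, it forwards its arguments to
/// [`core::iter::repeat_n`] and collects the result from the active stack, so it takes a value
/// to repeat followed by a count. It is not meant to be invoked on its own.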
#[macro_export]
macro_rules! slice {
    (@ alloc $($unsafe:ident)? ($stack:ident) ($var:pat) ($($arg: expr),+ $(,)?)) => {
        let (mut __slice__, $stack) = $($unsafe)? { $stack.collect(::core::iter::repeat_n($($arg,)*)) };
        let $var = &mut *__slice__;
    };
}

#[cfg(feature = "std")]
extern crate std;

#[cfg(feature = "std")]
pub use std::error::Error;

#[cfg(all(feature = "core-error", not(feature = "std")))]
pub use core::error::Error;

pub mod alloc;

pub mod mem;

/// Alias for [`MemStack`].
pub type DynStack = MemStack;

use bytemuck::Pod;

#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
pub use mem::MemBuffer;
#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
pub use mem::PodBuffer;

mod stack_req;
pub use stack_req::StackReq;

use core::fmt::Debug;
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{fmt, slice};

/// Stack wrapper around a buffer of possibly uninitialized bytes.
#[repr(transparent)]
pub struct MemStack {
    buffer: [MaybeUninit<u8>],
}
/// Stack wrapper around a buffer of plain bytes.
#[repr(transparent)]
pub struct PodStack {
    buffer: [u8],
}

/// Array allocated from a parent [`MemStack`]; its elements are dropped when it goes out of
/// scope.
pub struct DynArray<'a, T> {
    ptr: NonNull<T>,
    len: usize,
    __marker: PhantomData<(&'a T, T)>,
}

impl<T> DynArray<'_, T> {
    #[inline]
    #[doc(hidden)]
    pub fn into_raw_parts(self) -> (*mut T, usize) {
        let this = core::mem::ManuallyDrop::new(self);
        (this.ptr.as_ptr(), this.len)
    }

    #[inline]
    #[doc(hidden)]
    pub unsafe fn from_raw_parts(ptr: *mut T, len: usize) -> Self {
        Self {
            ptr: NonNull::new_unchecked(ptr),
            len,
            __marker: PhantomData,
        }
    }
}

/// Byte region borrowed from a [`PodStack`] that can be used as a [`MemStack`], and whose
/// contents are laundered when it is dropped.
pub struct UnpodStack<'a> {
    ptr: NonNull<u8>,
    len: usize,
    __marker: PhantomData<&'a ()>,
}

impl<T: Debug> Debug for DynArray<'_, T> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        (**self).fmt(fmt)
    }
}

unsafe impl<T> Send for DynArray<'_, T> where T: Send {}
unsafe impl<T> Sync for DynArray<'_, T> where T: Sync {}

unsafe impl Send for UnpodStack<'_> {}
unsafe impl Sync for UnpodStack<'_> {}

impl<T> Drop for DynArray<'_, T> {
    #[inline]
    fn drop(&mut self) {
        unsafe { core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len)) };
    }
}

macro_rules! if_cfg {
    (if $cfg: meta $if_true: block else $if_false: block $(,)?) => {
        #[cfg($cfg)]
        {
            $if_true
        }
        #[cfg(not($cfg))]
        {
            $if_false
        }
    };
}

// Launders the given memory region: on the listed architectures (outside miri and debug
// builds), an empty asm block that takes the pointer as input serves as an opaque optimization
// barrier, so the compiler can no longer assume anything about the region's contents;
// elsewhere, the region is overwritten with an arbitrary byte pattern instead.
#[inline(always)]
unsafe fn launder(ptr: *mut u8, len: usize) {
    unsafe {
        if_cfg!(if all(
            not(debug_assertions),
            not(miri),
            any(
                target_arch = "x86",
                target_arch = "x86_64",
                target_arch = "arm",
                target_arch = "aarch64",
                target_arch = "loongarch64",
                target_arch = "riscv32",
                target_arch = "riscv64",
            )
        ) {
            _ = len;
            core::arch::asm! { "/* {0} */", in(reg) ptr, options(nostack) }
        } else {
            const ARBITRARY_BYTE: u8 = 0xCD;
            core::ptr::write_bytes(ptr, ARBITRARY_BYTE, len)
        });
    }
}

impl Drop for UnpodStack<'_> {
    #[inline]
    fn drop(&mut self) {
        unsafe { launder(self.ptr.as_ptr(), self.len) };
    }
}

impl<T> Deref for DynArray<'_, T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &'_ Self::Target {
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
    }
}

impl<T> DerefMut for DynArray<'_, T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
    }
}

impl<T> AsRef<[T]> for DynArray<'_, T> {
    #[inline]
    fn as_ref(&self) -> &'_ [T] {
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
    }
}

impl<T> AsMut<[T]> for DynArray<'_, T> {
    #[inline]
    fn as_mut(&mut self) -> &'_ mut [T] {
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
    }
}

impl Deref for UnpodStack<'_> {
    type Target = MemStack;

    #[inline]
    fn deref(&self) -> &'_ Self::Target {
        unsafe { &*(core::ptr::slice_from_raw_parts(self.ptr.as_ptr(), self.len) as *const MemStack) }
    }
}

impl DerefMut for UnpodStack<'_> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { &mut *(core::ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len) as *mut MemStack) }
    }
}

#[inline]
unsafe fn transmute_slice<T>(slice: &mut [MaybeUninit<u8>], size: usize) -> &mut [T] {
    slice::from_raw_parts_mut(slice.as_mut_ptr() as *mut T, size)
}
#[inline]
unsafe fn transmute_pod_slice<T: Pod>(slice: &mut [u8], size: usize) -> &mut [T] {
    slice::from_raw_parts_mut(slice.as_mut_ptr() as *mut T, size)
}

struct DropGuard<T> {
    ptr: *mut T,
    len: usize,
}

impl<T> Drop for DropGuard<T> {
    #[inline]
    fn drop(&mut self) {
        unsafe { core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(self.ptr, self.len)) };
    }
}

#[inline]
fn init_array_with<T>(mut f: impl FnMut(usize) -> T, array: &mut [MaybeUninit<T>]) -> &mut [T] {
    let len = array.len();
    let ptr = array.as_mut_ptr() as *mut T;

    // if `f` panics, the guard drops the already-initialized prefix
    let mut guard = DropGuard { ptr, len: 0 };

    for i in 0..len {
        guard.len = i;
        unsafe { ptr.add(i).write(f(i)) };
    }
    core::mem::forget(guard);

    unsafe { slice::from_raw_parts_mut(ptr, len) }
}

#[inline]
fn init_pod_array_with<T: Pod>(mut f: impl FnMut(usize) -> T, array: &mut [T]) -> &mut [T] {
    for (i, x) in array.iter_mut().enumerate() {
        *x = f(i);
    }
    array
}

#[inline]
unsafe fn init_array_with_iter<T, I: Iterator<Item = T>>(iter: I, ptr: &mut [MaybeUninit<T>]) -> usize {
    let max_len = ptr.len();
    let ptr = ptr.as_mut_ptr();
    // guard over the initialized prefix, typed as `T` (not `MaybeUninit<T>`) so that the
    // elements written so far are actually dropped if the iterator panics
    let mut guard = DropGuard { ptr: ptr as *mut T, len: 0 };

    iter.take(max_len).enumerate().for_each(|(i, item)| {
        *ptr.add(i) = MaybeUninit::new(item);
        guard.len += 1;
    });

    let len = guard.len;
    core::mem::forget(guard);

    len
}

#[inline]
fn init_pod_array_with_iter<T: Pod, I: Iterator<Item = T>>(iter: I, ptr: &mut [T]) -> usize {
    let mut len = 0;
    iter.zip(ptr).for_each(|(item, dst)| {
        *dst = item;
        len += 1;
    });
    len
}

#[track_caller]
#[inline]
fn check_alignment(align: usize, alignof_val: usize, type_name: &'static str) {
    assert!(
        (align & (align.wrapping_sub(1))) == 0,
        r#"
requested alignment is not a power of two:
- requested alignment: {}
"#,
        align
    );
    assert!(
        alignof_val <= align,
        r#"
requested alignment is less than the minimum valid alignment for `{}`:
- requested alignment: {}
- minimum alignment: {}
"#,
        type_name,
        align,
        alignof_val,
    );
}

#[track_caller]
#[inline]
fn check_enough_space_for_align_offset(len: usize, align: usize, align_offset: usize) {
    assert!(
        len >= align_offset,
        r#"
buffer is not large enough to accommodate the requested alignment
- buffer length: {}
- requested alignment: {}
- byte offset for alignment: {}
"#,
        len,
        align,
        align_offset,
    );
}

#[track_caller]
#[inline]
fn check_enough_space_for_array(remaining_len: usize, sizeof_val: usize, array_len: usize, type_name: &'static str) {
    if sizeof_val == 0 {
        return;
    }
    assert!(
        remaining_len / sizeof_val >= array_len,
        r#"
buffer is not large enough to allocate an array of type `{}` of the requested length:
- remaining buffer length (after adjusting for alignment): {},
- requested array length: {} ({} bytes),
"#,
        type_name,
        remaining_len,
        array_len,
        array_len * sizeof_val,
    );
}

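/// Bump allocator carving allocations off the front of a borrowed [`MemStack`]. Deallocation is
/// a no-op; memory is reclaimed when the stack itself is.
///
/// A minimal usage sketch through the crate's [`alloc::Allocator`] trait (illustrative; the
/// tests below exercise it through `MemBuffer::new_in` instead):
///
/// ```
/// use core::mem::MaybeUninit;
/// use dyn_stack::{alloc::Allocator, MemStack};
///
/// let mut buf = [MaybeUninit::<u8>::uninit(); 64];
/// let mut stack = MemStack::new(&mut buf);
/// let bump = stack.bump();
/// // each allocation shrinks the stack that `bump` borrows
/// let block = bump.allocate(core::alloc::Layout::new::<u64>()).unwrap();
/// assert!(block.len() >= 8);
/// ```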
#[repr(transparent)]
pub struct Bump<'stack> {
    ptr: core::cell::UnsafeCell<&'stack mut MemStack>,
}

unsafe impl alloc::Allocator for Bump<'_> {
    fn allocate(&self, layout: core::alloc::Layout) -> Result<NonNull<[u8]>, alloc::AllocError> {
        let ptr = unsafe { &mut *self.ptr.get() };
        let old = core::mem::replace(ptr, MemStack::new(&mut []));

        if old.can_hold(StackReq::new_aligned::<u8>(layout.size(), layout.align())) {
            let (alloc, new) = old.make_aligned_uninit::<u8>(layout.size(), layout.align());
            *ptr = new;

            let len = alloc.len();
            let ptr = alloc.as_mut_ptr() as *mut u8;
            Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(ptr, len)) })
        } else {
            Err(alloc::AllocError)
        }
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: core::alloc::Layout) {
        let _ = (ptr, layout);
    }
}

impl MemStack {
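    /// Returns a new stack wrapping the provided buffer of uninitialized bytes.
    ///
    /// A minimal sketch using a fixed-size stack buffer (illustrative; any
    /// `&mut [MaybeUninit<u8>]` works):
    ///
    /// ```
    /// use core::mem::MaybeUninit;
    /// use dyn_stack::MemStack;
    ///
    /// let mut buf = [MaybeUninit::<u8>::uninit(); 32];
    /// let stack = MemStack::new(&mut buf);
    /// assert_eq!(stack.len_bytes(), 32);
    /// ```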
    #[inline]
    pub fn new(buffer: &mut [MaybeUninit<u8>]) -> &mut Self {
        unsafe { &mut *(buffer as *mut [MaybeUninit<u8>] as *mut Self) }
    }

    /// Returns a new stack by viewing the provided typed buffer as uninitialized bytes.
    #[inline]
    pub fn new_any<T>(buffer: &mut [MaybeUninit<T>]) -> &mut Self {
        let len = core::mem::size_of_val(buffer);
        Self::new(unsafe { slice::from_raw_parts_mut(buffer.as_mut_ptr() as *mut _, len) })
    }

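    /// Returns `true` if the stack can hold an allocation with the given size and alignment
    /// requirements.
    ///
    /// A minimal sketch (illustrative):
    ///
    /// ```
    /// use core::mem::MaybeUninit;
    /// use dyn_stack::{MemStack, StackReq};
    ///
    /// let mut buf = [MaybeUninit::<u8>::uninit(); 16];
    /// let stack = MemStack::new(&mut buf);
    /// assert!(stack.can_hold(StackReq::new::<u8>(16)));
    /// assert!(!stack.can_hold(StackReq::new::<u8>(17)));
    /// ```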
    #[inline]
    #[must_use]
    pub fn can_hold(&self, alloc_req: StackReq) -> bool {
        let align = alloc_req.align_bytes();
        let size = alloc_req.size_bytes();
        let align_offset = self.buffer.as_ptr().align_offset(align);
        let self_size = self.buffer.len();
        (self_size >= align_offset) && (self_size - align_offset >= size)
    }

    /// Returns the remaining length of the stack's buffer in bytes.
    #[inline]
    pub fn len_bytes(&self) -> usize {
        self.buffer.len()
    }

    /// Returns a pointer to the stack memory.
    #[inline]
    pub fn as_ptr(&self) -> *const u8 {
        self.buffer.as_ptr() as _
    }

    #[track_caller]
    #[inline]
    fn split_buffer<'out>(
        buffer: &'out mut [MaybeUninit<u8>],
        size: usize,
        align: usize,
        sizeof_val: usize,
        alignof_val: usize,
        type_name: &'static str,
    ) -> (&'out mut [MaybeUninit<u8>], &'out mut [MaybeUninit<u8>]) {
        let len = buffer.len();
        let align_offset = buffer.as_mut_ptr().align_offset(align);

        check_alignment(align, alignof_val, type_name);
        check_enough_space_for_align_offset(len, align, align_offset);
        check_enough_space_for_array(len - align_offset, sizeof_val, size, type_name);

        let buffer = unsafe { buffer.get_unchecked_mut(align_offset..) };
        let len = len - align_offset;

        let begin = buffer.as_mut_ptr();
        let begin_len = size * sizeof_val;
        let mid = unsafe { begin.add(begin_len) };
        let mid_len = len - begin_len;
        unsafe { (slice::from_raw_parts_mut(begin, begin_len), slice::from_raw_parts_mut(mid, mid_len)) }
    }

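    /// Returns a new aligned, uninitialized slice of memory of the requested size, along with
    /// the remainder of the stack. Panics if the alignment is not a valid power of two at least
    /// as large as `T`'s, or if the stack is too small.
    ///
    /// A minimal sketch (illustrative):
    ///
    /// ```
    /// use core::mem::MaybeUninit;
    /// use dyn_stack::MemStack;
    ///
    /// let mut buf = [MaybeUninit::<u8>::uninit(); 64];
    /// let stack = MemStack::new(&mut buf);
    /// let (mem, _rest) = stack.make_aligned_uninit::<u32>(4, 8);
    /// assert_eq!(mem.len(), 4);
    /// ```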
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_aligned_uninit<T>(&mut self, size: usize, align: usize) -> (&mut [MaybeUninit<T>], &mut Self) {
        let (taken, remaining) = Self::split_buffer(
            &mut self.buffer,
            size,
            align,
            core::mem::size_of::<T>(),
            core::mem::align_of::<T>(),
            core::any::type_name::<T>(),
        );

        (unsafe { transmute_slice::<MaybeUninit<T>>(taken, size) }, MemStack::new(remaining))
    }

    /// Returns a new aligned slice of memory of the requested size, initialized with the
    /// provided function, along with the remainder of the stack.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_aligned_with<T>(&mut self, size: usize, align: usize, f: impl FnMut(usize) -> T) -> (DynArray<'_, T>, &mut Self) {
        let (taken, remaining) = self.make_aligned_uninit(size, align);
        let (len, ptr) = {
            let taken = init_array_with(f, taken);
            (taken.len(), taken.as_mut_ptr())
        };
        (
            DynArray {
                ptr: unsafe { NonNull::<T>::new_unchecked(ptr) },
                len,
                __marker: PhantomData,
            },
            remaining,
        )
    }

    #[track_caller]
    #[inline]
    #[must_use]
    #[doc(hidden)]
    pub unsafe fn make_raw<T: Pod>(&mut self, size: usize) -> (&mut [T], &mut Self) {
        self.make_aligned_raw(size, core::mem::align_of::<T>())
    }

    #[track_caller]
    #[inline]
    #[must_use]
    #[doc(hidden)]
    pub unsafe fn make_aligned_raw<T: Pod>(&mut self, size: usize, align: usize) -> (&mut [T], &mut Self) {
        let (mem, stack) = self.make_aligned_uninit::<T>(size, align);
        unsafe { (&mut *(mem as *mut [MaybeUninit<T>] as *mut [T]), stack) }
    }

    /// Returns a new uninitialized slice of memory of the requested size, along with the
    /// remainder of the stack.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_uninit<T>(&mut self, size: usize) -> (&mut [MaybeUninit<T>], &mut Self) {
        self.make_aligned_uninit(size, core::mem::align_of::<T>())
    }

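    /// Returns a new slice of memory of the requested size, initialized with the provided
    /// function, along with the remainder of the stack.
    ///
    /// A minimal sketch (illustrative):
    ///
    /// ```
    /// use core::mem::MaybeUninit;
    /// use dyn_stack::MemStack;
    ///
    /// let mut buf = [MaybeUninit::<u8>::uninit(); 64];
    /// let stack = MemStack::new(&mut buf);
    /// let (arr, _rest) = stack.make_with::<u32>(3, |i| i as u32 + 1);
    /// assert_eq!(&*arr, &[1, 2, 3]);
    /// ```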
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_with<T>(&mut self, size: usize, f: impl FnMut(usize) -> T) -> (DynArray<'_, T>, &mut Self) {
        self.make_aligned_with(size, core::mem::align_of::<T>(), f)
    }

    /// Returns a new aligned slice of memory filled with values from the given iterator, along
    /// with the remainder of the stack. The slice is truncated if the buffer runs out of space
    /// before the iterator is exhausted.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn collect_aligned<I>(&mut self, align: usize, iter: impl IntoIterator<Item = I>) -> (DynArray<'_, I>, &mut Self) {
        self.collect_aligned_impl(align, iter.into_iter())
    }

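    /// Returns a new slice of memory filled with values from the given iterator, along with the
    /// remainder of the stack. The slice is truncated if the buffer runs out of space before the
    /// iterator is exhausted.
    ///
    /// A minimal sketch (illustrative):
    ///
    /// ```
    /// use core::mem::MaybeUninit;
    /// use dyn_stack::MemStack;
    ///
    /// let mut buf = [MaybeUninit::<u8>::uninit(); 64];
    /// let stack = MemStack::new(&mut buf);
    /// let (arr, _rest) = stack.collect(0..3u32);
    /// assert_eq!(&*arr, &[0, 1, 2]);
    /// ```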
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn collect<I>(&mut self, iter: impl IntoIterator<Item = I>) -> (DynArray<'_, I>, &mut Self) {
        self.collect_aligned_impl(core::mem::align_of::<I>(), iter.into_iter())
    }

    #[track_caller]
    #[inline]
    fn collect_aligned_impl<I: Iterator>(&mut self, align: usize, iter: I) -> (DynArray<'_, I::Item>, &mut Self) {
        let sizeof_val = core::mem::size_of::<I::Item>();
        let alignof_val = core::mem::align_of::<I::Item>();
        let align_offset = self.buffer.as_mut_ptr().align_offset(align);

        check_alignment(align, alignof_val, core::any::type_name::<I::Item>());
        check_enough_space_for_align_offset(self.buffer.len(), align, align_offset);

        let buffer = unsafe { self.buffer.get_unchecked_mut(align_offset..) };
        let buffer_len = buffer.len();
        let buffer_ptr = buffer.as_mut_ptr();
        unsafe {
            let len = init_array_with_iter(
                iter,
                slice::from_raw_parts_mut(
                    buffer_ptr as *mut MaybeUninit<I::Item>,
                    if sizeof_val == 0 { usize::MAX } else { buffer_len / sizeof_val },
                ),
            );

            let remaining_slice = slice::from_raw_parts_mut(buffer_ptr.add(len * sizeof_val), buffer_len - len * sizeof_val);
            (
                DynArray {
                    ptr: NonNull::new_unchecked(buffer_ptr as *mut I::Item),
                    len,
                    __marker: PhantomData,
                },
                Self::new(remaining_slice),
            )
        }
    }

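    /// Reborrows the stack as a [`Bump`] allocator; allocations made through it carve memory off
    /// the front of this stack (see the example on [`Bump`]).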
    #[inline]
    pub fn bump<'bump, 'stack>(self: &'bump mut &'stack mut Self) -> &'bump mut Bump<'stack> {
        unsafe { &mut *(self as *mut &mut Self as *mut Bump<'stack>) }
    }
}

impl PodStack {
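    /// Returns a new stack wrapping the provided byte buffer.
    ///
    /// A minimal sketch (illustrative):
    ///
    /// ```
    /// use dyn_stack::PodStack;
    ///
    /// let mut buf = [0u8; 32];
    /// let stack = PodStack::new(&mut buf);
    /// assert_eq!(stack.len_bytes(), 32);
    /// ```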
    #[inline]
    pub fn new(buffer: &mut [u8]) -> &mut Self {
        unsafe { &mut *(buffer as *mut [u8] as *mut Self) }
    }

    /// Returns a new stack by viewing the provided typed buffer as plain bytes.
    #[inline]
    pub fn new_any<T: Pod>(buffer: &mut [T]) -> &mut Self {
        let len = core::mem::size_of_val(buffer);
        Self::new(unsafe { slice::from_raw_parts_mut(buffer.as_mut_ptr() as *mut _, len) })
    }

    /// Returns `true` if the stack can hold an allocation with the given size and alignment
    /// requirements.
    #[inline]
    #[must_use]
    pub fn can_hold(&self, alloc_req: StackReq) -> bool {
        let align = alloc_req.align_bytes();
        let size = alloc_req.size_bytes();
        let align_offset = self.buffer.as_ptr().align_offset(align);
        let self_size = self.buffer.len();
        (self_size >= align_offset) && (self_size - align_offset >= size)
    }

    /// Returns the remaining length of the stack's buffer in bytes.
    #[inline]
    pub fn len_bytes(&self) -> usize {
        self.buffer.len()
    }

    /// Returns a pointer to the stack memory.
    #[inline]
    pub fn as_ptr(&self) -> *const u8 {
        self.buffer.as_ptr() as _
    }

    #[track_caller]
    #[inline]
    fn split_buffer<'out>(
        buffer: &'out mut [u8],
        size: usize,
        align: usize,
        sizeof_val: usize,
        alignof_val: usize,
        type_name: &'static str,
    ) -> (&'out mut [u8], &'out mut [u8]) {
        let len = buffer.len();
        let align_offset = buffer.as_mut_ptr().align_offset(align);

        check_alignment(align, alignof_val, type_name);
        check_enough_space_for_align_offset(len, align, align_offset);
        check_enough_space_for_array(len - align_offset, sizeof_val, size, type_name);

        let buffer = unsafe { buffer.get_unchecked_mut(align_offset..) };
        let len = len - align_offset;

        let begin = buffer.as_mut_ptr();
        let begin_len = size * sizeof_val;
        let mid = unsafe { begin.add(begin_len) };
        let mid_len = len - begin_len;
        unsafe { (slice::from_raw_parts_mut(begin, begin_len), slice::from_raw_parts_mut(mid, mid_len)) }
    }

    /// Returns a new aligned slice of memory of the requested size, along with the remainder of
    /// the stack.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_aligned_raw<T: Pod>(&mut self, size: usize, align: usize) -> (&mut [T], &mut Self) {
        let (taken, remaining) = Self::split_buffer(
            &mut self.buffer,
            size,
            align,
            core::mem::size_of::<T>(),
            core::mem::align_of::<T>(),
            core::any::type_name::<T>(),
        );

        let taken = unsafe { transmute_pod_slice::<T>(taken, size) };
        (taken, Self::new(remaining))
    }

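    /// Returns a [`MemStack`] view over the requested aligned byte region, along with the
    /// remainder of the stack. When the view is dropped, the region is laundered (overwritten or
    /// hidden from the optimizer) before the bytes become visible to the `PodStack` again.
    ///
    /// # Safety
    ///
    /// A sketch of the contract, inferred from the implementation: the caller must treat the
    /// region's previous contents as indeterminate once the view is dropped, and must not leave
    /// values needing `Drop` in the region, since it is laundered rather than dropped.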
    pub unsafe fn make_aligned_unpod(&mut self, size: usize, align: usize) -> (UnpodStack<'_>, &mut Self) {
        let (taken, remaining) = Self::split_buffer(&mut self.buffer, size, align, 1, 1, "[Bytes]");
        (
            UnpodStack {
                ptr: NonNull::new_unchecked(taken.as_mut_ptr()),
                len: size,
                __marker: PhantomData,
            },
            Self::new(remaining),
        )
    }

    /// Returns a new aligned slice of memory of the requested size, initialized with the
    /// provided function, along with the remainder of the stack.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_aligned_with<T: Pod>(&mut self, size: usize, align: usize, f: impl FnMut(usize) -> T) -> (&mut [T], &mut Self) {
        let (taken, remaining) = self.make_aligned_raw(size, align);
        let taken = init_pod_array_with(f, taken);
        (taken, remaining)
    }

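    /// Returns a new slice of memory of the requested size, along with the remainder of the
    /// stack. The slice holds whatever bytes the buffer currently contains.
    ///
    /// A minimal sketch (illustrative):
    ///
    /// ```
    /// use dyn_stack::PodStack;
    ///
    /// let mut buf = [0u8; 32];
    /// let stack = PodStack::new(&mut buf);
    /// let (arr, _rest) = stack.make_raw::<u32>(3);
    /// assert_eq!(&*arr, &[0, 0, 0]);
    /// ```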
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_raw<T: Pod>(&mut self, size: usize) -> (&mut [T], &mut Self) {
        self.make_aligned_raw(size, core::mem::align_of::<T>())
    }

    /// Returns a new slice of memory of the requested size, initialized with the provided
    /// function, along with the remainder of the stack.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_with<T: Pod>(&mut self, size: usize, f: impl FnMut(usize) -> T) -> (&mut [T], &mut Self) {
        self.make_aligned_with(size, core::mem::align_of::<T>(), f)
    }

    /// Returns a new aligned slice of memory filled with values from the given iterator, along
    /// with the remainder of the stack. The slice is truncated if the buffer runs out of space
    /// before the iterator is exhausted.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn collect_aligned<I: Pod>(&mut self, align: usize, iter: impl IntoIterator<Item = I>) -> (&mut [I], &mut Self) {
        self.collect_aligned_impl(align, iter.into_iter())
    }

872 #[track_caller]
881 #[inline]
882 #[must_use]
883 pub fn collect<I: Pod>(&mut self, iter: impl IntoIterator<Item = I>) -> (&mut [I], &mut Self) {
884 self.collect_aligned_impl(core::mem::align_of::<I>(), iter.into_iter())
885 }
886
887 #[track_caller]
888 #[inline]
889 fn collect_aligned_impl<I: Iterator>(&mut self, align: usize, iter: I) -> (&mut [I::Item], &mut Self)
890 where
891 I::Item: Pod,
892 {
893 let sizeof_val = core::mem::size_of::<I::Item>();
894 let alignof_val = core::mem::align_of::<I::Item>();
895 let align_offset = self.buffer.as_mut_ptr().align_offset(align);
896
897 check_alignment(align, alignof_val, core::any::type_name::<I::Item>());
898 check_enough_space_for_align_offset(self.buffer.len(), align, align_offset);
899
900 let buffer = unsafe { self.buffer.get_unchecked_mut(align_offset..) };
901 let buffer_len = buffer.len();
902 let buffer_ptr = buffer.as_mut_ptr();
903 unsafe {
904 let len = init_pod_array_with_iter(
905 iter,
906 slice::from_raw_parts_mut(
907 buffer_ptr as *mut I::Item,
908 if sizeof_val == 0 { usize::MAX } else { buffer_len / sizeof_val },
909 ),
910 );
911
912 let taken = slice::from_raw_parts_mut(buffer_ptr as *mut I::Item, len);
913 let remaining_slice = slice::from_raw_parts_mut(buffer_ptr.add(len * sizeof_val), buffer_len - len * sizeof_val);
914 (taken, Self::new(remaining_slice))
915 }
916 }
917}
918
#[cfg(all(test, feature = "alloc"))]
mod dyn_stack_tests {
    use super::*;
    use alloc::Global;

    #[test]
    fn empty_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(0), Global);
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(0, |i| i as i32);
    }

    #[test]
    #[should_panic]
    fn empty_overflow_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(0), Global);
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(1, |i| i as i32);
    }

    #[test]
    fn empty_collect_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(0), Global);
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.collect(0..0);
    }

    #[test]
    fn empty_collect_overflow_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(0), Global);
        let stack = MemStack::new(&mut buf);
        let (arr0, _stack) = stack.collect(0..1);
        assert!(arr0.is_empty());
    }

    #[test]
    #[should_panic]
    fn overflow_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(1), Global);
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(2, |i| i as i32);
    }

    #[test]
    fn collect_overflow_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(1), Global);
        let stack = MemStack::new(&mut buf);
        let (arr0, _stack) = stack.collect(1..3);
        assert_eq!(arr0.len(), 1);
        assert_eq!(arr0[0], 1);
    }

    #[test]
    fn empty() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(0));
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(0, |i| i as i32);
    }

    #[test]
    #[should_panic]
    fn empty_overflow() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(0));
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(1, |i| i as i32);
    }

    #[test]
    fn empty_collect() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(0));
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.collect(0..0);
    }

    #[test]
    fn empty_collect_overflow() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(0));
        let stack = MemStack::new(&mut buf);
        let (arr0, _stack) = stack.collect(0..1);
        assert!(arr0.is_empty());
    }

    #[test]
    #[should_panic]
    fn overflow() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(1));
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(2, |i| i as i32);
    }

    #[test]
    fn collect_overflow() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(1));
        let stack = MemStack::new(&mut buf);
        let (arr0, _stack) = stack.collect(1..3);
        assert_eq!(arr0.len(), 1);
        assert_eq!(arr0[0], 1);
    }

    #[test]
    fn basic_nested() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(6));

        let stack = MemStack::new(&mut buf);
        assert!(stack.can_hold(StackReq::new::<i32>(6)));
        assert!(!stack.can_hold(StackReq::new::<i32>(7)));

        let (arr0, stack) = stack.make_with::<i32>(3, |i| i as i32);
        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        let (arr1, _) = stack.make_with::<i32>(3, |i| i as i32 + 3);

        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        assert_eq!(arr1[0], 3);
        assert_eq!(arr1[1], 4);
        assert_eq!(arr1[2], 5);
    }

    #[test]
    fn basic_disjoint() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(3));

        let stack = MemStack::new(&mut buf);

        {
            let (arr0, _) = stack.make_with::<i32>(3, |i| i as i32);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 1);
            assert_eq!(arr0[2], 2);
        }
        {
            let (arr1, _) = stack.make_with::<i32>(3, |i| i as i32 + 3);

            assert_eq!(arr1[0], 3);
            assert_eq!(arr1[1], 4);
            assert_eq!(arr1[2], 5);
        }
    }

    #[test]
    fn basic_nested_collect() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(6));
        let stack = MemStack::new(&mut buf);

        let (arr0, stack) = stack.collect(0..3_i32);
        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        let (arr1, _) = stack.collect(3..6_i32);

        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        assert_eq!(arr1[0], 3);
        assert_eq!(arr1[1], 4);
        assert_eq!(arr1[2], 5);
    }

    #[test]
    fn basic_disjoint_collect() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(3));

        let stack = MemStack::new(&mut buf);

        {
            let (arr0, _) = stack.collect(0..3_i32);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 1);
            assert_eq!(arr0[2], 2);
        }
        {
            let (arr1, _) = stack.collect(3..6_i32);

            assert_eq!(arr1[0], 3);
            assert_eq!(arr1[1], 4);
            assert_eq!(arr1[2], 5);
        }
    }

    #[test]
    fn drop_nested() {
        use core::sync::atomic::{AtomicI32, Ordering};
        static DROP_COUNT: AtomicI32 = AtomicI32::new(0);

        struct CountedDrop;
        impl Drop for CountedDrop {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::SeqCst);
            }
        }

        let mut buf = MemBuffer::new(StackReq::new::<CountedDrop>(6));
        let stack = MemStack::new(&mut buf);

        let stack = {
            let (_arr, stack) = stack.make_with(3, |_| CountedDrop);
            stack
        };
        assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 3);
        let _stack = {
            let (_arr, stack) = stack.make_with(4, |_| CountedDrop);
            stack
        };
        assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 7);
    }

    #[test]
    fn drop_disjoint() {
        use core::sync::atomic::{AtomicI32, Ordering};
        static DROP_COUNT: AtomicI32 = AtomicI32::new(0);

        struct CountedDrop;
        impl Drop for CountedDrop {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::SeqCst);
            }
        }

        let mut buf = MemBuffer::new(StackReq::new::<CountedDrop>(6));
        let stack = MemStack::new(&mut buf);

        {
            let _ = stack.make_with(3, |_| CountedDrop);
            assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 3);
        }

        {
            let _ = stack.make_with(4, |_| CountedDrop);
            assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 7);
        }
    }
}

#[cfg(all(test, feature = "alloc"))]
mod pod_stack_tests {
    use super::*;

    #[test]
    fn empty() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(0));
        let stack = PodStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(0, |i| i as i32);
    }

    #[test]
    #[should_panic]
    fn empty_overflow() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(0));
        let stack = PodStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(1, |i| i as i32);
    }

    #[test]
    fn empty_collect() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(0));
        let stack = PodStack::new(&mut buf);
        let (_arr0, _stack) = stack.collect(0..0);
    }

    #[test]
    fn empty_collect_overflow() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(0));
        let stack = PodStack::new(&mut buf);
        let (arr0, _stack) = stack.collect(0..1);
        assert!(arr0.is_empty());
    }

    #[test]
    #[should_panic]
    fn overflow() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(1));
        let stack = PodStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(2, |i| i as i32);
    }

    #[test]
    fn collect_overflow() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(1));
        let stack = PodStack::new(&mut buf);
        let (arr0, _stack) = stack.collect(1..3);
        assert_eq!(arr0.len(), 1);
        assert_eq!(arr0[0], 1);
    }

    #[test]
    fn basic_nested() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(6));

        let stack = PodStack::new(&mut buf);
        assert!(stack.can_hold(StackReq::new::<i32>(6)));
        assert!(!stack.can_hold(StackReq::new::<i32>(7)));

        let (arr0, stack) = stack.make_with::<i32>(3, |i| i as i32);
        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        let (arr1, _) = stack.make_with::<i32>(3, |i| i as i32 + 3);

        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        assert_eq!(arr1[0], 3);
        assert_eq!(arr1[1], 4);
        assert_eq!(arr1[2], 5);
    }

    #[test]
    fn basic_disjoint() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(3));

        let stack = PodStack::new(&mut buf);

        {
            let (arr0, _) = stack.make_with::<i32>(3, |i| i as i32);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 1);
            assert_eq!(arr0[2], 2);
        }
        {
            let (arr1, _) = stack.make_with::<i32>(3, |i| i as i32 + 3);

            assert_eq!(arr1[0], 3);
            assert_eq!(arr1[1], 4);
            assert_eq!(arr1[2], 5);
        }
    }

    #[test]
    fn basic_nested_collect() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(6));
        let stack = PodStack::new(&mut buf);

        let (arr0, stack) = stack.collect(0..3_i32);
        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        let (arr1, _) = stack.collect(3..6_i32);

        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        assert_eq!(arr1[0], 3);
        assert_eq!(arr1[1], 4);
        assert_eq!(arr1[2], 5);
    }

    #[test]
    fn basic_disjoint_collect() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(3));

        let stack = PodStack::new(&mut buf);

        {
            let (arr0, _) = stack.collect(0..3_i32);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 1);
            assert_eq!(arr0[2], 2);
        }
        {
            let (arr1, _) = stack.collect(3..6_i32);

            assert_eq!(arr1[0], 3);
            assert_eq!(arr1[1], 4);
            assert_eq!(arr1[2], 5);
        }
    }

    #[test]
    fn make_raw() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(3));
        buf.fill(0);

        let stack = PodStack::new(&mut buf);

        {
            let (arr0, _) = stack.make_raw::<i32>(3);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 0);
            assert_eq!(arr0[2], 0);
        }
        {
            let (arr0, _) = stack.collect(0..3_i32);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 1);
            assert_eq!(arr0[2], 2);
        }
        {
            let (arr1, _) = stack.make_raw::<i32>(3);

            assert_eq!(arr1[0], 0);
            assert_eq!(arr1[1], 1);
            assert_eq!(arr1[2], 2);
        }
    }

    #[test]
    fn make_unpod() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(3));
        let stack = PodStack::new(&mut buf);

        {
            let (mut stack, _) = unsafe { stack.make_aligned_unpod(12, 4) };

            let stack = &mut *stack;
            let (mem, _) = stack.make_uninit::<u32>(3);
            mem.fill(MaybeUninit::uninit());

            let mut stack = stack;
            let mut buf = MemBuffer::new_in(StackReq::new::<u32>(3), alloc::DynAlloc::from_mut(stack.bump()));
            let stack = MemStack::new(&mut buf);
            let _ = stack.make_uninit::<u32>(3);
        }

        // after the unpod region is dropped, its bytes are laundered but must remain valid POD
        // data that can be read and written
        let (mem, _) = stack.make_raw::<u32>(3);
        for x in mem {
            *x = *x;
        }
    }
}
1352}