#![doc = include_str!("../README.md")]
#![no_std]

mod iter;
mod util;

extern crate alloc;

use alloc::alloc::{Layout, alloc, dealloc, handle_alloc_error};
use core::cell::Cell;
use core::marker::PhantomData;
use core::mem::{align_of, size_of};
use core::ptr::{self, NonNull};

#[derive(Debug)]
pub struct Stack<T> {
    /// Footer of the chunk new elements are currently pushed into.
    current_footer: Cell<NonNull<ChunkFooter>>,

    /// Footer of the first (oldest) chunk; start of the chunk list.
    first_footer: Cell<NonNull<ChunkFooter>>,

    /// Total number of elements the allocated chunks can hold.
    capacity: Cell<usize>,

    /// Number of elements currently stored.
    length: Cell<usize>,

    /// Marks ownership of the `T`s for variance and drop checking.
    _phantom: PhantomData<T>,
}

impl<T> Stack<T> {
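    /// Creates a new, empty stack without allocating.
    ///
    /// A minimal doctest sketch; `stack` is a hypothetical crate name:
    ///
    /// ```
    /// use stack::Stack; // hypothetical crate name
    ///
    /// let s: Stack<i32> = Stack::new();
    /// assert!(s.is_empty());
    /// assert_eq!(s.capacity(), 0);
    /// ```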
    pub const fn new() -> Self {
        Self {
            current_footer: Cell::new(DEAD_CHUNK.get()),
            first_footer: Cell::new(DEAD_CHUNK.get()),
            capacity: Cell::new(0),
            length: Cell::new(0),
            _phantom: PhantomData,
        }
    }

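    /// Creates a stack with room for at least `capacity` elements.
    ///
    /// Zero-sized types never allocate. A sketch, assuming the crate is
    /// named `stack`:
    ///
    /// ```
    /// use stack::Stack; // hypothetical crate name
    ///
    /// let s: Stack<u64> = Stack::with_capacity(100);
    /// assert!(s.capacity() >= 100);
    /// assert!(s.is_empty());
    /// ```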
    pub fn with_capacity(capacity: usize) -> Self {
        let stack = Self::new();
        debug_assert!(unsafe { stack.current_footer.get().as_ref().is_dead() });
        if const { Self::ELEMENT_SIZE != 0 } && capacity != 0 {
            let chunk_size = Self::chunk_size_for(capacity);
            let footer = unsafe { stack.alloc_chunk(chunk_size) };
            stack.current_footer.set(footer);
            stack.first_footer.set(footer);
        }
        stack
    }

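    /// Returns how many elements the stack can hold without allocating.
    ///
    /// For zero-sized `T` this is `usize::MAX`, since such elements need
    /// no storage.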
    #[inline]
    pub const fn capacity(&self) -> usize {
        if const { Self::ELEMENT_SIZE == 0 } {
            usize::MAX
        } else {
            self.capacity.get()
        }
    }

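    /// Returns the number of elements currently on the stack.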
    #[inline]
    pub const fn len(&self) -> usize {
        self.length.get()
    }

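    /// Returns `true` if the stack holds no elements.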
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

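    /// Returns a reference to the bottom element, i.e. the first one
    /// pushed, or `None` if the stack is empty.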
    #[inline]
    #[must_use]
    pub fn first(&self) -> Option<&T> {
        if !self.is_empty() {
            unsafe { Some(self.first_unchecked().as_ref()) }
        } else {
            None
        }
    }

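    /// Returns a mutable reference to the bottom element, or `None` if
    /// the stack is empty.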
    #[inline]
    #[must_use]
    pub fn first_mut(&mut self) -> Option<&mut T> {
        if !self.is_empty() {
            unsafe { Some(self.first_unchecked().as_mut()) }
        } else {
            None
        }
    }

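    /// Returns a reference to the top element, i.e. the most recently
    /// pushed, or `None` if the stack is empty.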
    #[inline]
    #[must_use]
    pub fn last(&self) -> Option<&T> {
        if !self.is_empty() {
            unsafe { Some(self.last_unchecked().as_ref()) }
        } else {
            None
        }
    }
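
    /// Returns a mutable reference to the top element, or `None` if the
    /// stack is empty.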
    #[inline]
    #[must_use]
    pub fn last_mut(&mut self) -> Option<&mut T> {
        if !self.is_empty() {
            unsafe { Some(self.last_unchecked().as_mut()) }
        } else {
            None
        }
    }

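    /// Pushes `value` onto the stack and returns a reference to it.
    ///
    /// Note that `push` takes `&self`: elements never move once stored,
    /// so references handed out earlier stay valid across later pushes.
    /// A sketch, assuming the crate is named `stack`:
    ///
    /// ```
    /// use stack::Stack; // hypothetical crate name
    ///
    /// let s = Stack::new();
    /// let first = s.push(1);
    /// s.push(2);
    /// s.push(3);
    /// assert_eq!(*first, 1); // still valid after further pushes
    /// assert_eq!(s.len(), 3);
    /// ```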
    #[inline]
    pub fn push(&self, value: T) -> &T {
        self.push_with(|| value)
    }

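    /// Pushes the value produced by `f` and returns a reference to it.
    ///
    /// The closure's result is written directly into the stack's memory,
    /// which can avoid copying large values through the caller's frame.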
    #[inline(always)]
    pub fn push_with<F>(&self, f: F) -> &T
    where
        F: FnOnce() -> T,
    {
        unsafe {
            let p = self.alloc_element();
            util::write_with(p.as_ptr(), f);
            self.length.update(|len| len + 1);
            p.as_ref()
        }
    }

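    /// Removes the top element and returns it, or `None` if the stack is
    /// empty.
    ///
    /// A sketch, assuming the crate is named `stack`:
    ///
    /// ```
    /// use stack::Stack; // hypothetical crate name
    ///
    /// let mut s = Stack::new();
    /// s.push("a");
    /// s.push("b");
    /// assert_eq!(s.pop(), Some("b"));
    /// assert_eq!(s.pop(), Some("a"));
    /// assert_eq!(s.pop(), None);
    /// ```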
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        unsafe {
            if let Some(element_ptr) = self.dealloc_element() {
                // For ZSTs `dealloc_element` always succeeds, so emptiness
                // must be tracked through `length` instead.
                if const { Self::ELEMENT_SIZE == 0 } && self.length.get() == 0 {
                    None
                } else {
                    self.length.update(|len| len - 1);
                    Some(ptr::read(element_ptr.as_ptr()))
                }
            } else {
                None
            }
        }
    }

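    /// Pops and drops every element.
    ///
    /// Emptied chunks are released as the stack shrinks, except one that
    /// is kept for reuse; see the `Drop` impl for the final cleanup.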
    pub fn clear(&mut self) {
        while let Some(elem) = self.pop() {
            drop(elem);
        }
    }

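    /// Returns an iterator over references to the stack's elements.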
    pub fn iter(&self) -> impl core::iter::Iterator<Item = &T> {
        crate::iter::Iter::new(self)
    }
}

impl<T> core::default::Default for Stack<T> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<T, const N: usize> core::convert::From<&[T; N]> for Stack<T>
where
    T: Clone,
{
    fn from(slice: &[T; N]) -> Self {
        let stk = Stack::with_capacity(N);
        for item in slice {
            stk.push(item.clone());
        }
        stk
    }
}

impl<T, const N: usize> core::convert::From<&mut [T; N]> for Stack<T>
where
    T: Clone,
{
    #[inline]
    fn from(slice: &mut [T; N]) -> Self {
        core::convert::From::<&[T; N]>::from(slice)
    }
}

impl<T, const N: usize> core::convert::From<[T; N]> for Stack<T>
where
    T: Clone,
{
    #[inline]
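    /// A sketch, assuming the crate is named `stack`:
    ///
    /// ```
    /// use stack::Stack; // hypothetical crate name
    ///
    /// let s = Stack::from([1, 2, 3]);
    /// assert_eq!(s, [1, 2, 3]);
    /// ```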
    fn from(array: [T; N]) -> Self {
        core::convert::From::<&[T; N]>::from(&array)
    }
}

impl<T> core::convert::From<&[T]> for Stack<T>
where
    T: Clone,
{
    fn from(slice: &[T]) -> Self {
        let stk = Stack::with_capacity(slice.len());
        for item in slice {
            stk.push(item.clone());
        }
        stk
    }
}

impl<T> core::convert::From<&mut [T]> for Stack<T>
where
    T: Clone,
{
    fn from(slice: &mut [T]) -> Self {
        core::convert::From::<&[T]>::from(slice)
    }
}

impl<T> core::ops::Drop for Stack<T> {
    fn drop(&mut self) {
        self.clear();
        unsafe {
            // After `clear` at most one (empty) chunk remains; free it.
            let current_footer = self.current_footer.get().as_ref();
            if !current_footer.is_dead() {
                debug_assert!(current_footer.prev.get().as_ref().is_dead());
                debug_assert!(current_footer.next.get().as_ref().is_dead());
                self.dealloc_chunk(self.current_footer.get());
            }
        }
    }
}

impl<'a, T> core::iter::IntoIterator for &'a Stack<T> {
    type Item = &'a T;
    type IntoIter = crate::iter::Iter<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        crate::iter::Iter::new(self)
    }
}

impl<T, U> core::cmp::PartialEq<[U]> for Stack<T>
where
    T: core::cmp::PartialEq<U>,
{
    fn eq(&self, other: &[U]) -> bool {
        self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b)
    }
}

impl<T, U> core::cmp::PartialEq<&[U]> for Stack<T>
where
    T: core::cmp::PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &&[U]) -> bool {
        core::cmp::PartialEq::<[U]>::eq(self, other)
    }
}

impl<T, U> core::cmp::PartialEq<&mut [U]> for Stack<T>
where
    T: core::cmp::PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &&mut [U]) -> bool {
        core::cmp::PartialEq::<[U]>::eq(self, other)
    }
}

impl<T, U, const N: usize> core::cmp::PartialEq<[U; N]> for Stack<T>
where
    T: core::cmp::PartialEq<U>,
{
    fn eq(&self, other: &[U; N]) -> bool {
        self.len() == N && self.iter().zip(other.iter()).all(|(a, b)| a == b)
    }
}

impl<T, U, const N: usize> core::cmp::PartialEq<&[U; N]> for Stack<T>
where
    T: core::cmp::PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &&[U; N]) -> bool {
        core::cmp::PartialEq::<[U; N]>::eq(self, other)
    }
}

impl<T, U, const N: usize> core::cmp::PartialEq<&mut [U; N]> for Stack<T>
where
    T: core::cmp::PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &&mut [U; N]) -> bool {
        core::cmp::PartialEq::<[U; N]>::eq(self, other)
    }
}

impl<T, U, const N: usize> core::cmp::PartialEq<Stack<U>> for [T; N]
where
    T: core::cmp::PartialEq<U>,
{
    fn eq(&self, other: &Stack<U>) -> bool {
        self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b)
    }
}

impl<T, U, const N: usize> core::cmp::PartialEq<Stack<U>> for &[T; N]
where
    T: core::cmp::PartialEq<U>,
{
    fn eq(&self, other: &Stack<U>) -> bool {
        **self == *other
    }
}

impl<T, U, const N: usize> core::cmp::PartialEq<Stack<U>> for &mut [T; N]
where
    T: core::cmp::PartialEq<U>,
{
    fn eq(&self, other: &Stack<U>) -> bool {
        **self == *other
    }
}

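// Each chunk is a single allocation laid out as:
//
//     [ element slots ............ | padding | ChunkFooter ]
//     ^ data        ^ ptr (grows toward the footer)
//
// A chunk is addressed entirely through the pointer to its footer, which
// sits at the end of the allocation it describes. Footers of live chunks
// form a doubly-linked list terminated on both ends by `DEAD_CHUNK`.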
#[derive(Debug)]
struct ChunkFooter {
    /// Start of the chunk, i.e. of its element storage.
    data: NonNull<u8>,

    /// Bump pointer: the next free slot, in `data..=footer`.
    ptr: Cell<NonNull<u8>>,

    /// Layout this chunk was allocated with.
    layout: Layout,

    /// Footer of the previous (older) chunk, or `DEAD_CHUNK`.
    prev: Cell<NonNull<ChunkFooter>>,

    /// Footer of the next (newer) chunk, or `DEAD_CHUNK`.
    next: Cell<NonNull<ChunkFooter>>,
}

impl ChunkFooter {
    /// Pointer to this footer, which also marks the end of element storage.
    fn get(&self) -> NonNull<Self> {
        NonNull::from(self)
    }

    /// Whether this is the `DEAD_CHUNK` sentinel.
    fn is_dead(&self) -> bool {
        ptr::eq(self, &DEAD_CHUNK.0)
    }

    /// Bytes of element storage currently in use.
    fn occupied(&self) -> usize {
        let start = self.data.as_ptr() as usize;
        let ptr = self.ptr.get().as_ptr() as usize;
        debug_assert!(start <= ptr);
        ptr - start
    }

    /// Bytes of element storage this chunk provides in total.
    fn capacity(&self) -> usize {
        let end = self.get().as_ptr() as usize;
        let start = self.data.as_ptr() as usize;
        debug_assert!(start < end);
        end - start
    }

    /// Whether no elements are stored in this chunk.
    fn is_empty(&self) -> bool {
        let start = self.data.as_ptr() as usize;
        let ptr = self.ptr.get().as_ptr() as usize;
        debug_assert!(start <= ptr);
        start == ptr
    }
}

/// Newtype so the sentinel footer can be marked `Sync` and placed in a
/// `static`.
#[repr(transparent)]
struct DeadChunkFooter(ChunkFooter);

// SAFETY: `Stack` is `!Sync` (it uses `Cell`s), and the dead chunk is
// permanently full and empty, so its interior `Cell`s are only ever
// written back with their existing values.
unsafe impl Sync for DeadChunkFooter {}

impl DeadChunkFooter {
    const fn get(&'static self) -> NonNull<ChunkFooter> {
        unsafe {
            NonNull::new_unchecked(&DEAD_CHUNK as *const DeadChunkFooter as *mut ChunkFooter)
        }
    }
}

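// A self-referential sentinel footer that terminates the chunk list. An
// empty `Stack` points here instead of wrapping its pointers in `Option`.
// The dead chunk always reports itself as full, so the very first push
// falls through to the slow path and allocates a real chunk.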
static DEAD_CHUNK: DeadChunkFooter = DeadChunkFooter(ChunkFooter {
    data: DEAD_CHUNK.get().cast(),
    ptr: Cell::new(DEAD_CHUNK.get().cast()),
    layout: Layout::new::<ChunkFooter>(),
    prev: Cell::new(DEAD_CHUNK.get()),
    next: Cell::new(DEAD_CHUNK.get()),
});

/// Assumed per-allocation bookkeeping overhead of the underlying
/// allocator; chunk sizes are chosen so that `size + ALLOC_OVERHEAD` is a
/// power of two.
const ALLOC_OVERHEAD: usize = 16;

impl<T> Stack<T> {
    const ELEMENT_ALIGN: usize = align_of::<T>();

    const ELEMENT_SIZE: usize = size_of::<T>();

    const FOOTER_ALIGN: usize = util::max(align_of::<ChunkFooter>(), Self::ELEMENT_ALIGN);

    const FOOTER_SIZE: usize = size_of::<ChunkFooter>();

    const CHUNK_ALIGN: usize = util::max(Self::ELEMENT_ALIGN, Self::FOOTER_ALIGN);

    const CHUNK_MIN_SIZE: usize = Self::chunk_size_for(1);

    /// Whether the footer sits flush against the element slots, so a full
    /// chunk's bump pointer lands exactly on the footer.
    const FOOTER_IS_END: bool = {
        assert!(util::is_aligned_to(Self::CHUNK_ALIGN, Self::ELEMENT_ALIGN));
        assert!(util::is_aligned_to(Self::CHUNK_ALIGN, Self::FOOTER_ALIGN));
        Self::FOOTER_ALIGN.is_multiple_of(Self::ELEMENT_SIZE)
            || Self::ELEMENT_SIZE.is_multiple_of(Self::FOOTER_ALIGN)
    };

    /// Size of the first chunk a growing stack allocates: room for 8
    /// elements or 512 bytes minus allocator overhead, whichever is
    /// larger; elements over 1 KiB get room for just 2.
    const CHUNK_FIRST_SIZE: usize = {
        let chunk_512b = 0x200 - ALLOC_OVERHEAD;
        let size = if Self::ELEMENT_SIZE > 1024 {
            Self::chunk_size_for(2)
        } else {
            util::max(chunk_512b, Self::chunk_size_for(8))
        };
        assert!((size + ALLOC_OVERHEAD).is_power_of_two());
        size
    };

    /// Upper bound on chunk size: a power of two no larger than a quarter
    /// of `isize::MAX`, capped at 4 GiB, minus allocator overhead.
    const CHUNK_MAX_SIZE: usize = {
        let part_of_entire_memory = util::round_down_to_pow2(isize::MAX as usize >> 2);
        let common_sensible_in_2025 = 4 << 30;
        let size = util::min(part_of_entire_memory, common_sensible_in_2025);
        assert!(size.is_power_of_two());
        size - ALLOC_OVERHEAD
    };

    const ELEMENT_IS_ZST: bool = Self::ELEMENT_SIZE == 0;

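    /// Computes an allocation size that fits at least `elements_count`
    /// elements plus the chunk footer, rounded so that the size plus
    /// `ALLOC_OVERHEAD` is a power of two.
    ///
    /// A worked example (on a typical 64-bit target, where the footer is
    /// 48 bytes): for `T = u64` and `elements_count = 8`, the elements
    /// take 64 bytes and the overhead rounds to 64, so the result is
    /// `128usize.next_power_of_two() - 16 = 112`.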
    const fn chunk_size_for(mut elements_count: usize) -> usize {
        if elements_count < 2 {
            elements_count = 2;
        }
        let mut chunk_size = elements_count * Self::ELEMENT_SIZE;
        assert!(util::is_aligned_to(chunk_size, Self::ELEMENT_ALIGN));

        let overhead = ALLOC_OVERHEAD + Self::FOOTER_SIZE;
        chunk_size += overhead.next_multiple_of(Self::FOOTER_ALIGN);

        chunk_size.next_power_of_two() - ALLOC_OVERHEAD
    }

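    /// Whether `footer`'s chunk has no room for another element.
    ///
    /// When the footer sits flush against the element slots
    /// (`FOOTER_IS_END`), fullness reduces to a pointer comparison;
    /// otherwise the remaining gap is compared against the element size.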
    #[inline(always)]
    fn chunk_is_full(footer: &ChunkFooter) -> bool {
        if const { Self::ELEMENT_SIZE == 0 } {
            return false;
        }
        let end = footer.get().as_ptr() as usize;
        let ptr = footer.ptr.get().as_ptr() as usize;
        debug_assert!(ptr <= end);
        if const { Self::FOOTER_IS_END } {
            end == ptr
        } else {
            end - ptr < Self::ELEMENT_SIZE
        }
    }
}

impl<T> Stack<T> {
    #[inline(always)]
    unsafe fn alloc_element(&self) -> NonNull<T> {
        if let Some(ptr) = self.alloc_element_fast() {
            ptr
        } else {
            debug_assert!(!Self::ELEMENT_IS_ZST, "slow alloc is impossible for ZST");
            unsafe { self.alloc_element_slow() }
        }
    }

    #[inline(always)]
    fn alloc_element_fast(&self) -> Option<NonNull<T>> {
        let current_footer = unsafe { self.current_footer.get().as_ref() };

        if Self::chunk_is_full(current_footer) {
            return None;
        }

        // Bump the chunk's pointer by one element and hand out the old
        // position.
        let ptr = current_footer.ptr.get().as_ptr();
        let new_ptr = ptr.wrapping_byte_add(Self::ELEMENT_SIZE);

        debug_assert!(util::ptr_is_aligned_to(new_ptr, Self::ELEMENT_ALIGN));

        let new_ptr = unsafe { NonNull::new_unchecked(new_ptr) };
        current_footer.ptr.set(new_ptr);

        let ptr = unsafe { NonNull::new_unchecked(ptr) };
        Some(ptr.cast())
    }

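    /// Cold path of `alloc_element`: the current chunk is full. Either
    /// allocate the very first chunk, advance into an empty chunk cached
    /// from earlier pops, or allocate a new chunk of roughly double the
    /// size (capped at `CHUNK_MAX_SIZE`) and link it in.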
    unsafe fn alloc_element_slow(&self) -> NonNull<T> {
        unsafe {
            let current_footer = self.current_footer.get().as_ref();

            debug_assert!(Self::chunk_is_full(current_footer));

            if current_footer.is_dead() {
                debug_assert!(current_footer.prev.get().as_ref().is_dead());
                debug_assert!(current_footer.next.get().as_ref().is_dead());

                let new_footer_ptr = self.alloc_chunk(Self::CHUNK_FIRST_SIZE);
                self.current_footer.set(new_footer_ptr);
                self.first_footer.set(new_footer_ptr);
            } else {
                let next_footer_ptr = current_footer.next.get();
                let next_footer = next_footer_ptr.as_ref();

                if next_footer.is_dead() {
                    // Double the chunk size, keeping `size + ALLOC_OVERHEAD`
                    // a power of two.
                    let current_chunk_size = current_footer.layout.size();
                    let new_chunk_size = if current_chunk_size == Self::CHUNK_MAX_SIZE {
                        Self::CHUNK_MAX_SIZE
                    } else {
                        debug_assert!(current_chunk_size < Self::CHUNK_MAX_SIZE);
                        ((current_chunk_size + ALLOC_OVERHEAD) << 1) - ALLOC_OVERHEAD
                    };

                    debug_assert!((new_chunk_size + ALLOC_OVERHEAD).is_power_of_two());

                    let new_footer_ptr = self.alloc_chunk(new_chunk_size);
                    let new_footer = new_footer_ptr.as_ref();

                    current_footer.next.set(new_footer_ptr);
                    new_footer.prev.set(self.current_footer.get());

                    self.current_footer.set(new_footer_ptr);
                } else {
                    // Reuse the empty chunk cached by `dealloc_element_slow`.
                    debug_assert!(next_footer.is_empty());
                    self.current_footer.set(next_footer_ptr);
                }
            }

            self.alloc_element_fast().unwrap_unchecked()
        }
    }

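    /// Allocates a chunk of `chunk_size` bytes and writes its footer at
    /// the end. On allocation failure the requested size is repeatedly
    /// halved (keeping `size + ALLOC_OVERHEAD` a power of two); only when
    /// even the minimum chunk cannot be allocated does it give up via
    /// `handle_alloc_error`.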
    unsafe fn alloc_chunk(&self, chunk_size: usize) -> NonNull<ChunkFooter> {
        debug_assert!(chunk_size <= Self::CHUNK_MAX_SIZE);

        let mut new_chunk_size = chunk_size;
        let new_chunk_align = Self::CHUNK_ALIGN;

        let (new_chunk_ptr, new_chunk_layout) = loop {
            debug_assert!(new_chunk_align != 0);
            debug_assert!(new_chunk_align.is_power_of_two());
            debug_assert!((new_chunk_size + ALLOC_OVERHEAD).is_power_of_two());
            debug_assert!(new_chunk_size <= isize::MAX as usize);

            let new_chunk_layout =
                unsafe { Layout::from_size_align_unchecked(new_chunk_size, new_chunk_align) };

            let new_chunk_ptr = unsafe { alloc(new_chunk_layout) };
            if !new_chunk_ptr.is_null() {
                assert!(util::ptr_is_aligned_to(new_chunk_ptr, Self::CHUNK_ALIGN));
                break (new_chunk_ptr, new_chunk_layout);
            }

            // Allocation failed: retry with half the size until even the
            // minimum chunk fails.
            new_chunk_size = ((new_chunk_size + ALLOC_OVERHEAD) >> 1) - ALLOC_OVERHEAD;
            if new_chunk_size < Self::CHUNK_MIN_SIZE {
                handle_alloc_error(new_chunk_layout);
            }
        };

        // Place the footer at the end of the chunk, aligned downwards.
        let new_start = new_chunk_ptr;
        let new_end = new_start.wrapping_byte_add(new_chunk_layout.size());
        let mut new_footer_start = new_end.wrapping_byte_sub(Self::FOOTER_SIZE);
        new_footer_start = util::round_mut_ptr_down_to(new_footer_start, Self::FOOTER_ALIGN);
        let new_ptr = new_start;

        debug_assert!(new_start < new_footer_start);
        debug_assert!(new_footer_start < new_end);
        debug_assert!(util::ptr_is_aligned_to(new_ptr, Self::ELEMENT_ALIGN));

        let new_chunk_cap_in_bytes = new_footer_start as usize - new_ptr as usize;
        let new_chunk_capacity = new_chunk_cap_in_bytes / Self::ELEMENT_SIZE;
        self.capacity.update(|cap| cap + new_chunk_capacity);

        if const { Self::ELEMENT_SIZE.is_multiple_of(Self::FOOTER_ALIGN) } {
            // Pull the footer down flush against the last whole element
            // slot, so the `FOOTER_IS_END` fullness check holds.
            let buffer_size = Self::ELEMENT_SIZE * new_chunk_capacity;
            let corrected_footer_start = new_start.wrapping_byte_add(buffer_size);

            let delta = new_footer_start as usize - corrected_footer_start as usize;
            debug_assert!(delta < Self::ELEMENT_SIZE);

            new_footer_start = corrected_footer_start;
        }

        unsafe {
            let new_footer_ptr = new_footer_start as *mut ChunkFooter;

            util::write_with(new_footer_ptr, || ChunkFooter {
                data: NonNull::new_unchecked(new_start),
                ptr: Cell::new(NonNull::new_unchecked(new_ptr)),
                layout: new_chunk_layout,
                prev: Cell::new(DEAD_CHUNK.get()),
                next: Cell::new(DEAD_CHUNK.get()),
            });

            NonNull::new_unchecked(new_footer_ptr)
        }
    }

    #[inline(always)]
    unsafe fn dealloc_element(&mut self) -> Option<NonNull<T>> {
        unsafe {
            if let Some(ptr) = self.dealloc_element_fast() {
                Some(ptr)
            } else {
                self.dealloc_element_slow()
            }
        }
    }

    #[inline(always)]
    unsafe fn dealloc_element_fast(&mut self) -> Option<NonNull<T>> {
        let current_footer_ptr = self.current_footer.get();
        let current_footer = unsafe { current_footer_ptr.as_ref() };

        let start = current_footer.data.as_ptr();
        let ptr = current_footer.ptr.get().as_ptr();
        debug_assert!(start <= ptr);
        let occupied = ptr as usize - start as usize;

        if occupied < Self::ELEMENT_SIZE {
            return None;
        }

        let new_ptr = ptr.wrapping_byte_sub(Self::ELEMENT_SIZE);

        debug_assert!(start <= new_ptr);
        debug_assert!(util::ptr_is_aligned_to(new_ptr, Self::ELEMENT_ALIGN));

        let new_ptr = unsafe { NonNull::new_unchecked(new_ptr) };
        current_footer.ptr.set(new_ptr);

        Some(new_ptr.cast())
    }

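    /// Cold path of `dealloc_element`: the current chunk is empty. At most
    /// one empty successor chunk is kept as a cache (the larger of the two
    /// survives), then popping resumes from the previous chunk, or `None`
    /// is returned once the stack is exhausted.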
    unsafe fn dealloc_element_slow(&mut self) -> Option<NonNull<T>> {
        unsafe {
            let current_footer_ptr = self.current_footer.get();
            let current_footer = current_footer_ptr.as_ref();

            let next_footer_ptr = current_footer.next.get();
            let next_footer = next_footer_ptr.as_ref();

            let prev_footer_ptr = current_footer.prev.get();
            let prev_footer = prev_footer_ptr.as_ref();

            if current_footer.is_dead() {
                debug_assert!(next_footer.is_dead());
                debug_assert!(prev_footer.is_dead());
                return None;
            }

            debug_assert!(current_footer.is_empty());
            debug_assert!(next_footer.is_empty());

            if !next_footer.is_dead() {
                // Two empty chunks: free the smaller one and keep the other
                // cached for future pushes.
                if current_footer.layout.size() < next_footer.layout.size() {
                    debug_assert!(next_footer.next.get().as_ref().is_dead());

                    next_footer.prev.set(prev_footer_ptr);
                    self.current_footer.set(next_footer_ptr);

                    self.dealloc_chunk(current_footer_ptr);
                } else {
                    self.dealloc_chunk(next_footer_ptr);
                }
                self.current_footer
                    .get()
                    .as_ref()
                    .next
                    .set(DEAD_CHUNK.get());
            }

            if prev_footer.is_dead() {
                // No older chunk: the stack is now empty.
                self.first_footer.set(self.current_footer.get());
                None
            } else {
                debug_assert!(Self::chunk_is_full(prev_footer));

                prev_footer.next.set(self.current_footer.get());
                self.current_footer.set(prev_footer_ptr);

                self.dealloc_element_fast()
            }
        }
    }

    unsafe fn dealloc_chunk(&mut self, mut footer_ptr: NonNull<ChunkFooter>) {
        unsafe {
            let footer = footer_ptr.as_mut();
            // Never reached for ZSTs, which allocate no chunks.
            let chunk_capacity = footer.capacity() / Self::ELEMENT_SIZE;
            debug_assert!(chunk_capacity <= self.capacity());
            self.capacity.update(|cap| cap - chunk_capacity);
            debug_assert!(self.len() <= self.capacity());
            dealloc(footer.data.as_ptr(), footer.layout);
        }
    }

    unsafe fn first_unchecked(&self) -> NonNull<T> {
        unsafe {
            let first_footer = self.first_footer.get().as_ref();
            assert!(first_footer.occupied() >= Self::ELEMENT_SIZE);
            first_footer.data.cast()
        }
    }

    unsafe fn last_unchecked(&self) -> NonNull<T> {
        unsafe {
            let mut footer = self.current_footer.get().as_ref();
            // The current chunk is empty right after its last element was
            // popped; the stack's last element then lives in the previous
            // chunk.
            if footer.is_empty() {
                footer = footer.prev.get().as_ref();
            }
            assert!(footer.occupied() >= Self::ELEMENT_SIZE);
            footer.ptr.get().cast().sub(1)
        }
    }
}

mod stest;

#[cfg(test)]
mod utest;