use core::cell::UnsafeCell;
use core::marker::PhantomData;
use core::mem::{MaybeUninit, align_of, forget, replace, size_of};
use core::num::NonZeroU16;
use core::ops::{Deref, DerefMut};
use core::ptr;

use super::{Alloc, AllocError, Allocator, SliceBuffer};

#[cfg(test)]
mod tests;

/// The maximum size of the backing buffer in bytes.
const MAX_BYTES: usize = i32::MAX as usize;

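/// An allocator backed by a fixed, user-provided byte buffer.
///
/// Allocations are carved from the front of the buffer while their headers
/// are stacked at the `Header`-aligned back, so the two ends grow towards
/// each other; see `Internal` for a sketch of the layout.
///
/// A minimal usage sketch. It assumes `SliceBuffer` is implemented for
/// arrays of `MaybeUninit<u8>`; adjust the buffer type to whatever
/// `SliceBuffer` actually covers in this crate:
///
/// ```ignore
/// use core::mem::MaybeUninit;
///
/// let mut buf = [MaybeUninit::<u8>::uninit(); 1024];
/// let slice = Slice::new(&mut buf);
///
/// let alloc = (&slice).alloc(42u32).expect("buffer has room");
/// // SAFETY: The allocation was just initialized with a `u32`.
/// assert_eq!(unsafe { alloc.as_ptr().read() }, 42);
/// ```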
pub struct Slice<'a> {
    /// Shared bookkeeping state, aliased by every outstanding allocation.
    internal: UnsafeCell<Internal>,
    /// Ties the allocator to the lifetime of the borrowed buffer.
    _marker: PhantomData<&'a mut [MaybeUninit<u8>]>,
}

impl<'a> Slice<'a> {
    /// Construct a new slice allocator around the given buffer.
    ///
    /// # Panics
    ///
    /// Panics if the buffer is larger than [`MAX_BYTES`].
    pub fn new<B>(buffer: &'a mut B) -> Self
    where
        B: ?Sized + SliceBuffer,
    {
        let buffer = buffer.as_uninit_bytes();
        let size = buffer.len();

        assert!(
            size <= MAX_BYTES,
            "Buffer of {size} bytes is larger than the maximum {MAX_BYTES}"
        );

        let mut data = Range::new(buffer.as_mut_ptr_range());

        // Headers are stacked at the back of the buffer, so align the end
        // pointer down to the alignment of `Header`.
        let align = data.end.align_offset(align_of::<Header>());

        if align != 0 {
            let sub = align_of::<Header>() - align;

            if sub <= size {
                // SAFETY: The adjustment was just verified to be no larger
                // than the buffer itself.
                unsafe {
                    data.end = data.end.sub(sub);
                }
            } else {
                // The buffer cannot even hold a single aligned header, so
                // treat it as empty.
                data.end = data.start;
            }
        }

        Self {
            internal: UnsafeCell::new(Internal {
                free_head: None,
                tail: None,
                occupied: None,
                full: data,
                free: data,
            }),
            _marker: PhantomData,
        }
    }
}

unsafe impl<'a> Allocator for &'a Slice<'_> {
    // Seals the trait against downstream implementations.
    #[inline]
    fn __do_not_implement() {}

    const IS_GLOBAL: bool = false;

    type Alloc<T> = SliceAlloc<'a, T>;

    #[inline]
    fn alloc<T>(self, value: T) -> Result<Self::Alloc<T>, AllocError> {
        if size_of::<T>() == 0 {
            return Ok(SliceAlloc::ZST);
        }

        // SAFETY: `Slice` is `!Sync`, and the internal state is only
        // accessed for the duration of this call.
        let region = unsafe {
            let i = &mut *self.internal.get();
            let region = i.alloc(size_of::<T>(), align_of::<T>());

            let Some(region) = region else {
                return Err(AllocError);
            };

            // Write the initial value into the freshly allocated region.
            region.range.start.cast::<T>().write(value);
            Some(region.id)
        };

        Ok(SliceAlloc {
            region,
            internal: Some(&self.internal),
            cap: 1,
            _marker: PhantomData,
        })
    }

    #[inline]
    fn alloc_empty<T>(self) -> Self::Alloc<T> {
        if size_of::<T>() == 0 {
            return SliceAlloc::ZST;
        }

        // SAFETY: `Slice` is `!Sync`, and the internal state is only
        // accessed for the duration of this call.
        let region = unsafe {
            (*self.internal.get())
                .alloc(0, align_of::<T>())
                .map(|r| r.id)
        };

        SliceAlloc {
            region,
            internal: Some(&self.internal),
            cap: 0,
            _marker: PhantomData,
        }
    }
}

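/// A single allocation handed out by a [`Slice`] allocator.
///
/// Zero-sized types are served by the `ZST` sentinel below: it carries no
/// region and no reference back to the allocator, and it reports a capacity
/// of `usize::MAX`.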
pub struct SliceAlloc<'a, T> {
    /// The header identifier of the backing region, if any.
    region: Option<HeaderId>,
    /// Reference back to the shared allocator state; `None` for ZSTs.
    internal: Option<&'a UnsafeCell<Internal>>,
    /// The capacity of the allocation in units of `T`.
    cap: usize,
    _marker: PhantomData<T>,
}

impl<T> SliceAlloc<'_, T> {
    /// Sentinel allocation used for zero-sized types.
    const ZST: Self = Self {
        region: None,
        internal: None,
        cap: usize::MAX,
        _marker: PhantomData,
    };
}

impl<T> SliceAlloc<'_, T> {
    #[inline]
    fn free(&mut self) {
        let (Some(region), Some(internal)) = (self.region.take(), self.internal) else {
            return;
        };

        // SAFETY: The region is freed exactly once, since it was just taken
        // out of this handle.
        unsafe {
            (*internal.get()).free(region);
        }
    }
}

impl<T> Alloc<T> for SliceAlloc<'_, T> {
    #[inline]
    fn as_ptr(&self) -> *const T {
        // ZSTs and failed allocations expose a dangling, well-aligned
        // pointer.
        let (Some(region), Some(internal)) = (self.region, self.internal) else {
            return ptr::NonNull::dangling().as_ptr();
        };

        // SAFETY: The header stays valid for as long as this allocation is
        // live.
        unsafe {
            let i = &*internal.get();
            let this = i.header(region);
            this.range.start.cast_const().cast()
        }
    }

    #[inline]
    fn as_mut_ptr(&mut self) -> *mut T {
        let (Some(region), Some(internal)) = (self.region, self.internal) else {
            return ptr::NonNull::dangling().as_ptr();
        };

        // SAFETY: The header stays valid for as long as this allocation is
        // live.
        unsafe {
            let i = &*internal.get();
            let this = i.header(region);
            this.range.start.cast()
        }
    }

    #[inline]
    fn capacity(&self) -> usize {
        // A zero-sized allocation reports unlimited capacity.
        let Some(internal) = self.internal else {
            return usize::MAX;
        };

        let Some(region_id) = self.region else {
            return 0;
        };

        // SAFETY: The internal state is only accessed for the duration of
        // this call.
        unsafe {
            let i = &mut *internal.get();
            i.region(region_id).capacity()
        }
    }

    #[inline]
    fn resize(&mut self, len: usize, additional: usize) -> Result<(), AllocError> {
        // Fast path: the allocation is already large enough. The sum is
        // checked so that an overflowing request cannot skip reallocation.
        if let Some(total) = len.checked_add(additional) {
            if total <= self.cap {
                return Ok(());
            }
        }

        let Some(internal) = self.internal else {
            debug_assert_eq!(
                size_of::<T>(),
                0,
                "Only ZSTs should lack an internal pointer"
            );
            // Zero-sized types never need any backing memory.
            return Ok(());
        };

        let Some(region_id) = self.region else {
            return Err(AllocError);
        };

        // Convert the request into bytes, erroring out on overflow.
        let Some(len) = len.checked_mul(size_of::<T>()) else {
            return Err(AllocError);
        };

        let Some(additional) = additional.checked_mul(size_of::<T>()) else {
            return Err(AllocError);
        };

        let Some(requested) = len.checked_add(additional) else {
            return Err(AllocError);
        };

        if requested > MAX_BYTES {
            return Err(AllocError);
        }

        // SAFETY: The internal state is only accessed for the duration of
        // this call.
        unsafe {
            let i = &mut *internal.get();

            let region = i.region(region_id);

            let actual = region.capacity();

            // The region is already big enough; just record its capacity in
            // units of `T`.
            if actual >= requested {
                self.cap = actual / size_of::<T>();
                return Ok(());
            };

            let Some(region) = i.realloc(region_id, len, requested, align_of::<T>()) else {
                return Err(AllocError);
            };

            self.region = Some(region.id);
            self.cap = region.capacity() / size_of::<T>();
            Ok(())
        }
    }

    #[inline]
    fn try_merge<B>(&mut self, this_len: usize, buf: B, other_len: usize) -> Result<(), B>
    where
        B: Alloc<T>,
    {
        // `T` is zero-sized, so there is nothing to merge.
        let Some(internal) = self.internal else {
            return Ok(());
        };

        let Some(region) = self.region else {
            return Err(buf);
        };

        let this_len = this_len * size_of::<T>();
        let other_len = other_len * size_of::<T>();

        // The pointer to the start of the other allocation.
        let other_ptr = buf.as_ptr().cast();

        unsafe {
            let i = &mut *internal.get();
            let mut this = i.region(region);

            debug_assert!(this.capacity() >= this_len);

            // The two allocations are not adjacent in memory, so they cannot
            // be merged.
            if !ptr::eq(this.range.end.cast_const(), other_ptr) {
                return Err(buf);
            }

            let Some(next) = this.next else {
                return Err(buf);
            };

            // The other allocation is the region immediately following this
            // one; we take over its memory, so make sure its destructor does
            // not run and free it a second time.
            forget(buf);

            let next = i.region(next);

            let to = this.range.start.wrapping_add(this_len);

            // If this region has spare capacity past its initialized bytes,
            // shift the other region's bytes down so the data is contiguous.
            if this.range.end != to {
                this.range.end.copy_to(to, other_len);
            }

            // Release the merged region's header and extend this region over
            // the memory it used to cover.
            let old = i.free_region(next);
            this.range.end = old.range.end;
            Ok(())
        }
    }
}

impl<T> Drop for SliceAlloc<'_, T> {
    #[inline]
    fn drop(&mut self) {
        self.free()
    }
}

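/// A header identifier paired with a raw pointer to the header it denotes,
/// so callers can mutate the header through `Deref`/`DerefMut` while still
/// being able to re-identify it in the linked list.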
struct Region {
    id: HeaderId,
    ptr: *mut Header,
}

impl Deref for Region {
    type Target = Header;

    #[inline]
    fn deref(&self) -> &Self::Target {
        // SAFETY: Constructing a `Region` implies that the header pointer is
        // valid for the duration of the handle.
        unsafe { &*self.ptr }
    }
}

impl DerefMut for Region {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: Constructing a `Region` implies that the header pointer is
        // valid and unaliased for the duration of the handle.
        unsafe { &mut *self.ptr }
    }
}

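/// The 1-based identity of a region header, measured in `Header`-sized
/// slots backwards from the end of the buffer (see `Internal::header_mut`).
/// `NonZeroU16` keeps `Option<HeaderId>` at two bytes thanks to the niche
/// optimization.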
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(test, derive(PartialOrd, Ord, Hash))]
#[repr(transparent)]
struct HeaderId(NonZeroU16);

impl HeaderId {
    #[cfg(test)]
    const unsafe fn new_unchecked(value: u16) -> Self {
        Self(unsafe { NonZeroU16::new_unchecked(value) })
    }

    /// Construct a new identifier, returning `None` if the value is zero or
    /// does not fit in a `u16`.
    #[inline]
    fn new(value: isize) -> Option<Self> {
        Some(Self(NonZeroU16::new(u16::try_from(value).ok()?)?))
    }

    /// Get the identifier as a `usize`.
    #[inline]
    fn get(self) -> usize {
        self.0.get() as usize
    }
}

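/// A raw, copyable `start..end` pointer range into the underlying buffer.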
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Range {
    start: *mut MaybeUninit<u8>,
    end: *mut MaybeUninit<u8>,
}

impl Range {
    fn new(range: core::ops::Range<*mut MaybeUninit<u8>>) -> Self {
        Self {
            start: range.start,
            end: range.end,
        }
    }

    /// The empty range positioned at the start of this range.
    fn head(self) -> Range {
        Self {
            start: self.start,
            end: self.start,
        }
    }
}

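/// Bookkeeping state shared by all allocations of a [`Slice`].
///
/// A sketch of the buffer layout, as inferred from `alloc` and
/// `alloc_header`: payloads grow upwards from the start of the buffer while
/// headers are stacked downwards from the `Header`-aligned end, with the
/// free range shrinking in between.
///
/// ```text
/// full.start                free.start      free.end            full.end
/// | payload 1 | payload 2 ..|  free bytes   | .. header 2 | header 1 |
/// ```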
struct Internal {
    /// Linked list of freed header slots available for reuse.
    free_head: Option<HeaderId>,
    /// The last region in the list.
    tail: Option<HeaderId>,
    /// A freed region which could not be coalesced, kept around for reuse by
    /// later allocations. There is at most one at any time.
    occupied: Option<HeaderId>,
    /// The full (header-aligned) range of the underlying buffer.
    full: Range,
    /// The part of the buffer which is still unused.
    free: Range,
}

impl Internal {
    /// The number of bytes allocated from the front of the buffer.
    #[cfg(test)]
    #[inline]
    fn bytes(&self) -> usize {
        // SAFETY: Both pointers belong to the same buffer.
        unsafe { self.free.start.byte_offset_from(self.full.start) as usize }
    }

    /// The number of header slots taken at the back of the buffer.
    #[cfg(test)]
    #[inline]
    fn headers(&self) -> usize {
        // SAFETY: Both pointers belong to the same buffer.
        unsafe {
            self.full
                .end
                .cast::<Header>()
                .offset_from(self.free.end.cast()) as usize
        }
    }

    /// The number of free bytes left between allocations and headers.
    #[inline]
    fn remaining(&self) -> usize {
        // SAFETY: Both pointers belong to the same buffer.
        unsafe { self.free.end.byte_offset_from(self.free.start) as usize }
    }

    /// Access the header with the given identifier.
    #[inline]
    fn header(&self, at: HeaderId) -> &Header {
        // SAFETY: Identifiers are only handed out for headers which have
        // been written at the back of the buffer.
        unsafe { &*self.full.end.cast::<Header>().wrapping_sub(at.get()) }
    }

    /// Get a mutable pointer to the header with the given identifier.
    #[inline]
    fn header_mut(&mut self, at: HeaderId) -> *mut Header {
        self.full.end.cast::<Header>().wrapping_sub(at.get())
    }

    /// Construct a region handle for the given identifier.
    #[inline]
    fn region(&mut self, id: HeaderId) -> Region {
        Region {
            id,
            ptr: self.header_mut(id),
        }
    }

    /// Unlink the header from the doubly linked region list, fixing up the
    /// tail if it was last.
    unsafe fn unlink(&mut self, header: &Header) {
        unsafe {
            if let Some(next) = header.next {
                (*self.header_mut(next)).prev = header.prev;
            } else {
                self.tail = header.prev;
            }

            if let Some(prev) = header.prev {
                (*self.header_mut(prev)).next = header.next;
            }
        }
    }

    /// Unlink the region from its current position and re-link it as the new
    /// tail of the list.
    unsafe fn replace_back(&mut self, region: &mut Region) {
        unsafe {
            let prev = region.prev.take();
            let next = region.next.take();

            if let Some(prev) = prev {
                (*self.header_mut(prev)).next = next;
            }

            if let Some(next) = next {
                (*self.header_mut(next)).prev = prev;
            }

            self.push_back(region);
        }
    }

    /// Push the region onto the back of the list, making it the new tail.
    unsafe fn push_back(&mut self, region: &mut Region) {
        unsafe {
            if let Some(tail) = self.tail.replace(region.id) {
                region.prev = Some(tail);
                (*self.header_mut(tail)).next = Some(region.id);
            }
        }
    }

    /// Free the region: unlink it, push its header onto the free list, and
    /// return the header as it was before being freed.
    unsafe fn free_region(&mut self, region: Region) -> Header {
        unsafe {
            self.unlink(&region);

            region.ptr.replace(Header {
                range: self.full.head(),
                next: self.free_head.replace(region.id),
                prev: None,
            })
        }
    }

    /// Allocate a header covering `self.free.start..end`, preferring a slot
    /// from the free list over carving a new one from the back of the
    /// buffer. The returned region is not yet linked into the region list.
    unsafe fn alloc_header(&mut self, end: *mut MaybeUninit<u8>) -> Option<Region> {
        // Reuse a previously freed header slot if one is available.
        if let Some(region) = self.free_head.take() {
            let mut region = self.region(region);

            region.range.start = self.free.start;
            region.range.end = end;

            return Some(region);
        }

        debug_assert_eq!(
            self.free.end.align_offset(align_of::<Header>()),
            0,
            "End pointer should be aligned to header"
        );

        // Carve a new header slot off the back of the free range.
        let ptr = self.free.end.cast::<Header>().wrapping_sub(1);

        if ptr < self.free.start.cast() || ptr >= self.free.end.cast() {
            return None;
        }

        unsafe {
            let id = HeaderId::new(self.full.end.cast::<Header>().offset_from(ptr))?;

            ptr.write(Header {
                range: Range::new(self.free.start..end),
                prev: None,
                next: None,
            });

            self.free.end = ptr.cast();
            Some(Region { id, ptr })
        }
    }

    /// Allocate `requested` bytes with the given alignment, returning the
    /// new tail region.
    unsafe fn alloc(&mut self, requested: usize, align: usize) -> Option<Region> {
        // Prefer reusing the occupied region if it is big enough and
        // suitably aligned.
        if let Some(occupied) = self.occupied {
            let region = self.region(occupied);

            if region.capacity() >= requested && region.is_aligned(align) {
                self.occupied = None;
                return Some(region);
            }
        }

        unsafe {
            self.align(align)?;

            if self.remaining() < requested {
                return None;
            }

            let end = self.free.start.wrapping_add(requested);

            let mut region = self.alloc_header(end)?;

            self.free.start = end;
            debug_assert!(self.free.start <= self.free.end);
            self.push_back(&mut region);
            Some(region)
        }
    }

    /// Align the start of the free range to `align`, attributing any padding
    /// bytes to the current tail region.
    unsafe fn align(&mut self, align: usize) -> Option<()> {
        let align = self.free.start.align_offset(align);

        if align == 0 {
            return Some(());
        }

        if self.remaining() < align {
            return None;
        }

        let aligned_start = self.free.start.wrapping_add(align);

        if let Some(tail) = self.tail {
            // Expand the tail region to cover the padding bytes.
            self.region(tail).range.end = aligned_start;
        } else {
            // There is no tail, so install a fresh region covering just the
            // padding.
            unsafe {
                let mut region = self.alloc_header(aligned_start)?;

                self.push_back(&mut region);
            }
        }

        self.free.start = aligned_start;
        Some(())
    }

    /// Free the region with the given identifier.
    unsafe fn free(&mut self, region: HeaderId) {
        let region = self.region(region);

        // Just free up the last region in the slab.
        if region.next.is_none() {
            unsafe {
                self.free_tail(region);
            }

            return;
        }

        // The region has no predecessor to merge into, so keep it around as
        // the occupied region for later reuse.
        let Some(prev) = region.prev else {
            debug_assert!(
                self.occupied.is_none(),
                "There can only be one occupied region"
            );

            self.occupied = Some(region.id);
            return;
        };

        let mut prev = self.region(prev);

        // Coalesce the freed memory into the previous region.
        let region = unsafe { self.free_region(region) };

        prev.range.end = region.range.end;

        // The freed header was the last in the list, so reclaim its bytes
        // for the free range.
        if region.next.is_none() {
            self.free.start = region.range.start;
        }
    }

    /// Free the tail region, reclaiming its bytes, and the bytes of an
    /// immediately preceding occupied region, for the free range.
    unsafe fn free_tail(&mut self, current: Region) {
        debug_assert_eq!(self.tail, Some(current.id));

        unsafe {
            let current = self.free_region(current);
            debug_assert_eq!(current.next, None);

            self.free.start = match current.prev {
                // The occupied region sits directly before the tail, so
                // reclaim it as well.
                Some(prev) if self.occupied == Some(prev) => {
                    self.occupied = None;
                    let prev = self.region(prev);
                    self.free_region(prev).range.start
                }
                _ => current.range.start,
            };
        }
    }

    /// Reserve `additional` bytes past the aligned free start, returning the
    /// prospective new free start without committing it.
    unsafe fn reserve(&mut self, additional: usize, align: usize) -> Option<*mut MaybeUninit<u8>> {
        unsafe {
            self.align(align)?;
        }

        let free_start = self.free.start.wrapping_add(additional);

        // Fail if the reservation runs past the headers or wraps around.
        if free_start > self.free.end || free_start < self.free.start {
            return None;
        }

        Some(free_start)
    }

    /// Grow the region so it can hold `requested` bytes, copying the `len`
    /// initialized bytes over if the region has to move.
    unsafe fn realloc(
        &mut self,
        from: HeaderId,
        len: usize,
        requested: usize,
        align: usize,
    ) -> Option<Region> {
        let mut from = self.region(from);

        // This is the last region in the list, so it can be extended in
        // place without copying.
        if from.next.is_none() {
            unsafe {
                let additional = requested - from.capacity();
                self.free.start = self.reserve(additional, align)?;
                from.range.end = from.range.end.add(additional);
            }

            return Some(from);
        }

        // The region is empty, so move it to the front of the free range and
        // re-link it as the tail; there is nothing to copy.
        if from.range.start == from.range.end {
            unsafe {
                let free_start = self.reserve(requested, align)?;
                from.range.start = replace(&mut self.free.start, free_start);
                from.range.end = free_start;
                self.replace_back(&mut from);
            }

            return Some(from);
        }

        // Try to expand backwards into the occupied region immediately
        // preceding this one.
        'bail: {
            let Some(prev) = from.prev else {
                break 'bail;
            };

            if self.occupied != Some(prev) {
                break 'bail;
            }

            let mut prev = self.region(prev);

            if prev.capacity() + from.capacity() < requested {
                break 'bail;
            }

            if !prev.is_aligned(align) {
                break 'bail;
            }

            unsafe {
                let from = self.free_region(from);

                from.range.start.copy_to(prev.range.start, len);
                prev.range.end = from.range.end;
                self.occupied = None;
                return Some(prev);
            }
        }

        // Fall back to allocating a fresh region and copying the data over.
        unsafe {
            let to = self.alloc(requested, align)?;
            from.range.start.copy_to_nonoverlapping(to.range.start, len);
            self.free(from.id);
            Some(to)
        }
    }
}

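/// The header tracking a single region: the byte range it covers plus its
/// links to the neighbouring regions, which are kept in memory order.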
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Header {
    /// The range of bytes this region covers.
    range: Range,
    /// The previous neighbouring region.
    prev: Option<HeaderId>,
    /// The next neighbouring region.
    next: Option<HeaderId>,
}

impl Header {
    /// The capacity of the region in bytes.
    #[inline]
    fn capacity(&self) -> usize {
        // SAFETY: Both pointers belong to the same buffer.
        unsafe { self.range.end.byte_offset_from(self.range.start) as usize }
    }

    /// Test if the start of the region is aligned to `align`.
    #[inline]
    fn is_aligned(&self, align: usize) -> bool {
        self.range.start.align_offset(align) == 0
    }
}