use core::alloc::Layout;
use core::cell::UnsafeCell;
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ptr::NonNull;
use core::{fmt, ptr};

#[cfg(feature = "alloc")]
extern crate alloc;

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct AllocError;

#[cfg(any(feature = "std", feature = "core-error"))]
impl crate::Error for AllocError {}

impl fmt::Display for AllocError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("memory allocation failed")
    }
}

/// An implementation of `Allocator` can allocate, grow, shrink, and deallocate
/// arbitrary blocks of data described via [`Layout`].
///
/// # Safety
///
/// Memory blocks returned from an allocator must point to valid memory and
/// retain their validity until the allocator and all of its clones are
/// dropped. Moving or cloning the allocator must not invalidate blocks it has
/// returned, and a cloned allocator must behave like the original.
pub unsafe trait Allocator {
    /// Attempts to allocate a block of memory that fits `layout`, returning a
    /// pointer to the start of the block along with its actual size.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError>;

    /// Behaves like `allocate`, but also ensures that the returned memory is
    /// zero-initialized.
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let ptr = self.allocate(layout)?;
        // SAFETY: `ptr` was just allocated and is valid for writes of
        // `ptr.len()` bytes.
        unsafe { (ptr.as_ptr() as *mut u8).write_bytes(0, ptr.len()) }

        Ok(ptr)
    }

    /// Deallocates the memory referenced by `ptr`.
    ///
    /// # Safety
    ///
    /// `ptr` must denote a block of memory currently allocated via this
    /// allocator, and `layout` must fit that block.
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout);

    /// Attempts to extend the memory block.
    ///
    /// # Safety
    ///
    /// `ptr` must denote a block of memory currently allocated via this
    /// allocator, `old_layout` must fit that block, and `new_layout.size()`
    /// must be greater than or equal to `old_layout.size()`.
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        let new_ptr = self.allocate(new_layout)?;

        // SAFETY: the new block is at least as large as the old one and does
        // not overlap it, and the caller guarantees that `ptr` and
        // `old_layout` describe a live allocation of this allocator.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, old_layout.size());
            self.deallocate(ptr, old_layout);
        }

        Ok(new_ptr)
    }

    /// Behaves like `grow`, but also ensures that the newly added bytes are
    /// zero-initialized.
    ///
    /// # Safety
    ///
    /// Same contract as [`Allocator::grow`].
    unsafe fn grow_zeroed(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        let new_ptr = self.allocate_zeroed(new_layout)?;

        // SAFETY: same as `grow`; copying the old bytes over the zeroed block
        // leaves the tail zero-initialized.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, old_layout.size());
            self.deallocate(ptr, old_layout);
        }

        Ok(new_ptr)
    }

    /// Attempts to shrink the memory block.
    ///
    /// # Safety
    ///
    /// `ptr` must denote a block of memory currently allocated via this
    /// allocator, `old_layout` must fit that block, and `new_layout.size()`
    /// must be smaller than or equal to `old_layout.size()`.
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        let new_ptr = self.allocate(new_layout)?;

        // SAFETY: only the first `new_layout.size()` bytes are copied, which
        // the size precondition keeps within the old block.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, new_layout.size());
            self.deallocate(ptr, old_layout);
        }

        Ok(new_ptr)
    }

    /// Creates a "by reference" adapter for this instance of `Allocator`.
    #[inline(always)]
    fn by_ref(&self) -> &Self
    where
        Self: Sized,
    {
        self
    }
}

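// A minimal sketch of a custom `Allocator` (hypothetical, for illustration
// only, not part of the public API): a wrapper that delegates to `Global`
// while counting live allocations. Only the two required methods are written
// out; the provided defaults supply zeroing, growing, and shrinking, and the
// blanket impls below make `&CountingAlloc` an allocator as well.
#[cfg(all(test, feature = "alloc"))]
mod counting_alloc_example {
    use super::*;
    use core::sync::atomic::{AtomicUsize, Ordering};

    struct CountingAlloc {
        live: AtomicUsize,
    }

    // SAFETY: all blocks are managed by `Global`, which upholds the contract.
    unsafe impl Allocator for CountingAlloc {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            let ptr = Global.allocate(layout)?;
            self.live.fetch_add(1, Ordering::Relaxed);
            Ok(ptr)
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            // SAFETY: forwarded directly; the caller's contract matches `Global`'s.
            unsafe { Global.deallocate(ptr, layout) };
            self.live.fetch_sub(1, Ordering::Relaxed);
        }
    }

    #[test]
    fn counts_live_allocations() {
        let alloc = CountingAlloc { live: AtomicUsize::new(0) };
        let layout = Layout::new::<u64>();

        let ptr = alloc.by_ref().allocate(layout).unwrap();
        assert_eq!(alloc.live.load(Ordering::Relaxed), 1);

        // SAFETY: `ptr` was just allocated with `layout`.
        unsafe { alloc.deallocate(ptr.cast(), layout) };
        assert_eq!(alloc.live.load(Ordering::Relaxed), 0);
    }
}
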
unsafe impl<T: ?Sized + Allocator> Allocator for &T {
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate(layout)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        (**self).deallocate(ptr, layout)
    }

    #[inline(always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate_zeroed(layout)
    }

    #[inline(always)]
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow(ptr, old_layout, new_layout)
    }

    #[inline(always)]
    unsafe fn grow_zeroed(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow_zeroed(ptr, old_layout, new_layout)
    }

    #[inline(always)]
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).shrink(ptr, old_layout, new_layout)
    }
}

unsafe impl<T: ?Sized + Allocator> Allocator for &mut T {
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate(layout)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        (**self).deallocate(ptr, layout)
    }

    #[inline(always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate_zeroed(layout)
    }

    #[inline(always)]
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow(ptr, old_layout, new_layout)
    }

    #[inline(always)]
    unsafe fn grow_zeroed(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow_zeroed(ptr, old_layout, new_layout)
    }

    #[inline(always)]
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).shrink(ptr, old_layout, new_layout)
    }
}

#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
unsafe impl<T: ?Sized + Allocator> Allocator for alloc::boxed::Box<T> {
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate(layout)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        (**self).deallocate(ptr, layout)
    }

    #[inline(always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate_zeroed(layout)
    }

    #[inline(always)]
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow(ptr, old_layout, new_layout)
    }

    #[inline(always)]
    unsafe fn grow_zeroed(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow_zeroed(ptr, old_layout, new_layout)
    }

    #[inline(always)]
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).shrink(ptr, old_layout, new_layout)
    }
}

/// Allocator that forwards to the global allocator exposed by the `alloc`
/// crate, handling zero-sized allocations with well-aligned dangling pointers.
#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
pub struct Global;

#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
unsafe impl Allocator for Global {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let ptr = if layout.size() == 0 {
            // Zero-sized allocations get a dangling, well-aligned pointer.
            core::ptr::null_mut::<u8>().wrapping_add(layout.align())
        } else {
            unsafe { alloc::alloc::alloc(layout) }
        };

        if ptr.is_null() {
            Err(AllocError)
        } else {
            Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(ptr, layout.size())) })
        }
    }

    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let ptr = if layout.size() == 0 {
            core::ptr::null_mut::<u8>().wrapping_add(layout.align())
        } else {
            unsafe { alloc::alloc::alloc_zeroed(layout) }
        };

        if ptr.is_null() {
            Err(AllocError)
        } else {
            Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(ptr, layout.size())) })
        }
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: non-zero-sized blocks were allocated by `alloc`/`alloc_zeroed`.
            unsafe { alloc::alloc::dealloc(ptr.as_ptr(), layout) };
        }
    }

    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        if old_layout.size() == 0 {
            // A zero-sized block was never actually allocated, so growing it
            // is a fresh allocation; `realloc` must not see the dangling pointer.
            self.allocate(new_layout)
        } else if old_layout.align() == new_layout.align() {
            // SAFETY: `0 < old_layout.size() <= new_layout.size()`, so
            // `realloc` receives a live allocation and a nonzero new size.
            let new_ptr = unsafe { alloc::alloc::realloc(ptr.as_ptr(), old_layout, new_layout.size()) };
            if new_ptr.is_null() {
                Err(AllocError)
            } else {
                Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(new_ptr, new_layout.size())) })
            }
        } else {
            let new_ptr = self.allocate(new_layout)?;

            // SAFETY: the caller guarantees that `ptr` and `old_layout`
            // describe a live allocation, which the new block cannot overlap.
            unsafe {
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, old_layout.size());
                self.deallocate(ptr, old_layout);
            }

            Ok(new_ptr)
        }
    }

    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        if new_layout.size() == 0 {
            // SAFETY: release the old block (a no-op if it was zero-sized),
            // then hand out a dangling, well-aligned zero-length slice.
            unsafe { self.deallocate(ptr, old_layout) };
            let dangling = core::ptr::null_mut::<u8>().wrapping_add(new_layout.align());
            Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(dangling, 0)) })
        } else if old_layout.align() == new_layout.align() {
            // SAFETY: `0 < new_layout.size() <= old_layout.size()`, so the old
            // block is a live allocation and `realloc` is valid here.
            let new_ptr = unsafe { alloc::alloc::realloc(ptr.as_ptr(), old_layout, new_layout.size()) };
            if new_ptr.is_null() {
                Err(AllocError)
            } else {
                Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(new_ptr, new_layout.size())) })
            }
        } else {
            let new_ptr = self.allocate(new_layout)?;

            // SAFETY: only the first `new_layout.size()` bytes are copied,
            // which the size precondition keeps within the old block.
            unsafe {
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, new_layout.size());
                self.deallocate(ptr, old_layout);
            }

            Ok(new_ptr)
        }
    }
}
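
// A minimal usage sketch (hypothetical test, assuming the `alloc` feature is
// enabled): allocate through `Global`, grow the block, then release it via
// the raw `Allocator` API.
#[cfg(all(test, feature = "alloc"))]
mod global_usage_example {
    use super::*;

    #[test]
    fn allocate_grow_deallocate() {
        let old_layout = Layout::array::<u8>(16).unwrap();
        let new_layout = Layout::array::<u8>(32).unwrap();

        let ptr = Global.allocate(old_layout).unwrap();
        assert_eq!(ptr.len(), 16);

        // SAFETY: `ptr` is live, `old_layout` fits it, and the new size is larger.
        let grown = unsafe { Global.grow(ptr.cast(), old_layout, new_layout) }.unwrap();
        assert_eq!(grown.len(), 32);

        // SAFETY: `grown` is the current allocation, with layout `new_layout`.
        unsafe { Global.deallocate(grown.cast(), new_layout) };
    }
}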

/// Hand-rolled vtable for a type-erased allocator whose state is stored
/// inline as a single pointer-sized, pointer-aligned value.
#[derive(Copy, Clone, Debug)]
pub(crate) struct VTable {
    pub allocate: unsafe fn(*const (), Layout) -> Result<NonNull<[u8]>, AllocError>,
    pub allocate_zeroed: unsafe fn(*const (), Layout) -> Result<NonNull<[u8]>, AllocError>,
    pub deallocate: unsafe fn(*const (), NonNull<u8>, Layout),
    pub grow: unsafe fn(*const (), NonNull<u8>, Layout, Layout) -> Result<NonNull<[u8]>, AllocError>,
    pub grow_zeroed: unsafe fn(*const (), NonNull<u8>, Layout, Layout) -> Result<NonNull<[u8]>, AllocError>,
    pub shrink: unsafe fn(*const (), NonNull<u8>, Layout, Layout) -> Result<NonNull<[u8]>, AllocError>,

    /// `None` when the erased allocator is not cloneable.
    pub clone: Option<unsafe fn(*mut (), *const ())>,
    pub drop: unsafe fn(*mut ()),
}

/// Type-erased, `Send` allocator whose erased state is stored inline; only
/// allocators that are at most pointer-sized and pointer-aligned can be erased.
pub struct DynAlloc<'a> {
    pub(crate) alloc: UnsafeCell<MaybeUninit<*const ()>>,
    pub(crate) vtable: &'static VTable,
    __marker: PhantomData<&'a ()>,
}

// SAFETY: `DynAlloc` can only be constructed from `Send` allocators.
unsafe impl Send for DynAlloc<'_> {}

unsafe impl Allocator for DynAlloc<'_> {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (self.vtable.allocate)(core::ptr::addr_of!(self.alloc) as *const (), layout) }
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe { (self.vtable.deallocate)(core::ptr::addr_of!(self.alloc) as *const (), ptr, layout) }
    }

    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (self.vtable.allocate_zeroed)(core::ptr::addr_of!(self.alloc) as *const (), layout) }
    }

    #[inline]
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (self.vtable.grow)(core::ptr::addr_of!(self.alloc) as *const (), ptr, old_layout, new_layout) }
    }

    #[inline]
    unsafe fn grow_zeroed(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (self.vtable.grow_zeroed)(core::ptr::addr_of!(self.alloc) as *const (), ptr, old_layout, new_layout) }
    }

    #[inline]
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (self.vtable.shrink)(core::ptr::addr_of!(self.alloc) as *const (), ptr, old_layout, new_layout) }
    }
}

impl Drop for DynAlloc<'_> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `self.alloc` holds a valid erased allocator until `drop` runs.
        unsafe { (self.vtable.drop)(core::ptr::addr_of_mut!(self.alloc) as *mut ()) }
    }
}

impl Clone for DynAlloc<'_> {
    /// # Panics
    ///
    /// Panics if the erased allocator is not cloneable (see
    /// [`DynAlloc::cloneable`]).
    #[inline]
    fn clone(&self) -> Self {
        let mut alloc = UnsafeCell::new(MaybeUninit::uninit());
        unsafe {
            self.vtable.clone.unwrap()(core::ptr::addr_of_mut!(alloc) as *mut (), core::ptr::addr_of!(self.alloc) as *const ());
        }

        Self {
            alloc,
            vtable: self.vtable,
            __marker: PhantomData,
        }
    }
}

impl<'a> DynAlloc<'a> {
    /// Type-erases `alloc` without requiring it to be cloneable, returning
    /// the allocator unchanged if it does not fit in pointer-sized,
    /// pointer-aligned inline storage.
    #[inline]
    pub fn try_new_unclone<A: 'a + Allocator + Send>(alloc: A) -> Result<Self, A> {
        if core::mem::size_of::<A>() <= core::mem::size_of::<*const ()>() && core::mem::align_of::<A>() <= core::mem::align_of::<*const ()>() {
            trait AllocUnclone: Allocator + Send {
                const VTABLE: &'static VTable = &unsafe {
                    VTable {
                        allocate: core::mem::transmute(Self::allocate as fn(&Self, _) -> _),
                        allocate_zeroed: core::mem::transmute(Self::allocate_zeroed as fn(&Self, _) -> _),
                        deallocate: core::mem::transmute(Self::deallocate as unsafe fn(&Self, _, _) -> _),
                        grow: core::mem::transmute(Self::grow as unsafe fn(&Self, _, _, _) -> _),
                        grow_zeroed: core::mem::transmute(Self::grow_zeroed as unsafe fn(&Self, _, _, _) -> _),
                        shrink: core::mem::transmute(Self::shrink as unsafe fn(&Self, _, _, _) -> _),

                        clone: None,
                        drop: core::mem::transmute(core::ptr::drop_in_place::<Self> as unsafe fn(_) -> _),
                    }
                };
            }
            impl<A: Allocator + Send> AllocUnclone for A {}

            Ok(Self {
                alloc: {
                    let storage = UnsafeCell::new(MaybeUninit::<*const ()>::uninit());
                    // Move `alloc` into the start of the storage; writing
                    // through a pointer avoids reading past `A` when it is
                    // smaller than a pointer.
                    unsafe { (storage.get() as *mut A).write(alloc) };
                    storage
                },
                vtable: <A as AllocUnclone>::VTABLE,
                __marker: PhantomData,
            })
        } else {
            Err(alloc)
        }
    }

    /// Type-erases `alloc`, preserving the ability to clone it through the
    /// vtable. Fails like [`DynAlloc::try_new_unclone`] if the allocator does
    /// not fit in the inline storage.
    #[inline]
    pub fn try_new_clone<A: 'a + Clone + Allocator + Send>(alloc: A) -> Result<Self, A> {
        if core::mem::size_of::<A>() <= core::mem::size_of::<*const ()>() && core::mem::align_of::<A>() <= core::mem::align_of::<*const ()>() {
            trait AllocClone: Allocator + Send + Clone {
                const VTABLE: &'static VTable = &unsafe {
                    VTable {
                        allocate: core::mem::transmute(Self::allocate as fn(&Self, _) -> _),
                        allocate_zeroed: core::mem::transmute(Self::allocate_zeroed as fn(&Self, _) -> _),
                        deallocate: core::mem::transmute(Self::deallocate as unsafe fn(&Self, _, _) -> _),
                        grow: core::mem::transmute(Self::grow as unsafe fn(&Self, _, _, _) -> _),
                        grow_zeroed: core::mem::transmute(Self::grow_zeroed as unsafe fn(&Self, _, _, _) -> _),
                        shrink: core::mem::transmute(Self::shrink as unsafe fn(&Self, _, _, _) -> _),

                        clone: Some(|dst: *mut (), src: *const ()| (dst as *mut Self).write((*(src as *const Self)).clone())),
                        drop: core::mem::transmute(core::ptr::drop_in_place::<Self> as unsafe fn(_) -> _),
                    }
                };
            }
            impl<A: Allocator + Send + Clone> AllocClone for A {}

            Ok(Self {
                alloc: {
                    let storage = UnsafeCell::new(MaybeUninit::<*const ()>::uninit());
                    // Same in-place move as in `try_new_unclone`.
                    unsafe { (storage.get() as *mut A).write(alloc) };
                    storage
                },
                vtable: <A as AllocClone>::VTABLE,
                __marker: PhantomData,
            })
        } else {
            Err(alloc)
        }
    }

    /// Type-erases a shared reference; the result is always cloneable, since
    /// `&A` is `Copy`.
    #[inline]
    pub fn from_ref<A: Allocator + Sync>(alloc: &'a A) -> Self {
        match Self::try_new_clone(alloc) {
            Ok(me) => me,
            // `&A` is exactly pointer-sized and pointer-aligned.
            Err(_) => unreachable!(),
        }
    }

    /// Type-erases an exclusive reference; the result is not cloneable.
    #[inline]
    pub fn from_mut<A: Allocator + Send>(alloc: &'a mut A) -> Self {
        match Self::try_new_unclone(alloc) {
            Ok(me) => me,
            // `&mut A` is exactly pointer-sized and pointer-aligned.
            Err(_) => unreachable!(),
        }
    }

    /// Reborrows `self` as a new type-erased allocator with a shorter lifetime.
    #[inline]
    pub fn by_mut(&mut self) -> DynAlloc<'_> {
        DynAlloc::from_mut(self)
    }

    /// Returns `true` if this handle was built with a cloning vtable entry,
    /// i.e. if `clone` will not panic.
    #[inline]
    pub fn cloneable(&self) -> bool {
        self.vtable.clone.is_some()
    }
}
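
// A minimal sketch of type erasure through `DynAlloc` (hypothetical test,
// assuming the `alloc` feature): `Global` is erased behind the vtable either
// by value (uncloneable) or through a shared reference (cloneable, since
// `&Global` is `Clone`).
#[cfg(all(test, feature = "alloc"))]
mod dyn_alloc_example {
    use super::*;

    #[test]
    fn erased_global_round_trip() {
        let erased = DynAlloc::try_new_unclone(Global).ok().unwrap();
        assert!(!erased.cloneable());

        let layout = Layout::new::<[u32; 4]>();
        let ptr = erased.allocate(layout).unwrap();
        // SAFETY: `ptr` was just allocated with `layout` by `erased`.
        unsafe { erased.deallocate(ptr.cast(), layout) };

        // Erasing a shared reference instead yields a cloneable handle.
        let global = Global;
        let by_ref = DynAlloc::from_ref(&global);
        assert!(by_ref.cloneable());
        let _second = by_ref.clone();
    }
}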