use crate::{
alloc::{AbortAlloc, AllocRef, BuildAllocRef, DeallocRef, Global, NonZeroLayout},
clone::CloneIn,
collections::CollectionAllocErr,
raw_vec::RawVec,
UncheckedResultExt,
};
use core::{
any::Any,
borrow,
cmp::Ordering,
fmt,
future::Future,
hash::{Hash, Hasher},
iter::FusedIterator,
marker::PhantomData,
mem,
num::NonZeroUsize,
ops::{Deref, DerefMut},
pin::Pin,
ptr::{self, NonNull},
slice,
task::{Context, Poll},
};
/// An allocator-aware owned pointer: like `alloc::boxed::Box`, but it also
/// carries a builder `B` from which the allocator that owns the pointee can
/// be reconstructed (used on drop and when cloning).
pub struct Box<T: ?Sized, B: BuildAllocRef = AbortAlloc<Global>> {
    // Pointer to the owned value; dangling (but aligned) for zero-sized `T`.
    ptr: NonNull<T>,
    // Builder from which the deallocating allocator is rebuilt.
    build_alloc: B,
    // Marks logical ownership of a `T` for drop-check/variance purposes.
    _owned: PhantomData<T>,
}
// SAFETY: the box uniquely owns its pointee, so thread transfer/sharing is
// gated on the carried builder `B`.
// NOTE(review): unlike `std::boxed::Box` (which requires `T: Send` / `T: Sync`),
// these impls place no bound on `T` — confirm this is intentional, as written
// it would allow sending a `Box` of a non-`Send` payload across threads.
unsafe impl<T: ?Sized, B: BuildAllocRef + Send> Send for Box<T, B> {}
unsafe impl<T: ?Sized, B: BuildAllocRef + Sync> Sync for Box<T, B> {}
#[allow(clippy::use_self)]
impl<T> Box<T> {
    /// Allocates memory in the default allocator and moves `x` into it.
    #[inline(always)]
    #[must_use]
    pub fn new(x: T) -> Self {
        Self::new_in(x, AbortAlloc(Global))
    }
    /// Allocates uninitialized memory for a `T` in the default allocator.
    #[inline(always)]
    #[must_use]
    pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
        Self::new_uninit_in(AbortAlloc(Global))
    }
    /// Boxes `x` in the default allocator and pins it in place.
    #[inline(always)]
    pub fn pin(x: T) -> Pin<Self> {
        Self::new(x).into()
    }
}
#[allow(clippy::use_self)]
impl<T, B: BuildAllocRef> Box<T, B>
where
    B::Ref: AllocRef,
{
    /// Moves `x` into memory obtained from allocator `a`.
    ///
    /// Only available when allocation cannot fail (`Error = Never`).
    #[inline(always)]
    pub fn new_in(x: T, a: B::Ref) -> Self
    where
        B::Ref: AllocRef<Error = crate::Never>,
    {
        // SAFETY: the allocator's error type is uninhabited, so `try_new_in`
        // can never return `Err`.
        unsafe { Self::try_new_in(x, a).unwrap_unchecked() }
    }
    /// Fallible version of `new_in`: returns the allocator's error on
    /// allocation failure instead of aborting.
    pub fn try_new_in(x: T, mut a: B::Ref) -> Result<Self, <B::Ref as AllocRef>::Error> {
        let ptr = if let Ok(layout) = NonZeroLayout::new::<T>() {
            // Non-zero-sized `T`: allocate and move the value into place.
            let ptr = a.alloc(layout)?.cast::<T>();
            unsafe {
                ptr.as_ptr().write(x);
            }
            ptr
        } else {
            // Zero-sized `T`: no allocation is needed; a dangling (aligned,
            // non-null) pointer is a valid ZST pointer. Ownership of `x`
            // still transfers to the box, so forget it here — the box's
            // `Drop` runs `drop_in_place` on the pointee, and without this
            // `forget` a ZST with a `Drop` impl would be dropped twice
            // (once at the end of this scope, once when the box drops).
            mem::forget(x);
            NonNull::dangling()
        };
        unsafe { Ok(Self::from_raw_in(ptr.as_ptr(), a.get_build_alloc())) }
    }
    /// Allocates uninitialized memory for a `T` in allocator `a`.
    #[inline(always)]
    pub fn new_uninit_in(a: B::Ref) -> Box<mem::MaybeUninit<T>, B>
    where
        B::Ref: AllocRef<Error = crate::Never>,
    {
        // SAFETY: the error type is uninhabited (see `new_in`).
        unsafe { Self::try_new_uninit_in(a).unwrap_unchecked() }
    }
    /// Fallible version of `new_uninit_in`.
    pub fn try_new_uninit_in(
        mut a: B::Ref,
    ) -> Result<Box<mem::MaybeUninit<T>, B>, <B::Ref as AllocRef>::Error> {
        let ptr = if let Ok(layout) = NonZeroLayout::new::<T>() {
            let ptr: NonNull<mem::MaybeUninit<T>> = a.alloc(layout)?.cast();
            ptr
        } else {
            // Zero-sized `T` owns no memory.
            NonNull::dangling()
        };
        unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), a.get_build_alloc())) }
    }
    /// Boxes `x` in allocator `a` and pins it in place.
    #[inline(always)]
    pub fn pin_in(x: T, a: B::Ref) -> Pin<Self>
    where
        B::Ref: AllocRef<Error = crate::Never>,
    {
        // SAFETY: the error type is uninhabited (see `new_in`).
        unsafe { Self::try_pin_in(x, a).unwrap_unchecked() }
    }
    /// Fallible version of `pin_in`.
    #[inline]
    pub fn try_pin_in(x: T, a: B::Ref) -> Result<Pin<Self>, <B::Ref as AllocRef>::Error> {
        Self::try_new_in(x, a).map(Pin::from)
    }
}
#[allow(clippy::use_self)]
impl<T> Box<[T]> {
    /// Allocates an uninitialized slice of `len` elements in the default
    /// allocator.
    #[inline(always)]
    #[must_use]
    pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
        Self::new_uninit_slice_in(len, AbortAlloc(Global))
    }
}
#[allow(clippy::use_self)]
impl<T, B: BuildAllocRef> Box<[T], B>
where
    B::Ref: AllocRef,
{
    /// Allocates an uninitialized slice of `len` elements in allocator `a`.
    #[inline(always)]
    pub fn new_uninit_slice_in(len: usize, a: B::Ref) -> Box<[mem::MaybeUninit<T>], B>
    where
        B::Ref: AllocRef<Error = crate::Never>,
    {
        // SAFETY: the allocator itself cannot fail (`Error = Never`).
        // NOTE(review): `try_new_uninit_slice_in` can also fail on layout
        // overflow of `len * size_of::<T>()`, independent of the allocator —
        // confirm `unwrap_unchecked` is sound here or handle that case.
        unsafe { Self::try_new_uninit_slice_in(len, a).unwrap_unchecked() }
    }
    /// Fallible allocation of an uninitialized slice of `len` elements;
    /// reports both layout overflow and allocator failure.
    pub fn try_new_uninit_slice_in(
        len: usize,
        mut a: B::Ref,
    ) -> Result<Box<[mem::MaybeUninit<T>], B>, CollectionAllocErr<B>> {
        let ptr = if mem::size_of::<T>() == 0 || len == 0 {
            // Zero total size: no allocation, a dangling pointer suffices.
            NonNull::dangling()
        } else {
            // SAFETY: `len` was just checked to be non-zero.
            let len = unsafe { NonZeroUsize::new_unchecked(len) };
            let layout = NonZeroLayout::array::<mem::MaybeUninit<T>>(len)?;
            a.alloc(layout)
                .map_err(|inner| CollectionAllocErr::AllocError { layout, inner })?
        };
        unsafe {
            // Reassemble a fat pointer carrying `len` before taking ownership.
            let slice = slice::from_raw_parts_mut(ptr.cast().as_ptr(), len);
            Ok(Box::from_raw_in(
                NonNull::from(slice).as_ptr(),
                a.get_build_alloc(),
            ))
        }
    }
}
#[allow(clippy::use_self)]
impl<T, B: BuildAllocRef> Box<mem::MaybeUninit<T>, B> {
    /// Converts to `Box<T, B>`, assuming the value is initialized.
    ///
    /// # Safety
    ///
    /// The contained value must be fully initialized; otherwise behavior is
    /// undefined as soon as the resulting `T` is read or dropped.
    #[inline]
    pub unsafe fn assume_init(self) -> Box<T, B> {
        let (ptr, b) = Self::into_raw_alloc(self);
        Box::from_raw_in((*ptr).as_mut_ptr(), b)
    }
}
#[allow(clippy::use_self)]
impl<T, B: BuildAllocRef> Box<[mem::MaybeUninit<T>], B> {
    /// Converts to `Box<[T], B>`, assuming every element is initialized.
    ///
    /// # Safety
    ///
    /// All elements of the slice must be fully initialized. The pointer cast
    /// preserves the slice length.
    #[inline]
    pub unsafe fn assume_init(self) -> Box<[T], B> {
        let (ptr, b) = Self::into_raw_alloc(self);
        Box::from_raw_in(ptr as *mut [T], b)
    }
}
impl<T: ?Sized> Box<T> {
    /// Constructs a box from a raw pointer, using the default allocator.
    ///
    /// # Safety
    ///
    /// `raw` must be non-null and point to a value the box may take
    /// ownership of (allocated by the default allocator, or dangling for a
    /// zero-sized value); it will be dropped and deallocated by the box.
    #[inline(always)]
    pub unsafe fn from_raw(raw: *mut T) -> Self {
        Self::from_raw_in(raw, AbortAlloc(Global))
    }
}
impl<T: ?Sized, B: BuildAllocRef> Box<T, B> {
    /// Constructs a box from a raw pointer and an allocator builder.
    ///
    /// # Safety
    ///
    /// `raw` must be non-null and point to a value this box may take
    /// ownership of: it will be dropped in place and, when its layout is
    /// non-zero-sized, deallocated with an allocator rebuilt from `builder`.
    #[inline]
    pub unsafe fn from_raw_in(raw: *mut T, builder: B) -> Self {
        Self {
            ptr: NonNull::new_unchecked(raw),
            build_alloc: builder,
            _owned: PhantomData,
        }
    }
    /// Shared access to the allocator builder associated with this box.
    pub fn build_alloc(&self) -> &B {
        &self.build_alloc
    }
    /// Mutable access to the allocator builder associated with this box.
    pub fn build_alloc_mut(&mut self) -> &mut B {
        &mut self.build_alloc
    }
    /// Rebuilds the allocator owning this box's memory together with the
    /// pointee's layout (`None` when `T` is zero-sized and owns no memory).
    pub fn alloc_ref(&mut self) -> (B::Ref, Option<NonZeroLayout>) {
        let layout = NonZeroLayout::for_value(self.as_ref());
        let ptr = self.ptr.cast();
        // SAFETY: `ptr`/`layout` describe exactly the allocation this box
        // owns, which is what `build_alloc_ref` requires.
        let alloc = unsafe { self.build_alloc_mut().build_alloc_ref(ptr, layout) };
        (alloc, layout)
    }
    /// Consumes the box, returning its raw pointer. The caller becomes
    /// responsible for dropping and deallocating the value.
    #[inline]
    pub fn into_raw(b: Self) -> *mut T {
        Self::into_raw_alloc(b).0
    }
    /// Like `into_raw`, but also returns the allocator builder.
    #[inline]
    pub fn into_raw_alloc(b: Self) -> (*mut T, B) {
        let (ptr, builder) = Self::into_raw_non_null_alloc(b);
        (ptr.as_ptr(), builder)
    }
    /// Consumes the box, returning its pointer as a `NonNull`.
    #[inline]
    pub fn into_raw_non_null(b: Self) -> NonNull<T> {
        Self::into_raw_non_null_alloc(b).0
    }
    /// Like `into_raw_non_null`, but also returns the allocator builder.
    #[inline]
    pub fn into_raw_non_null_alloc(b: Self) -> (NonNull<T>, B) {
        // `NonNull` is `Copy`, so the stored pointer can be returned
        // directly. (The previous round-trip through `ptr.as_mut()` and
        // `NonNull::new_unchecked` materialized a needless `&mut T`.)
        let ptr = b.ptr;
        unsafe {
            // SAFETY: `b` is forgotten immediately below, so the builder is
            // moved out exactly once and no destructor observes the hole.
            let alloc = ptr::read(b.build_alloc());
            mem::forget(b);
            (ptr, alloc)
        }
    }
    #[inline]
    #[doc(hidden)]
    #[cfg(feature = "ptr_internals")]
    pub fn into_unique(b: Self) -> core::ptr::Unique<T> {
        Self::into_unique_alloc(b).0
    }
    #[inline]
    #[doc(hidden)]
    #[cfg(feature = "ptr_internals")]
    pub fn into_unique_alloc(b: Self) -> (core::ptr::Unique<T>, B) {
        let (ptr, alloc) = Self::into_raw_non_null_alloc(b);
        // SAFETY: the pointer originates from a live box, so it is non-null
        // and uniquely owned.
        (unsafe { core::ptr::Unique::new_unchecked(ptr.as_ptr()) }, alloc)
    }
    /// Consumes and leaks the box, returning a mutable reference with a
    /// caller-chosen lifetime. The value is never deallocated.
    #[inline]
    pub fn leak<'a>(b: Self) -> &'a mut T
    where
        T: 'a,
    {
        // SAFETY: `into_raw` transfers ownership to the caller; the pointer
        // is valid and unaliased, so a unique reference may be formed.
        unsafe { &mut *Self::into_raw(b) }
    }
    /// Pins the box; the heap-allocated pointee will not move.
    pub fn into_pin(boxed: Self) -> Pin<Self> {
        // SAFETY: the value is behind a stable heap pointer and `Pin` takes
        // ownership of it, so the pinning invariant is upheld.
        unsafe { Pin::new_unchecked(boxed) }
    }
}
/// Shared drop logic for both `Drop` impls: runs the pointee's destructor,
/// then returns the memory to an allocator rebuilt from the box's builder.
fn drop_box<T: ?Sized, B: BuildAllocRef>(boxed: &mut Box<T, B>) {
    unsafe {
        let ptr = boxed.ptr;
        // SAFETY: the box logically owns the pointee and this is only
        // reached from `Drop`, so the value is dropped exactly once.
        ptr::drop_in_place(ptr.as_ptr());
        // `layout` is `None` for zero-sized pointees, which own no
        // allocation and therefore must not be deallocated.
        if let (mut alloc, Some(layout)) = boxed.alloc_ref() {
            alloc.dealloc(ptr.cast(), layout)
        }
    }
}
#[cfg(feature = "dropck_eyepatch")]
// `#[may_dangle]` (nightly) promises drop-check that this impl only drops
// values of type `T` and never otherwise accesses them.
unsafe impl<#[may_dangle] T: ?Sized, B: BuildAllocRef> Drop for Box<T, B> {
    fn drop(&mut self) {
        drop_box(self);
    }
}
impl<T, B: BuildAllocRef> Default for Box<T, B>
where
    T: Default,
    B::Ref: Default + AllocRef<Error = crate::Never>,
{
    /// Boxes `T::default()` using the allocator's default instance.
    #[must_use]
    fn default() -> Self {
        let value = T::default();
        Self::new_in(value, <B as BuildAllocRef>::Ref::default())
    }
}
#[cfg(feature = "coerce_unsized")]
#[allow(clippy::use_self)]
impl<T, B: BuildAllocRef> Default for Box<[T], B>
where
    B::Ref: Default + AllocRef<Error = crate::Never>,
{
    /// Creates an empty boxed slice; relies on the `CoerceUnsized` impl to
    /// turn `Box<[T; 0], B>` into the fat-pointer `Box<[T], B>`.
    #[must_use]
    fn default() -> Self {
        Box::<[T; 0], B>::new_in([], <B as BuildAllocRef>::Ref::default())
    }
}
/// Reinterprets boxed bytes as a boxed `str` without validation.
///
/// # Safety
///
/// The bytes in `v` must be valid UTF-8.
#[inline]
unsafe fn from_boxed_utf8_unchecked<B: BuildAllocRef>(v: Box<[u8], B>) -> Box<str, B> {
    let (ptr, b) = Box::into_raw_alloc(v);
    Box::from_raw_in(ptr as *mut str, b)
}
#[cfg(feature = "coerce_unsized")]
#[allow(clippy::use_self)]
impl<B: BuildAllocRef> Default for Box<str, B>
where
    B::Ref: Default + AllocRef<Error = crate::Never>,
{
    /// Creates an empty boxed `str` from the default (empty) boxed byte
    /// slice.
    #[must_use]
    fn default() -> Self {
        // SAFETY: the empty byte slice is trivially valid UTF-8.
        unsafe { from_boxed_utf8_unchecked(Box::default()) }
    }
}
#[cfg(not(feature = "dropck_eyepatch"))]
// Fallback `Drop` without the `#[may_dangle]` eyepatch when the nightly
// feature is disabled; behavior is identical.
impl<T: ?Sized, B: BuildAllocRef> Drop for Box<T, B> {
    fn drop(&mut self) {
        drop_box(self);
    }
}
impl<T: Clone, B: BuildAllocRef + Clone> Clone for Box<T, B>
where
    B::Ref: AllocRef<Error = crate::Never>,
{
    /// Clones the pointee into a new box allocated via a clone of this
    /// box's allocator builder.
    #[inline]
    fn clone(&self) -> Self {
        let mut b = self.build_alloc().clone();
        let old_ptr = self.ptr.cast();
        let old_layout = NonZeroLayout::for_value(self.as_ref());
        unsafe {
            // SAFETY: `old_ptr`/`old_layout` describe this box's allocation,
            // as required to rebuild an allocator from the cloned builder.
            let a = b.build_alloc_ref(old_ptr, old_layout);
            self.clone_in(a)
        }
    }
    /// Clones into an existing box, reusing its allocation.
    #[inline]
    fn clone_from(&mut self, source: &Self) {
        (**self).clone_from(&(**source));
    }
}
#[allow(clippy::use_self)]
impl<T: Clone, A: AllocRef, B: BuildAllocRef> CloneIn<A> for Box<T, B>
where
    B::Ref: AllocRef,
{
    type Cloned = Box<T, A::BuildAlloc>;
    /// Clones the pointee into a fresh box managed by allocator `a`.
    fn clone_in(&self, a: A) -> Self::Cloned
    where
        A: AllocRef<Error = crate::Never>,
    {
        let value = (**self).clone();
        Box::new_in(value, a)
    }
    /// Fallible variant of `clone_in`: allocation errors are returned.
    fn try_clone_in(&self, a: A) -> Result<Self::Cloned, A::Error> {
        let value = (**self).clone();
        Box::try_new_in(value, a)
    }
}
impl<T: ?Sized + PartialEq, B: BuildAllocRef> PartialEq for Box<T, B> {
    /// Boxes compare equal exactly when their pointees do.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        **self == **other
    }
    // Forward `ne` as well so types with a specialized `ne` keep it.
    #[allow(clippy::partialeq_ne_impl)]
    #[inline]
    fn ne(&self, other: &Self) -> bool {
        **self != **other
    }
}
impl<T: ?Sized + PartialOrd, B: BuildAllocRef> PartialOrd for Box<T, B> {
    /// Forwards the partial ordering of the pointees.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        (**self).partial_cmp(&**other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        **self < **other
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        **self <= **other
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        **self > **other
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        **self >= **other
    }
}
impl<T: ?Sized + Ord, B: BuildAllocRef> Ord for Box<T, B> {
    /// Forwards the total ordering of the pointees.
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        (**self).cmp(&**other)
    }
}
// Box equality is pointee equality, which is total when `T: Eq`.
impl<T: ?Sized + Eq, B: BuildAllocRef> Eq for Box<T, B> {}
impl<T: ?Sized + Hash, B: BuildAllocRef> Hash for Box<T, B> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
impl<T: ?Sized + Hasher, B: BuildAllocRef> Hasher for Box<T, B> {
fn finish(&self) -> u64 {
(**self).finish()
}
fn write(&mut self, bytes: &[u8]) {
(**self).write(bytes)
}
fn write_u8(&mut self, i: u8) {
(**self).write_u8(i)
}
fn write_u16(&mut self, i: u16) {
(**self).write_u16(i)
}
fn write_u32(&mut self, i: u32) {
(**self).write_u32(i)
}
fn write_u64(&mut self, i: u64) {
(**self).write_u64(i)
}
fn write_u128(&mut self, i: u128) {
(**self).write_u128(i)
}
fn write_usize(&mut self, i: usize) {
(**self).write_usize(i)
}
fn write_i8(&mut self, i: i8) {
(**self).write_i8(i)
}
fn write_i16(&mut self, i: i16) {
(**self).write_i16(i)
}
fn write_i32(&mut self, i: i32) {
(**self).write_i32(i)
}
fn write_i64(&mut self, i: i64) {
(**self).write_i64(i)
}
fn write_i128(&mut self, i: i128) {
(**self).write_i128(i)
}
fn write_isize(&mut self, i: isize) {
(**self).write_isize(i)
}
}
impl<T, B: BuildAllocRef> From<T> for Box<T, B>
where
    B::Ref: Default + AllocRef<Error = crate::Never>,
{
    /// Moves `t` onto the heap via the allocator's default instance.
    fn from(t: T) -> Self {
        let alloc = <B as BuildAllocRef>::Ref::default();
        Self::new_in(t, alloc)
    }
}
impl<T: ?Sized, B: BuildAllocRef> From<Box<T, B>> for Pin<Box<T, B>> {
    /// Pins the boxed value; the heap-allocated pointee will not move.
    fn from(boxed: Box<T, B>) -> Self {
        Box::into_pin(boxed)
    }
}
#[allow(clippy::use_self)]
impl<T: Copy, B: BuildAllocRef> From<&[T]> for Box<[T], B>
where
    B::Ref: Default + AllocRef<Error = crate::Never>,
{
    /// Copies the slice into a freshly allocated boxed slice.
    fn from(slice: &[T]) -> Self {
        let len = slice.len();
        let buf = RawVec::with_capacity_in(len, <B as BuildAllocRef>::Ref::default());
        unsafe {
            // SAFETY: `buf` was just allocated with capacity `len` and
            // cannot overlap `slice`; `T: Copy` makes the bitwise copy valid.
            ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
            // All `len` elements are initialized at this point.
            buf.into_box().assume_init()
        }
    }
}
#[allow(clippy::use_self)]
impl<B: BuildAllocRef> From<&str> for Box<str, B>
where
    B::Ref: Default + AllocRef<Error = crate::Never>,
{
    /// Copies the string slice into a freshly allocated boxed `str`.
    #[inline]
    #[must_use]
    fn from(s: &str) -> Self {
        // SAFETY: the bytes come straight from a `str`, so they are UTF-8.
        unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
    }
}
#[allow(clippy::use_self)]
impl<B: BuildAllocRef> From<Box<str, B>> for Box<[u8], B> {
    /// Reinterprets a boxed `str` as its underlying boxed byte slice.
    #[inline]
    fn from(s: Box<str, B>) -> Self {
        let (raw, builder) = Box::into_raw_alloc(s);
        // SAFETY: `raw` came from a live boxed `str`; `str` has the same
        // layout as `[u8]`, so the pointer cast is valid and keeps the
        // length.
        unsafe { Self::from_raw_in(raw as *mut [u8], builder) }
    }
}
#[allow(clippy::use_self)]
impl<B: BuildAllocRef> Box<dyn Any, B> {
    /// Attempts to downcast the box to a concrete type, returning the
    /// original box unchanged on type mismatch.
    #[inline]
    pub fn downcast<T: Any>(self) -> Result<Box<T, B>, Box<dyn Any, B>> {
        if self.is::<T>() {
            unsafe {
                // SAFETY: `is::<T>()` just confirmed the erased type is `T`,
                // so the pointer cast is valid.
                let (raw, b): (*mut dyn Any, _) = Self::into_raw_alloc(self);
                Ok(Box::from_raw_in(raw as *mut T, b))
            }
        } else {
            Err(self)
        }
    }
}
#[allow(clippy::use_self)]
impl<B: BuildAllocRef> Box<dyn Any + Send, B> {
    /// Attempts to downcast the box to a concrete type, returning the
    /// original box unchanged on type mismatch.
    #[inline]
    pub fn downcast<T: Any>(self) -> Result<Box<T, B>, Box<dyn Any + Send, B>> {
        if self.is::<T>() {
            unsafe {
                // SAFETY: `is::<T>()` just confirmed the erased type is `T`,
                // so the pointer cast is valid.
                let (raw, b): (*mut (dyn Any + Send), _) = Self::into_raw_alloc(self);
                Ok(Box::from_raw_in(raw as *mut T, b))
            }
        } else {
            Err(self)
        }
    }
}
impl<T: fmt::Display + ?Sized, B: BuildAllocRef> fmt::Display for Box<T, B> {
    /// Boxes are transparent for display: format the pointee.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        <T as fmt::Display>::fmt(self, f)
    }
}
impl<T: fmt::Debug + ?Sized, B: BuildAllocRef> fmt::Debug for Box<T, B> {
    /// Boxes are transparent for debug output: format the pointee.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        <T as fmt::Debug>::fmt(self, f)
    }
}
impl<T: ?Sized, B: BuildAllocRef> fmt::Pointer for Box<T, B> {
    /// Formats the address of the heap allocation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let raw: *const T = self.ptr.as_ptr();
        fmt::Pointer::fmt(&raw, f)
    }
}
impl<T: ?Sized, B: BuildAllocRef> Deref for Box<T, B> {
    type Target = T;
    fn deref(&self) -> &T {
        // SAFETY: the box owns a live, initialized `T`; for zero-sized `T`
        // the dangling-but-aligned pointer is still a valid reference.
        unsafe { self.ptr.as_ref() }
    }
}
impl<T: ?Sized, B: BuildAllocRef> DerefMut for Box<T, B> {
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: the box uniquely owns the pointee and we hold `&mut self`,
        // so a unique reference may be handed out.
        unsafe { self.ptr.as_mut() }
    }
}
#[cfg(feature = "receiver_trait")]
// Nightly-only: lets `Box<T, B>` act as a method receiver type.
impl<T: ?Sized, B: BuildAllocRef> core::ops::Receiver for Box<T, B> {}
impl<I: Iterator + ?Sized, B: BuildAllocRef> Iterator for Box<I, B> {
    type Item = I::Item;
    /// All iterator queries are delegated to the boxed iterator.
    fn next(&mut self) -> Option<I::Item> {
        I::next(&mut **self)
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        I::size_hint(&**self)
    }
    fn last(self) -> Option<I::Item> {
        // `Iterator::last` requires `Self: Sized`; route through the
        // private `BoxIter` helper so a boxed unsized `I` works too.
        BoxIter::last(self)
    }
    fn nth(&mut self, n: usize) -> Option<I::Item> {
        I::nth(&mut **self, n)
    }
}
// Helper trait: `Iterator::last` takes `self` by value with a `Self: Sized`
// bound, so it cannot be forwarded directly when `I` is unsized. `Box<I, B>`
// itself is always `Sized`, so routing through this trait sidesteps the
// bound.
trait BoxIter {
    type Item;
    fn last(self) -> Option<Self::Item>;
}
impl<I: Iterator + ?Sized, B: BuildAllocRef> BoxIter for Box<I, B> {
    type Item = I::Item;
    /// Consumes the iterator, keeping only the most recently yielded item.
    fn last(self) -> Option<I::Item> {
        self.fold(None, |_, item| Some(item))
    }
}
impl<I: DoubleEndedIterator + ?Sized, B: BuildAllocRef> DoubleEndedIterator for Box<I, B> {
    /// Back-to-front iteration is delegated to the boxed iterator.
    fn next_back(&mut self) -> Option<I::Item> {
        I::next_back(&mut **self)
    }
    fn nth_back(&mut self, n: usize) -> Option<I::Item> {
        I::nth_back(&mut **self, n)
    }
}
impl<I: ExactSizeIterator + ?Sized, B: BuildAllocRef> ExactSizeIterator for Box<I, B> {
    /// Length queries are delegated to the boxed iterator.
    fn len(&self) -> usize {
        I::len(&**self)
    }
    #[cfg(feature = "exact_size_is_empty")]
    fn is_empty(&self) -> bool {
        I::is_empty(&**self)
    }
}
// A boxed fused iterator is itself fused: every call is forwarded unchanged.
impl<I: FusedIterator + ?Sized, B: BuildAllocRef> FusedIterator for Box<I, B> {}
#[cfg(feature = "fn_traits")]
// Nightly-only: lets boxed callables be invoked directly.
// NOTE(review): the `F: Copy` bound is unusual for an `FnOnce` forward —
// `*self` copies the callee out rather than moving it out of the box;
// confirm the bound is intentional rather than a workaround.
impl<A, F: FnOnce<A> + Copy + ?Sized, B: BuildAllocRef> FnOnce<A> for Box<F, B> {
    type Output = <F as FnOnce<A>>::Output;
    extern "rust-call" fn call_once(self, args: A) -> Self::Output {
        <F as FnOnce<A>>::call_once(*self, args)
    }
}
#[cfg(feature = "fn_traits")]
impl<A, F: FnMut<A> + Copy + ?Sized, B: BuildAllocRef> FnMut<A> for Box<F, B> {
    extern "rust-call" fn call_mut(&mut self, args: A) -> Self::Output {
        <F as FnMut<A>>::call_mut(self, args)
    }
}
#[cfg(feature = "fn_traits")]
impl<A, F: Fn<A> + Copy + ?Sized, B: BuildAllocRef> Fn<A> for Box<F, B> {
    extern "rust-call" fn call(&self, args: A) -> Self::Output {
        <F as Fn<A>>::call(self, args)
    }
}
#[cfg(feature = "coerce_unsized")]
// Nightly-only: enables unsizing coercions such as
// `Box<[T; N], B> -> Box<[T], B>` and `Box<T, B> -> Box<dyn Trait, B>`.
impl<T: ?Sized + core::marker::Unsize<U>, U: ?Sized, B: BuildAllocRef>
    core::ops::CoerceUnsized<Box<U, B>> for Box<T, B>
{
}
// Stamps out a nightly-only `DispatchFromDyn` impl (object-safe `self` types)
// for one concrete allocator type; invoked below for each known allocator.
macro_rules! impl_dispatch_from_dyn {
    ($alloc:ty) => {
        #[cfg(feature = "dispatch_from_dyn")]
        impl<T: ?Sized + core::marker::Unsize<U>, U: ?Sized>
            core::ops::DispatchFromDyn<Box<U, $alloc>> for Box<T, $alloc>
        {
        }
    };
}
impl_dispatch_from_dyn!(Global);
impl_dispatch_from_dyn!(AbortAlloc<Global>);
#[cfg(feature = "std")]
impl_dispatch_from_dyn!(std::alloc::System);
#[cfg(feature = "std")]
impl_dispatch_from_dyn!(AbortAlloc<std::alloc::System>);
#[allow(clippy::items_after_statements)]
impl<T: Clone, B: BuildAllocRef + Clone> Clone for Box<[T], B>
where
    B::Ref: AllocRef<Error = crate::Never>,
{
    /// Clones the slice element by element into a new allocation obtained
    /// via a clone of this box's allocator builder.
    fn clone(&self) -> Self {
        let mut b = self.build_alloc().clone();
        let old_ptr = self.ptr.cast();
        let old_layout = NonZeroLayout::for_value(self.as_ref());
        // SAFETY: `old_ptr`/`old_layout` describe this box's allocation.
        let a = unsafe { b.build_alloc_ref(old_ptr, old_layout) };
        // `BoxBuilder` tracks how many elements have been initialized so
        // that a panic in `T::clone` drops only those (see its `Drop` impl).
        let mut new = BoxBuilder {
            data: RawVec::with_capacity_in(self.len(), a),
            len: 0,
        };
        let mut target = new.data.ptr();
        for item in self.iter() {
            unsafe {
                // SAFETY: `target` stays within the buffer of `self.len()`
                // elements reserved above.
                ptr::write(target, item.clone());
                target = target.offset(1);
            };
            new.len += 1;
        }
        return unsafe { new.into_box() };
        // Panic-safety guard: owns the buffer plus the count of elements
        // initialized so far.
        struct BoxBuilder<T, B: BuildAllocRef> {
            data: RawVec<T, B>,
            len: usize,
        }
        impl<T, B: BuildAllocRef> BoxBuilder<T, B> {
            // Converts the fully initialized buffer into a boxed slice,
            // skipping the guard's destructor via `mem::forget`.
            unsafe fn into_box(self) -> Box<[T], B> {
                let raw = ptr::read(&self.data);
                mem::forget(self);
                raw.into_box().assume_init()
            }
        }
        impl<T, B: BuildAllocRef> Drop for BoxBuilder<T, B> {
            // Drops the `len` initialized elements (each `ptr::read` moves
            // the value out and drops it); `RawVec`'s own drop frees the
            // memory afterwards.
            fn drop(&mut self) {
                let mut data = self.data.ptr();
                let max = unsafe { data.add(self.len) };
                while data != max {
                    unsafe {
                        ptr::read(data);
                        data = data.offset(1);
                    }
                }
            }
        }
    }
}
impl<T: ?Sized, B: BuildAllocRef> borrow::Borrow<T> for Box<T, B> {
fn borrow(&self) -> &T {
&**self
}
}
impl<T: ?Sized, B: BuildAllocRef> borrow::BorrowMut<T> for Box<T, B> {
fn borrow_mut(&mut self) -> &mut T {
&mut **self
}
}
impl<T: ?Sized, B: BuildAllocRef> AsRef<T> for Box<T, B> {
fn as_ref(&self) -> &T {
&**self
}
}
impl<T: ?Sized, B: BuildAllocRef> AsMut<T> for Box<T, B> {
fn as_mut(&mut self) -> &mut T {
&mut **self
}
}
// The box itself can always be moved without moving the heap pointee, so it
// is unconditionally `Unpin` regardless of `T`.
impl<T: ?Sized, B: BuildAllocRef> Unpin for Box<T, B> {}
impl<F: ?Sized + Future + Unpin, B: BuildAllocRef> Future for Box<F, B> {
    type Output = F::Output;
    /// Polls the boxed future by delegating to the pointee; re-pinning via
    /// `Pin::new` is safe because `F: Unpin`.
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        F::poll(Pin::new(&mut *self), cx)
    }
}