use crate::{Allocator, GenericAlloc, NullAlloc, Pool, PoolAlloc, Result};
use core::alloc::Layout;
use core::any::Any;
use core::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd};
use core::convert::{AsMut, AsRef, From};
use core::fmt::{self, Debug, Display, Formatter};
use core::hash::{Hash, Hasher};
use core::marker::PhantomData;
use core::mem::{needs_drop, ManuallyDrop, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr::{self, NonNull};
use core::slice;

#[repr(C)]
pub struct Boxed<'a, T: ?Sized + 'a, A: Allocator = PoolAlloc> {
    ptr: NonNull<T>,
    layout: Layout,
    alloc: &'a A,
    mark: PhantomData<T>,
}

unsafe impl<T: ?Sized + Send, A: Allocator + Pool> Send for Boxed<'static, T, A> {}
unsafe impl<T: ?Sized + Sync, A: Allocator> Sync for Boxed<'static, T, A> {}

impl<T> Boxed<'static, T, PoolAlloc> {
    pub fn new(val: T) -> Result<Self> {
        Boxed::new_in(&PoolAlloc, val)
    }
}

impl Boxed<'static, (), PoolAlloc> {
    pub fn zeroed<T>() -> Result<Boxed<'static, MaybeUninit<T>, PoolAlloc>> {
        Boxed::zeroed_in::<T>(&PoolAlloc)
    }
    pub fn uninit<T>() -> Result<Boxed<'static, MaybeUninit<T>, PoolAlloc>> {
        Boxed::uninit_in::<T>(&PoolAlloc)
    }
    pub fn zeroed_slice<T>(len: usize) -> Result<Boxed<'static, [MaybeUninit<T>], PoolAlloc>> {
        Boxed::zeroed_slice_in(&PoolAlloc, len)
    }
    pub fn uninit_slice<T>(len: usize) -> Result<Boxed<'static, [MaybeUninit<T>], PoolAlloc>> {
        Boxed::uninit_slice_in(&PoolAlloc, len)
    }
    pub fn new_then<T, F>(f: F) -> Result<Boxed<'static, T, PoolAlloc>>
    where
        F: FnOnce() -> Result<T>,
    {
        Boxed::new_then_in(&PoolAlloc, f)
    }
    pub fn new_slice_then<T, F>(len: usize, f: F) -> Result<Boxed<'static, [T], PoolAlloc>>
    where
        F: FnMut(usize) -> Result<T>,
    {
        Boxed::new_slice_then_in(&PoolAlloc, len, f)
    }
}

impl Boxed<'static, [u8], PoolAlloc> {
    pub fn new_buf(layout: Layout) -> Result<Self> {
        Self::new_buf_then(layout, |_| Ok(()))
    }

    pub fn new_buf_then<F>(layout: Layout, f: F) -> Result<Self>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<()>,
    {
        Self::new_buf_then_in(&PoolAlloc, layout, f)
    }
}

impl<T> Boxed<'static, [T], PoolAlloc> {
    pub fn new_slice(len: usize, val: T) -> Result<Self>
    where
        T: Clone,
    {
        Self::new_slice_in(&PoolAlloc, len, val)
    }
}

impl<'a, T, A: Allocator> Boxed<'a, T, A> {
    pub fn new_in(alloc: &'a A, val: T) -> Result<Self> {
        let ptr = unsafe { alloc.init(val)? };
        Ok(unsafe { Self::from_with(ptr, Layout::new::<T>(), alloc) })
    }
}

impl<'a, A: Allocator> Boxed<'a, (), A> {
    pub fn zeroed_in<T>(alloc: &'a A) -> Result<Boxed<'a, MaybeUninit<T>, A>> {
        let ptr = unsafe { alloc.zeroed::<T>()? };
        Ok(unsafe { Boxed::from_with(ptr, Layout::new::<T>(), alloc) })
    }
    pub fn uninit_in<T>(alloc: &'a A) -> Result<Boxed<'a, MaybeUninit<T>, A>> {
        let ptr = unsafe { alloc.uninit::<T>()? };
        Ok(unsafe { Boxed::from_with(ptr, Layout::new::<T>(), alloc) })
    }
    pub fn zeroed_slice_in<T>(alloc: &'a A, len: usize) -> Result<Boxed<'a, [MaybeUninit<T>], A>> {
        let ptr = unsafe { alloc.zeroed_slice::<T>(len)? };
        Ok(unsafe { Boxed::from_with(ptr, Layout::array::<T>(len).unwrap(), alloc) })
    }
    pub fn uninit_slice_in<T>(alloc: &'a A, len: usize) -> Result<Boxed<'a, [MaybeUninit<T>], A>> {
        let ptr = unsafe { alloc.uninit_slice::<T>(len)? };
        Ok(unsafe { Boxed::from_with(ptr, Layout::array::<T>(len).unwrap(), alloc) })
    }
    pub fn new_then_in<T, F>(alloc: &'a A, f: F) -> Result<Boxed<'a, T, A>>
    where
        F: FnOnce() -> Result<T>,
    {
        let ptr = unsafe { alloc.alloc_then(f)? };
        Ok(unsafe { Boxed::from_with(ptr, Layout::new::<T>(), alloc) })
    }

    pub fn new_slice_then_in<T, F>(alloc: &'a A, len: usize, f: F) -> Result<Boxed<'a, [T], A>>
    where
        F: FnMut(usize) -> Result<T>,
    {
        let ptr = unsafe { alloc.alloc_slice_then(len, f)? };
        Ok(unsafe { Boxed::from_with(ptr, Layout::array::<T>(len).unwrap(), alloc) })
    }
}

impl<'a, A: Allocator> Boxed<'a, [u8], A> {
    pub fn new_buf_in(alloc: &'a A, layout: Layout) -> Result<Self> {
        Self::new_buf_then_in(alloc, layout, |_| Ok(()))
    }

    pub fn new_buf_then_in<F>(alloc: &'a A, layout: Layout, f: F) -> Result<Self>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<()>,
    {
        let ptr = unsafe { alloc.alloc_buf(layout, f)? };
        Ok(unsafe { Self::from_with(ptr, layout, alloc) })
    }
}

impl<'a, T, A: Allocator> Boxed<'a, [T], A> {
    pub fn new_slice_in(alloc: &'a A, len: usize, val: T) -> Result<Self>
    where
        T: Clone,
    {
        let ptr = unsafe { alloc.init_slice(len, val)? };
        Ok(unsafe { Self::from_with(ptr, Layout::array::<T>(len).unwrap(), alloc) })
    }
}

impl<'a, T> Boxed<'a, T, NullAlloc> {
    pub unsafe fn from_raw(data: *mut T) -> Self {
        Self {
            ptr: NonNull::new(data).unwrap(),
            layout: Layout::new::<T>(),
            alloc: &NullAlloc,
            mark: PhantomData,
        }
    }
}

impl<T> Boxed<'_, [T], NullAlloc> {
    pub unsafe fn from_raw_slice(data: *mut T, len: usize) -> Self {
        // Build the fat pointer from the raw parts directly, so the mutable
        // provenance of `data` is preserved for the later writes and drops
        // done through this pointer.
        let slice = ptr::slice_from_raw_parts_mut(data, len);
        Self {
            ptr: NonNull::new(slice).unwrap(),
            layout: Layout::array::<T>(len).unwrap(),
            alloc: &NullAlloc,
            mark: PhantomData,
        }
    }
}

impl<'a, T, A: Allocator> Boxed<'a, MaybeUninit<T>, A> {
    pub fn write(mut self, val: T) -> Boxed<'a, T, A> {
        let _ = (*self).write(val);
        unsafe { Self::cast_unchecked::<T>(self) }
    }
}

impl<'a, T, A: Allocator> Boxed<'a, [MaybeUninit<T>], A> {
    pub fn write_slice_then<F>(mut self, mut f: F) -> Result<Boxed<'a, [T], A>>
    where
        F: FnMut(usize) -> Result<T>,
    {
        for (n, uninit) in self.iter_mut().enumerate() {
            match f(n) {
                Err(e) => {
                    for uninit in &mut self[0..n] {
                        unsafe {
                            uninit.assume_init_drop();
                        }
                    }
                    return Err(e);
                },
                Ok(val) => { uninit.write(val); },
            }
        }
        let len = self.len();
        Ok(unsafe { Self::cast_slice_unchecked::<T>(self, len) })
    }
}

impl<'a, T: ?Sized + 'a, A: Allocator> Boxed<'a, T, A> {
    pub unsafe fn from_with(ptr: NonNull<T>, layout: Layout, alloc: &'a A) -> Self {
        Self {
            ptr,
            layout,
            alloc,
            mark: PhantomData,
        }
    }

    pub unsafe fn from_boxed(data: Boxed<'a, T, NullAlloc>, alloc: &'a A) -> Self {
        let data = ManuallyDrop::new(data);
        Self {
            ptr: data.ptr,
            layout: data.layout,
            alloc,
            mark: PhantomData,
        }
    }

    pub const fn as_ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }

    pub unsafe fn as_other<U>(&self) -> &U {
        assert!(Layout::new::<U>().size() <= self.layout.size());
        unsafe { self.ptr.cast::<U>().as_ref() }
    }

    pub unsafe fn as_other_mut<U>(&mut self) -> &mut U {
        assert!(Layout::new::<U>().size() <= self.layout.size());
        unsafe { self.ptr.cast::<U>().as_mut() }
    }

    pub unsafe fn as_slice<U>(&self, len: usize) -> &[U] {
        assert!(Layout::array::<U>(len).unwrap().size() <= self.layout.size());
        unsafe { slice::from_raw_parts(self.as_other::<U>(), len) }
    }

    pub unsafe fn as_slice_mut<U>(&mut self, len: usize) -> &mut [U] {
        assert!(Layout::array::<U>(len).unwrap().size() <= self.layout.size());
        unsafe { slice::from_raw_parts_mut(self.as_other_mut::<U>(), len) }
    }

    pub unsafe fn cast<U>(self) -> core::result::Result<Boxed<'a, U, A>, Self> {
        let layout = Layout::new::<U>();
        if layout.size() <= self.layout.size() {
            Ok(self.cast_unchecked::<U>())
        } else {
            Err(self)
        }
    }

    pub unsafe fn cast_unchecked<U>(self) -> Boxed<'a, U, A> {
        assert!(Layout::new::<U>().size() <= self.layout.size());
        let this = ManuallyDrop::new(self);
        let ptr = NonNull::from(unsafe { this.as_other::<U>() });
        Boxed::from_with(ptr, this.layout, this.alloc)
    }

    pub unsafe fn cast_slice<U>(self, len: usize) -> core::result::Result<Boxed<'a, [U], A>, Self> {
        let Ok(layout) = Layout::array::<U>(len) else {
            return Err(self);
        };
        if layout.size() <= self.layout.size() {
            Ok(self.cast_slice_unchecked::<U>(len))
        } else {
            Err(self)
        }
    }

    pub unsafe fn cast_slice_unchecked<U>(self, len: usize) -> Boxed<'a, [U], A> {
        assert!(Layout::array::<U>(len).unwrap().size() <= self.layout.size());
        let this = ManuallyDrop::new(self);
        let ptr = NonNull::from(this.as_slice::<U>(len));
        Boxed::from_with(ptr, this.layout, this.alloc)
    }

    pub fn leak(self) -> (&'a mut T, Layout, &'a A) {
        let mut this = ManuallyDrop::new(self);
        (unsafe { this.ptr.as_mut() }, this.layout, this.alloc)
    }

    pub fn leak_boxed(self) -> Boxed<'a, T, NullAlloc> {
        let this = ManuallyDrop::new(self);
        unsafe { Boxed::from_with(NonNull::from(this.as_ref()), this.layout, &NullAlloc) }
    }
}

impl<T: ?Sized, A: Allocator> Drop for Boxed<'_, T, A> {
    #[inline]
    fn drop(&mut self) {
        if needs_drop::<T>() {
            unsafe { ptr::drop_in_place(self.ptr.as_ptr()) };
        }
        unsafe { self.alloc.release_with(self.ptr, self.layout) };
    }
}

impl<T: ?Sized, A: Allocator> AsRef<T> for Boxed<'_, T, A> {
    #[inline(always)]
    fn as_ref(&self) -> &T {
        unsafe { self.ptr.as_ref() }
    }
}

impl<T: ?Sized, A: Allocator> AsMut<T> for Boxed<'_, T, A> {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut T {
        unsafe { self.ptr.as_mut() }
    }
}

impl<T: ?Sized + Unpin, A: Allocator> Unpin for Boxed<'_, T, A> {}

impl<T: ?Sized, A: Allocator> Deref for Boxed<'_, T, A> {
    type Target = T;
    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe { self.ptr.as_ref() }
    }
}

impl<T: ?Sized, A: Allocator> DerefMut for Boxed<'_, T, A> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { self.ptr.as_mut() }
    }
}

impl<T: ?Sized + Display, A: Allocator> Display for Boxed<'_, T, A> {
    #[inline]
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(&**self, f)
    }
}

impl<T: ?Sized + Debug, A: Allocator> Debug for Boxed<'_, T, A> {
    #[inline]
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Debug::fmt(&**self, f)
    }
}

impl<T: ?Sized, A: Allocator> fmt::Pointer for Boxed<'_, T, A> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let ptr: *const T = &**self;
        fmt::Pointer::fmt(&ptr, f)
    }
}

impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Boxed<'_, T, A> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        PartialEq::eq(&**self, &**other)
    }
}

impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Boxed<'_, T, A> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        PartialOrd::lt(&**self, &**other)
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        PartialOrd::le(&**self, &**other)
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        PartialOrd::gt(&**self, &**other)
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        PartialOrd::ge(&**self, &**other)
    }
}

impl<T: ?Sized + Ord, A: Allocator> Ord for Boxed<'_, T, A> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        Ord::cmp(&**self, &**other)
    }
}

impl<T: ?Sized + Eq, A: Allocator> Eq for Boxed<'_, T, A> {}

impl<T: ?Sized + Hash, A: Allocator> Hash for Boxed<'_, T, A> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&**self, state)
    }
}

impl<T: ?Sized + Hasher, A: Allocator> Hasher for Boxed<'_, T, A> {
    #[inline]
    fn finish(&self) -> u64 {
        Hasher::finish(&**self)
    }
    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        Hasher::write(&mut **self, bytes)
    }
    #[inline]
    fn write_u8(&mut self, data: u8) {
        Hasher::write_u8(&mut **self, data)
    }
    #[inline]
    fn write_u16(&mut self, data: u16) {
        Hasher::write_u16(&mut **self, data)
    }
    #[inline]
    fn write_u32(&mut self, data: u32) {
        Hasher::write_u32(&mut **self, data)
    }
    #[inline]
    fn write_u64(&mut self, data: u64) {
        Hasher::write_u64(&mut **self, data)
    }
    #[inline]
    fn write_u128(&mut self, data: u128) {
        Hasher::write_u128(&mut **self, data)
    }
    #[inline]
    fn write_usize(&mut self, data: usize) {
        Hasher::write_usize(&mut **self, data)
    }
    #[inline]
    fn write_isize(&mut self, data: isize) {
        Hasher::write_isize(&mut **self, data)
    }
    #[inline]
    fn write_i8(&mut self, data: i8) {
        Hasher::write_i8(&mut **self, data)
    }
    #[inline]
    fn write_i16(&mut self, data: i16) {
        Hasher::write_i16(&mut **self, data)
    }
    #[inline]
    fn write_i32(&mut self, data: i32) {
        Hasher::write_i32(&mut **self, data)
    }
    #[inline]
    fn write_i64(&mut self, data: i64) {
        Hasher::write_i64(&mut **self, data)
    }
    #[inline]
    fn write_i128(&mut self, data: i128) {
        Hasher::write_i128(&mut **self, data)
    }
    #[cfg(feature = "nightly")]
    #[inline]
    fn write_str(&mut self, s: &str) {
        Hasher::write_str(&mut **self, s)
    }
    #[cfg(feature = "nightly")]
    #[inline]
    fn write_length_prefix(&mut self, len: usize) {
        Hasher::write_length_prefix(&mut **self, len)
    }
}

unsafe impl<'a, T: ?Sized + Allocator, A: Allocator> Allocator for Boxed<'a, T, A> {
    unsafe fn alloc_buf<F>(&self, layout: Layout, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<()>,
    {
        Allocator::alloc_buf(&**self, layout, f)
    }

    unsafe fn free_buf(&self, ptr: NonNull<[u8]>, layout: Layout) {
        Allocator::free_buf(&**self, ptr, layout)
    }
}

unsafe impl<T: ?Sized + Allocator + Pool, A: Allocator> Pool for Boxed<'_, T, A> {}

impl<'a, T: Any, A: Allocator> Boxed<'a, T, A> {
    pub fn to_any(self) -> Boxed<'a, dyn Any + 'static, A> {
        let this = ManuallyDrop::new(self);
        let any: &dyn Any = this.as_ref();
        Boxed {
            ptr: any.into(),
            layout: this.layout,
            alloc: this.alloc,
            mark: PhantomData,
        }
    }
}

// `core` only provides the `Any` helpers (`is`, the downcasts) for
// `dyn Any + 'static`, so the erased type is kept at the `'static` bound here.
impl<'a, A: Allocator> Boxed<'a, dyn Any + 'static, A> {
    pub fn downcast<T: Any>(self) -> core::result::Result<Boxed<'a, T, A>, Self> {
        if <dyn Any>::is::<T>(self.as_ref()) {
            let this = ManuallyDrop::new(self);
            Ok(Boxed {
                ptr: this.ptr.cast::<T>(),
                layout: this.layout,
                alloc: this.alloc,
                mark: PhantomData,
            })
        } else {
            Err(self)
        }
    }
}

impl<'a, T: ?Sized, A: Allocator> Boxed<'a, T, A> {
    pub fn upcast<U: ?Sized>(self, f: impl FnOnce(&T) -> &U) -> core::result::Result<Boxed<'a, U, A>, Self> {
        let ptr: NonNull<U> = f(self.as_ref()).into();
        if !ptr::eq(ptr.cast::<u8>().as_ptr(), self.ptr.cast::<u8>().as_ptr()) {
            return Err(self);
        }
        let this = ManuallyDrop::new(self);
        Ok(Boxed {
            ptr,
            layout: this.layout,
            alloc: this.alloc,
            mark: PhantomData,
        })
    }
}

impl<'a, T: ?Sized, A: Allocator> Boxed<'a, T, A> {
    pub fn layout(&self) -> Layout {
        self.layout
    }
    pub(crate) fn allocator(&self) -> &'a A {
        self.alloc
    }
}

#[cfg(test)]
mod test {
    extern crate std;
    use std::format;
    use std::string::ToString;

    use super::Boxed;
    use std::collections::HashSet;

    #[test]
    fn test_drop() {
        struct Foo;
        static mut DROP: usize = 0;
        impl Drop for Foo {
            fn drop(&mut self) {
                unsafe {
                    DROP += 1;
                }
            }
        }
        unsafe {
            DROP = 0;
        }

        // Keep `foo` in `ManuallyDrop` so it is dropped exactly once, through
        // the `Boxed`, and not a second time when it falls out of scope.
        let mut foo = core::mem::ManuallyDrop::new(Foo);
        {
            let _ = unsafe { Boxed::from_raw(&mut *foo) };
        }

        unsafe {
            assert_eq!(1, DROP);
        }
    }
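
    // Sketch test: assumes the global `PoolAlloc` can serve a small
    // allocation under the test harness (as `test_any` below already relies
    // on); checks `uninit` followed by `write`.
    #[test]
    fn test_uninit_write() {
        let uninit = Boxed::uninit::<u32>().unwrap();
        let val = uninit.write(7);
        assert_eq!(*val, 7);
    }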

    #[test]
    fn test_hash() {
        let mut val = 100;
        let mut other = 100;
        let pbox2 = unsafe { Boxed::from_raw(&mut other) };
        let pbox1 = unsafe { Boxed::from_raw(&mut val) };
        let mut set = HashSet::new();
        let ret = set.insert(pbox1);
        assert!(ret);
        let ret = set.insert(pbox2);
        assert!(!ret);
    }
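
    // Sketch test: assumes `PoolAlloc` can serve the allocation under the
    // test harness; checks the clone-fill constructor `new_slice`.
    #[test]
    fn test_new_slice() {
        let s = Boxed::new_slice(4, 7u32).unwrap();
        assert_eq!(s.len(), 4);
        assert!(s.iter().all(|&x| x == 7));
    }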
    #[test]
    fn test_deref() {
        let mut val = 1;
        {
            let mut pbox = unsafe { Boxed::from_raw(&mut val) };
            assert_eq!(1, *pbox);
            *pbox = 100;
            let s = format!("val = {}", pbox);
            assert_eq!(s, "val = 100".to_string());
            let s = format!("val = {:?}", pbox);
            assert_eq!(s, "val = 100".to_string());
        }
        assert_eq!(val, 100);
    }
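
    // Sketch test: assumes `PoolAlloc` can serve the allocation under the
    // test harness; builds a slice element by element with `new_slice_then`.
    #[test]
    fn test_new_slice_then() {
        let s = Boxed::new_slice_then(4, |n| Ok(n as u32)).unwrap();
        assert_eq!(&s[..], &[0u32, 1, 2, 3][..]);
    }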

    #[test]
    fn test_any() {
        static mut D: i32 = 0;
        #[derive(Debug)]
        struct Foo(i32);
        trait Bar {}
        impl Bar for Foo {}
        impl Drop for Foo {
            fn drop(&mut self) {
                unsafe {
                    D = 1;
                }
            }
        }
        {
            let v32 = Boxed::new(Foo(100)).unwrap();
            let any = v32.to_any();
            let v32 = any.downcast::<Foo>().unwrap();
            assert_eq!(v32.0, 100);

            // Turbofish cannot be used on a function with an `impl Trait`
            // parameter, so the target trait object is named in the closure.
            let x = v32.upcast(|val| val as &(dyn Bar + 'static)).unwrap();
            let v32 = unsafe { x.cast_unchecked::<Foo>() };
            assert_eq!(v32.0, 100);
        }
        unsafe {
            assert_eq!(D, 1);
        }
    }
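
    // Sketch test: relies only on `NullAlloc` releases being no-ops (as the
    // `from_raw`-based tests above already do); hands ownership off with
    // `leak_boxed` and takes it back with `from_boxed`.
    #[test]
    fn test_leak_roundtrip() {
        use crate::NullAlloc;

        let mut val = 5u32;
        let pbox = unsafe { Boxed::from_raw(&mut val) };
        let leaked = pbox.leak_boxed();
        assert_eq!(*leaked, 5);
        let pbox = unsafe { Boxed::from_boxed(leaked, &NullAlloc) };
        assert_eq!(*pbox, 5);
    }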
}