1use crate::aligned_alloc;
2
3use alloc::alloc::{alloc, dealloc, Layout};
4use core::fmt::{self, Debug};
5use core::marker::PhantomData;
6use core::mem;
7use core::ops::{Deref, DerefMut};
8use core::ptr::{drop_in_place, NonNull};
9
/// A heap-allocated `T` (like `Box<T>`) whose allocation is aligned to at
/// least a caller-chosen boundary; see `AlignedBox::new`.
pub struct AlignedBox<T: ?Sized> {
    // Pointer to the owned value; dangling (never allocated) for ZSTs.
    inner: NonNull<T>,
    // Alignment the allocation was made with; required to reconstruct the
    // `Layout` when deallocating in `Drop`.
    align: usize,
    // Marks logical ownership of a `T` for drop-check/variance purposes,
    // since `NonNull<T>` alone does not convey ownership.
    _marker: PhantomData<T>,
}
16
// SAFETY: `AlignedBox<T>` uniquely owns its `T` (Box-like semantics, as seen
// in `new`/`Drop`), so it may be sent/shared across threads exactly when `T`
// itself is `Send`/`Sync` — mirroring the bounds `Box<T>` uses.
unsafe impl<T: ?Sized + Send> Send for AlignedBox<T> {}
unsafe impl<T: ?Sized + Sync> Sync for AlignedBox<T> {}
19
// `UnwindSafe`/`RefUnwindSafe` live in `std`, so these impls are only
// available when the `std` feature is enabled.
#[cfg(feature = "std")]
mod std_impl {
    use super::AlignedBox;

    use std::panic::{RefUnwindSafe, UnwindSafe};

    // Forward unwind-safety from the owned `T`, matching `Box<T>`'s impls.
    impl<T: ?Sized + UnwindSafe> UnwindSafe for AlignedBox<T> {}
    impl<T: ?Sized + RefUnwindSafe> RefUnwindSafe for AlignedBox<T> {}
}
29
30impl<T> AlignedBox<T> {
31 pub fn new(x: T, align: usize) -> Self {
35 let size = mem::size_of::<T>();
36 let align = mem::align_of::<T>().max(align);
37
38 let inner = if size == 0 {
39 mem::forget(x);
40 NonNull::dangling()
41 } else {
42 unsafe {
43 let ptr = aligned_alloc(alloc, size, align).cast::<T>();
44 ptr.write(x);
45 NonNull::new_unchecked(ptr)
46 }
47 };
48
49 debug_assert!(inner.as_ptr() as usize % align == 0);
50
51 Self {
52 align,
53 inner,
54 _marker: PhantomData,
55 }
56 }
57}
58
59impl<T: ?Sized> AlignedBox<T> {
60 pub fn into_raw(this: Self) -> (*mut T, usize) {
62 let ans = (this.inner.as_ptr(), this.align);
63 mem::forget(this);
64 ans
65 }
66
67 pub unsafe fn from_raw(ptr: *mut T, align: usize) -> Self {
72 Self {
73 inner: NonNull::new_unchecked(ptr),
74 align,
75 _marker: PhantomData,
76 }
77 }
78
79 pub fn alignment(this: &Self) -> usize {
81 this.align
82 }
83}
84
impl<T: ?Sized> Drop for AlignedBox<T> {
    fn drop(&mut self) {
        unsafe {
            // Read the (possibly dynamic) size *before* dropping the value;
            // for unsized `T` it is derived from the pointer metadata.
            let size = mem::size_of_val(self.inner.as_ref());

            let ptr = self.inner.as_ptr();
            // Run `T`'s destructor in place without moving the value.
            drop_in_place(ptr);

            // ZSTs were never allocated (`inner` is dangling in `new`),
            // so only deallocate when a real allocation exists.
            if size != 0 {
                // SAFETY-relevant: `size`/`align` must match the values the
                // allocation was made with in `new` for the layout to
                // round-trip correctly through `dealloc`.
                let layout = Layout::from_size_align_unchecked(size, self.align);
                dealloc(ptr.cast(), layout)
            }
        }
    }
}
100
101impl<T: ?Sized> Deref for AlignedBox<T> {
102 type Target = T;
103 fn deref(&self) -> &Self::Target {
104 unsafe { &*self.inner.as_ptr() }
105 }
106}
107
108impl<T: ?Sized> DerefMut for AlignedBox<T> {
109 fn deref_mut(&mut self) -> &mut Self::Target {
110 unsafe { &mut *self.inner.as_ptr() }
111 }
112}
113
114impl<T: ?Sized + Debug> Debug for AlignedBox<T> {
115 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
116 T::fmt(self, f)
117 }
118}
119
// `CoerceUnsized`/`Unsize` are nightly-only, so this lives behind the
// `unstable` feature. It enables unsizing coercions such as
// `AlignedBox<[u8; 4]> -> AlignedBox<[u8]>` (exercised in `check_coerce`).
#[cfg(feature = "unstable")]
mod unstable_impl {
    use super::AlignedBox;

    use core::marker::Unsize;
    use core::ops::CoerceUnsized;

    impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<AlignedBox<U>> for AlignedBox<T> {}
}
129
#[cfg(test)]
mod tests {
    use super::AlignedBox;

    /// A zero-sized value must round-trip through the box and drop cleanly
    /// (no real allocation is involved).
    #[test]
    fn check_zst() {
        let boxed = AlignedBox::new((), 1);
        assert_eq!(&*boxed, &());
        drop(boxed);
    }

    /// Unsizing coercion (array -> slice) is only available with the
    /// `unstable` feature's `CoerceUnsized` impl.
    #[cfg(feature = "unstable")]
    #[test]
    fn check_coerce() {
        let boxed: AlignedBox<[u8]> = AlignedBox::new([1, 2, 3, 4], 8);
        assert_eq!(&*boxed, &[1, 2, 3, 4]);
    }

    /// Dropping a boxed ZST must run its destructor exactly once.
    #[test]
    fn check_zst_drop() {
        use std::sync::atomic::{AtomicUsize, Ordering};

        static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

        #[derive(Debug)]
        struct Token;

        impl Drop for Token {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::SeqCst);
            }
        }

        let boxed: AlignedBox<Token> = AlignedBox::new(Token, 1);
        // Constructing the box must not drop the token.
        assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 0);

        drop(boxed);
        assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 1);
    }
}