1use core::cmp::Ordering;
2use core::fmt;
3use core::hash::{Hash, Hasher};
4use core::mem::needs_drop;
5use core::ops::{Deref, DerefMut};
6
7use super::{Alloc, AllocError, Allocator, GlobalAllocator};
8
/// An owned, heap-allocated `T` whose storage is managed by an
/// allocator `A` (an allocator-aware analogue of `alloc::boxed::Box`).
pub struct Box<T, A>
where
    A: Allocator,
{
    // Allocation handle that owns the storage holding the `T`.
    // The `T` itself is destroyed explicitly in this type's `Drop` impl
    // (via `drop_in_place`); the handle is responsible only for the memory.
    buf: A::Alloc<T>,
}
24
25impl<T, A> Box<T, A>
26where
27 A: Allocator,
28{
29 #[inline]
59 pub fn new_in(value: T, alloc: A) -> Result<Self, AllocError> {
60 Ok(Self {
61 buf: alloc.alloc(value)?,
62 })
63 }
64}
65
/// Deep-clones the boxed value into a freshly obtained allocation.
///
/// Requires `A: GlobalAllocator` so new storage can be obtained without
/// holding an allocator instance.
impl<T, A> Clone for Box<T, A>
where
    T: Clone,
    A: GlobalAllocator,
{
    #[inline]
    fn clone(&self) -> Self {
        // Obtain fresh storage shaped like our own allocation.
        // NOTE(review): assumes `clone_alloc` returns storage that does NOT
        // already contain a live `T` (it is written below without dropping
        // the destination) — confirm against the `GlobalAllocator` contract.
        let mut buf = <A as GlobalAllocator>::clone_alloc(&self.buf);

        // SAFETY: `buf` points to valid, writable storage for one `T`;
        // `write` moves the clone in without reading or dropping whatever
        // bytes were previously at the destination.
        unsafe {
            buf.as_mut_ptr().write(self.as_ref().clone());
        }

        Self { buf }
    }
}
83
// SAFETY: `Box` uniquely owns its `T`, so moving the box to another thread
// moves the value with it; that is sound exactly when `T: Send`.
// NOTE(review): this impl does not bound `A::Alloc<T>: Send` — confirm that
// every allocator's handle type is safe to transfer across threads.
unsafe impl<T, A> Send for Box<T, A>
where
    T: Send,
    A: Allocator,
{
}
// SAFETY: sharing `&Box<T, A>` only hands out `&T` (via `Deref`/`AsRef`),
// which is sound exactly when `T: Sync`.
// NOTE(review): this impl does not bound `A::Alloc<T>: Sync` — confirm that
// allocator handles tolerate concurrent shared access.
unsafe impl<T, A> Sync for Box<T, A>
where
    T: Sync,
    A: Allocator,
{
}
96
impl<T, A> Deref for Box<T, A>
where
    A: Allocator,
{
    type Target = T;

    /// Borrows the boxed value.
    #[inline]
    fn deref(&self) -> &Self::Target {
        // SAFETY: `buf` holds an initialized `T` for the entire lifetime of
        // the box (written at construction/clone, destroyed only in `Drop`),
        // so dereferencing its pointer yields a valid `&T`.
        unsafe { &*self.buf.as_ptr() }
    }
}
109
impl<T, A> DerefMut for Box<T, A>
where
    A: Allocator,
{
    /// Mutably borrows the boxed value.
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: same initialization invariant as `deref`; `&mut self`
        // guarantees exclusive access, so handing out `&mut T` is sound.
        unsafe { &mut *self.buf.as_mut_ptr() }
    }
}
120
121impl<T, A> AsRef<T> for Box<T, A>
122where
123 A: Allocator,
124{
125 #[inline]
126 fn as_ref(&self) -> &T {
127 self
128 }
129}
130
impl<T, A> Drop for Box<T, A>
where
    A: Allocator,
{
    /// Runs `T`'s destructor in place; the backing storage itself is
    /// released when `self.buf` (the allocator handle) is dropped afterwards.
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // Explicit fast path: skip the call entirely for types with no
            // drop glue (`drop_in_place` would also compile to a no-op for
            // those, so this only makes the intent visible).
            if needs_drop::<T>() {
                // SAFETY: the `T` was initialized at construction and is
                // dropped nowhere else, so dropping it exactly once here
                // is sound.
                self.buf.as_mut_ptr().drop_in_place();
            }
        }
    }
}
146
147impl<T, A> fmt::Display for Box<T, A>
148where
149 T: fmt::Display,
150 A: Allocator,
151{
152 #[inline]
153 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
154 fmt::Display::fmt(&**self, f)
155 }
156}
157
158impl<T, A> fmt::Debug for Box<T, A>
159where
160 T: fmt::Debug,
161 A: Allocator,
162{
163 #[inline]
164 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
165 fmt::Debug::fmt(&**self, f)
166 }
167}
168
169impl<T, A> PartialEq for Box<T, A>
170where
171 T: PartialEq,
172 A: Allocator,
173{
174 #[inline]
175 fn eq(&self, other: &Self) -> bool {
176 PartialEq::eq(&**self, &**other)
177 }
178
179 #[inline]
180 #[allow(clippy::partialeq_ne_impl)]
181 fn ne(&self, other: &Self) -> bool {
182 PartialEq::ne(&**self, &**other)
183 }
184}
185
186impl<T, A> PartialOrd for Box<T, A>
187where
188 T: PartialOrd,
189 A: Allocator,
190{
191 #[inline]
192 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
193 PartialOrd::partial_cmp(&**self, &**other)
194 }
195
196 #[inline]
197 fn lt(&self, other: &Self) -> bool {
198 PartialOrd::lt(&**self, &**other)
199 }
200
201 #[inline]
202 fn le(&self, other: &Self) -> bool {
203 PartialOrd::le(&**self, &**other)
204 }
205
206 #[inline]
207 fn ge(&self, other: &Self) -> bool {
208 PartialOrd::ge(&**self, &**other)
209 }
210
211 #[inline]
212 fn gt(&self, other: &Self) -> bool {
213 PartialOrd::gt(&**self, &**other)
214 }
215}
216
217impl<T, A> Ord for Box<T, A>
218where
219 T: Ord,
220 A: Allocator,
221{
222 #[inline]
223 fn cmp(&self, other: &Self) -> Ordering {
224 Ord::cmp(&**self, &**other)
225 }
226}
227
// `Box` is `Eq` whenever the boxed value is: equality is fully delegated to
// `T` (see the `PartialEq` impl above), so it inherits `T`'s reflexivity.
impl<T, A> Eq for Box<T, A>
where
    T: Eq,
    A: Allocator,
{
}
234
235impl<T, A> Hash for Box<T, A>
236where
237 T: Hash,
238 A: Allocator,
239{
240 #[inline]
241 fn hash<H: Hasher>(&self, state: &mut H) {
242 (**self).hash(state);
243 }
244}
245
246impl<T, A> Hasher for Box<T, A>
247where
248 T: Hasher,
249 A: Allocator,
250{
251 #[inline]
252 fn finish(&self) -> u64 {
253 (**self).finish()
254 }
255
256 #[inline]
257 fn write(&mut self, bytes: &[u8]) {
258 (**self).write(bytes)
259 }
260
261 #[inline]
262 fn write_u8(&mut self, i: u8) {
263 (**self).write_u8(i)
264 }
265
266 #[inline]
267 fn write_u16(&mut self, i: u16) {
268 (**self).write_u16(i)
269 }
270
271 #[inline]
272 fn write_u32(&mut self, i: u32) {
273 (**self).write_u32(i)
274 }
275
276 #[inline]
277 fn write_u64(&mut self, i: u64) {
278 (**self).write_u64(i)
279 }
280
281 #[inline]
282 fn write_u128(&mut self, i: u128) {
283 (**self).write_u128(i)
284 }
285
286 #[inline]
287 fn write_usize(&mut self, i: usize) {
288 (**self).write_usize(i)
289 }
290
291 #[inline]
292 fn write_i8(&mut self, i: i8) {
293 (**self).write_i8(i)
294 }
295
296 #[inline]
297 fn write_i16(&mut self, i: i16) {
298 (**self).write_i16(i)
299 }
300
301 #[inline]
302 fn write_i32(&mut self, i: i32) {
303 (**self).write_i32(i)
304 }
305
306 #[inline]
307 fn write_i64(&mut self, i: i64) {
308 (**self).write_i64(i)
309 }
310
311 #[inline]
312 fn write_i128(&mut self, i: i128) {
313 (**self).write_i128(i)
314 }
315
316 #[inline]
317 fn write_isize(&mut self, i: isize) {
318 (**self).write_isize(i)
319 }
320}