use core::{mem, fmt, ptr, marker};

use crate::Deleter;

/// `Unique` specialized to the crate's `GlobalDeleter`.
#[cfg(feature = "alloc")]
pub type Global<T> = Unique<'static, T, crate::GlobalDeleter>;

#[cfg(feature = "alloc")]
impl<T> Global<T> {
    /// Allocates `val` on the heap via `Box` and takes ownership of the allocation.
    #[inline]
    pub fn boxed(val: T) -> Self {
        alloc::boxed::Box::new(val).into()
    }
}

#[cfg(feature = "alloc")]
impl<T: ?Sized> Global<T> {
    /// Converts back into a `Box`, transferring ownership without running the deleter.
    #[inline]
    pub fn into_boxed(self) -> alloc::boxed::Box<T> {
        let ptr = self.release().as_ptr();
        unsafe {
            alloc::boxed::Box::from_raw(ptr)
        }
    }
}
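// A minimal usage sketch for the `alloc`-gated conversions above, assuming a
// hosted test environment; it round-trips a value between `Box` and `Global`
// using only the APIs defined in this module.
#[cfg(all(test, feature = "alloc"))]
#[test]
fn global_round_trips_through_box() {
    let val = Global::boxed(42u32);
    assert_eq!(*val, 42);

    // `into_boxed` hands the allocation back to `Box` without running the deleter.
    let boxed = val.into_boxed();
    assert_eq!(*boxed, 42);

    // `From<Box<T>>` converts in the other direction.
    let val: Global<u32> = boxed.into();
    assert_eq!(*val, 42);
}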

/// Non-null smart pointer that passes its raw pointer to `D::delete` when dropped.
#[repr(transparent)]
pub struct Unique<'a, T: ?Sized, D: Deleter> {
    inner: ptr::NonNull<T>,
    _traits: marker::PhantomData<&'a D>,
}

impl<'a, T: ?Sized, D: Deleter> Unique<'a, T, D> {
    /// Creates a new instance from a raw pointer, asserting that it is not null.
    ///
    /// The pointer must be valid to hand to `D::delete` once this instance is dropped.
    #[inline]
    pub unsafe fn new(ptr: *mut T) -> Self {
        assert!(!ptr.is_null());

        Self::from_ptr_unchecked(ptr)
    }

    /// Creates a new instance from a raw pointer, returning `None` if it is null.
    #[inline]
    pub unsafe fn from_ptr(ptr: *mut T) -> Option<Self> {
        match ptr.is_null() {
            true => None,
            false => Some(Self::from_ptr_unchecked(ptr)),
        }
    }

    /// Creates a new instance from a raw pointer without checking for null.
    #[inline]
    pub unsafe fn from_ptr_unchecked(ptr: *mut T) -> Self {
        Self {
            inner: ptr::NonNull::new_unchecked(ptr),
            _traits: marker::PhantomData,
        }
    }

    /// Returns the underlying raw pointer.
    #[inline(always)]
    pub fn get(&self) -> *mut T {
        self.inner.as_ptr()
    }

    /// Returns a shared reference to the pointee.
    #[inline(always)]
    pub fn as_ref(&self) -> &T {
        self
    }

    /// Returns a mutable reference to the pointee.
    #[inline(always)]
    pub fn as_mut(&mut self) -> &mut T {
        self
    }

    /// Returns the underlying pointer cast to `*mut N`.
    #[inline(always)]
    pub fn cast<N>(&self) -> *mut N {
        self.inner.as_ptr() as *mut N
    }

    /// Returns the underlying pointer cast to `*const N`.
    #[inline(always)]
    pub fn const_cast<N>(&self) -> *const N {
        self.inner.as_ptr() as *const N
    }

    /// Swaps the managed pointers of `self` and `other` without touching the pointees.
    #[inline(always)]
    pub fn swap(&mut self, other: &mut Self) {
        mem::swap(&mut self.inner, &mut other.inner);
    }

    /// Returns the wrapped pointer and forgets `self`, so the deleter never runs.
    #[inline]
    pub fn release(self) -> ptr::NonNull<T> {
        let result = self.inner;
        mem::forget(self);
        result
    }
}
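// A sketch of the raw-pointer accessors and of `release`, assuming the `alloc`
// feature so that `Global` is available; `release` forgets the smart pointer and
// hands back the `NonNull` without invoking the deleter, so the allocation is
// re-wrapped here to avoid leaking it.
#[cfg(all(test, feature = "alloc"))]
#[test]
fn accessors_swap_and_release() {
    let mut first = Global::boxed(1u8);
    let mut second = Global::boxed(2u8);

    // `swap` exchanges the managed pointers without touching the pointees.
    first.swap(&mut second);
    assert_eq!((*first, *second), (2, 1));

    // `get` exposes the raw pointer while ownership stays with the wrapper.
    assert!(!first.get().is_null());

    // Take the pointer out, then re-wrap it so it is still freed on drop.
    let raw = first.release();
    let first = unsafe { Global::<u8>::from_ptr_unchecked(raw.as_ptr()) };
    assert_eq!(*first, 2);
}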

impl<'a, T: ?Sized, D: Deleter> Drop for Unique<'a, T, D> {
    #[inline(always)]
    fn drop(&mut self) {
        unsafe {
            D::delete::<T>(self.inner.as_ptr())
        }
    }
}

impl<'a, T: ?Sized, D: Deleter> fmt::Pointer for Unique<'a, T, D> {
    #[inline(always)]
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.inner, fmt)
    }
}

impl<'a, T: ?Sized + fmt::Debug, D: Deleter> fmt::Debug for Unique<'a, T, D> {
    #[inline(always)]
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self.as_ref(), fmt)
    }
}

impl<'a, T: ?Sized + fmt::Display, D: Deleter> fmt::Display for Unique<'a, T, D> {
    #[inline(always)]
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self.as_ref(), fmt)
    }
}

impl<'a, T: ?Sized + Unpin, D: Deleter> Unpin for Unique<'a, T, D> {}

unsafe impl<'a, T: ?Sized + Send, D: Deleter> Send for Unique<'a, T, D> {}

unsafe impl<'a, T: ?Sized + Sync, D: Deleter> Sync for Unique<'a, T, D> {}

impl<'a, T: ?Sized, D: Deleter> core::ops::Deref for Unique<'a, T, D> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe {
            &*self.inner.as_ptr()
        }
    }
}

impl<'a, T: ?Sized, D: Deleter> core::ops::DerefMut for Unique<'a, T, D> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe {
            &mut *self.inner.as_ptr()
        }
    }
}

impl<'a, T: ?Sized + core::hash::Hash, D: Deleter> core::hash::Hash for Unique<'a, T, D> {
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.as_ref().hash(state);
    }
}

impl<'a, T: ?Sized + PartialOrd, D: Deleter> PartialOrd<Self> for Unique<'a, T, D> {
    #[inline(always)]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        PartialOrd::partial_cmp(self.as_ref(), other.as_ref())
    }
}

impl<'a, T: ?Sized + PartialEq, D: Deleter> PartialEq<Self> for Unique<'a, T, D> {
    #[inline(always)]
    fn eq(&self, other: &Self) -> bool {
        PartialEq::eq(self.as_ref(), other.as_ref())
    }

    #[allow(clippy::partialeq_ne_impl)]
    #[inline(always)]
    fn ne(&self, other: &Self) -> bool {
        PartialEq::ne(self.as_ref(), other.as_ref())
    }
}

impl<'a, T: ?Sized + Eq, D: Deleter> Eq for Unique<'a, T, D> {}

impl<'a, T: ?Sized + core::panic::RefUnwindSafe, D: Deleter> core::panic::UnwindSafe for Unique<'a, T, D> {}

#[cfg(feature = "alloc")]
impl<T: ?Sized> From<alloc::boxed::Box<T>> for Global<T> {
    #[inline]
    fn from(ptr: alloc::boxed::Box<T>) -> Self {
        let ptr = alloc::boxed::Box::into_raw(ptr);
        unsafe {
            Self::from_ptr_unchecked(ptr)
        }
    }
}

#[cfg(feature = "alloc")]
impl<T: ?Sized + Clone> Clone for Global<T> {
    fn clone(&self) -> Self {
        // `Clone: Sized`, so `T` is necessarily `Sized` here; clone the pointee
        // into a fresh `Box` instead of materializing an aliasing `Box` from the
        // raw pointer.
        alloc::boxed::Box::new(self.as_ref().clone()).into()
    }
}
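// Sketch illustrating that `Clone` above deep-copies the pointee into a new
// allocation, so the clone and the original are independently owned.
#[cfg(all(test, feature = "alloc"))]
#[test]
fn clone_copies_into_new_allocation() {
    let original = Global::boxed([1u8, 2, 3]);
    let copy = original.clone();

    assert_eq!(*original, *copy);
    // Distinct allocations: the raw pointers differ.
    assert_ne!(original.get(), copy.get());
}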

impl<'a, T: ?Sized> From<&'a mut T> for Unique<'a, T, ()> {
    #[inline]
    fn from(ptr: &'a mut T) -> Self {
        unsafe {
            Self::from_ptr_unchecked(ptr)
        }
    }
}
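// A sketch of the borrowed, non-owning use enabled by the `From<&mut T>` impl
// above; it assumes the unit type `()` implements `Deleter` as a no-op elsewhere
// in the crate, so dropping the handle frees nothing and the original value
// remains usable afterwards.
#[cfg(test)]
#[test]
fn borrowed_unique_leaves_value_alive() {
    let mut value = 1u32;

    {
        let mut handle = Unique::from(&mut value);
        // `DerefMut` forwards to the pointee.
        *handle += 1;
    } // Dropping the handle invokes the `()` deleter, which is expected to do nothing.

    assert_eq!(value, 2);
}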