nonnull_mut/
lib.rs

1#![doc = include_str!("../README.md")]
2#![no_std]
3#![forbid(unsafe_op_in_unsafe_fn)]
4
5use core::{cmp::Ordering, fmt, hash, marker::PhantomData, num::NonZeroUsize, ptr::NonNull};
6
#[doc = include_str!("../README.md")]
// `_phantom` is zero-sized, so `repr(transparent)` makes this wrapper
// layout-compatible with its inner `NonNull<T>`.
#[repr(transparent)]
pub struct NonNullMut<T: ?Sized> {
    // The actual non-null pointer; every method in this file delegates to it.
    inner: NonNull<T>,
    // `PhantomData<*mut T>` gives this type the variance and auto-trait
    // behavior of a raw `*mut T`: invariant over `T` and neither `Send` nor
    // `Sync` — unlike `NonNull<T>`, which is covariant. Presumably this
    // invariance is the crate's purpose — confirm against the README.
    _phantom: PhantomData<*mut T>,
}
13
14impl<T: ?Sized> From<NonNull<T>> for NonNullMut<T> {
15    fn from(inner: NonNull<T>) -> Self {
16        Self {
17            inner,
18            _phantom: PhantomData,
19        }
20    }
21}
22
23impl<T: ?Sized> From<NonNullMut<T>> for NonNull<T> {
24    fn from(value: NonNullMut<T>) -> Self {
25        value.inner
26    }
27}
28
29impl<T> NonNullMut<T> {
30    /// Like [`NonNull::dangling`]
31    ///
32    /// # Examples
33    ///
34    /// ```
35    /// use nonnull_mut::NonNullMut;
36    ///
37    /// let ptr = NonNullMut::<u32>::dangling();
38    /// // Important: don't try to access the value of `ptr` without
39    /// // initializing it first! The pointer is not null but isn't valid either!
40    /// ```
41    #[inline]
42    #[must_use]
43    pub const fn dangling() -> Self {
44        let inner = NonNull::dangling();
45        Self {
46            inner,
47            _phantom: PhantomData,
48        }
49    }
50}
51
// Every method below mirrors the same-named `NonNull` API and simply
// delegates to `self.inner`. Methods take `self` by value because the type
// is `Copy` (see the `Copy` impl later in this file).
impl<T: ?Sized> NonNullMut<T> {
    /// Like [`NonNull::new`]
    ///
    /// # Examples
    ///
    /// ```
    /// use nonnull_mut::NonNullMut;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNullMut::<u32>::new(&mut x as *mut _).expect("ptr is null!");
    ///
    /// if let Some(ptr) = NonNullMut::<u32>::new(std::ptr::null_mut()) {
    ///     unreachable!();
    /// }
    /// ```
    #[inline]
    pub const fn new(ptr: *mut T) -> Option<Self> {
        // Manual match instead of `Option::map`: closures are not callable
        // in `const fn`.
        match NonNull::new(ptr) {
            Some(inner) => Some(Self {
                inner,
                _phantom: PhantomData,
            }),
            None => None,
        }
    }

    /// Like [`NonNull::new_unchecked`]
    ///
    /// # Safety
    /// - Like [`NonNull::new_unchecked`]
    #[inline]
    pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
        // SAFETY: the caller upholds `NonNull::new_unchecked`'s contract
        // (`ptr` is non-null).
        let inner = unsafe { NonNull::new_unchecked(ptr) };
        Self {
            inner,
            _phantom: PhantomData,
        }
    }

    /// Create [`NonNullMut<T>`] from [`NonNull<T>`]
    // `const` counterpart of the `From<NonNull<T>>` impl; other constructors
    // in this file route through it.
    pub const fn from_inner(inner: NonNull<T>) -> Self {
        Self {
            inner,
            _phantom: PhantomData,
        }
    }

    /// Like [`NonNull::addr`]
    #[inline]
    #[must_use]
    pub fn addr(self) -> NonZeroUsize {
        self.inner.addr()
    }

    /// Like [`NonNull::with_addr`]
    #[inline]
    #[must_use]
    pub fn with_addr(self, addr: NonZeroUsize) -> Self {
        // `.into()` re-wraps via the `From<NonNull<T>>` impl.
        self.inner.with_addr(addr).into()
    }

    /// Like [`NonNull::map_addr`]
    #[inline]
    #[must_use]
    pub fn map_addr(self, f: impl FnOnce(NonZeroUsize) -> NonZeroUsize) -> Self {
        self.inner.map_addr(f).into()
    }

    /// Like [`NonNull::as_ptr`]
    #[inline(always)]
    #[must_use]
    pub const fn as_ptr(self) -> *mut T {
        self.inner.as_ptr()
    }

    /// Get inner [`NonNull<T>`]
    pub const fn as_inner(self) -> NonNull<T> {
        self.inner
    }

    /// Like [`NonNull::as_ref`]
    ///
    /// # Safety
    /// - Like [`NonNull::as_ref`]
    // NOTE: the lifetime `'a` is chosen freely by the caller and is not tied
    // to `&self` — the caller must guarantee the pointee outlives it.
    #[inline(always)]
    #[must_use]
    pub const unsafe fn as_ref<'a>(&self) -> &'a T {
        // SAFETY: forwarded to the caller per `NonNull::as_ref`'s contract.
        unsafe { self.inner.as_ref() }
    }

    /// Like [`NonNull::as_mut`]
    ///
    /// # Safety
    /// - Like [`NonNull::as_mut`]
    // NOTE: as with `as_ref`, `'a` is unconstrained; the caller must ensure
    // exclusivity and validity for the whole of `'a`.
    #[inline(always)]
    #[must_use]
    pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
        // SAFETY: forwarded to the caller per `NonNull::as_mut`'s contract.
        unsafe { self.inner.as_mut() }
    }

    /// Like [`NonNull::cast`]
    // NOTE(review): unlike the other combinators, this returns `NonNull<U>`
    // rather than `NonNullMut<U>` — possibly intentional, but inconsistent;
    // changing the return type now would break callers.
    #[inline]
    #[must_use = "this returns the result of the operation, \
                  without modifying the original"]
    pub const fn cast<U>(self) -> NonNull<U> {
        self.inner.cast()
    }

    /// Like [`NonNull::offset`]
    ///
    /// # Safety
    /// - Like [`NonNull::offset`]
    #[inline(always)]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn offset(self, count: isize) -> Self
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::offset`'s contract.
        unsafe { Self::from_inner(self.inner.offset(count)) }
    }

    /// Like [`NonNull::byte_offset`]
    ///
    /// # Safety
    /// - Like [`NonNull::byte_offset`]
    #[inline(always)]
    #[must_use]
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // SAFETY: forwarded to the caller per `NonNull::byte_offset`'s contract.
        unsafe { Self::from_inner(self.inner.byte_offset(count)) }
    }

    /// Like [`NonNull::add`]
    ///
    /// # Safety
    /// - Like [`NonNull::add`]
    #[inline(always)]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::add`'s contract.
        unsafe { Self::from_inner(self.inner.add(count)) }
    }

    /// Like [`NonNull::byte_add`]
    ///
    /// # Safety
    /// - Like [`NonNull::byte_add`]
    #[inline(always)]
    #[must_use]
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: forwarded to the caller per `NonNull::byte_add`'s contract.
        unsafe { Self::from_inner(self.inner.byte_add(count)) }
    }

    /// Like [`NonNull::sub`]
    ///
    /// # Safety
    /// - Like [`NonNull::sub`]
    #[inline(always)]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::sub`'s contract.
        unsafe { Self::from_inner(self.inner.sub(count)) }
    }

    /// Like [`NonNull::byte_sub`]
    ///
    /// # Safety
    /// - Like [`NonNull::byte_sub`]
    #[inline(always)]
    #[must_use]
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: forwarded to the caller per `NonNull::byte_sub`'s contract.
        unsafe { Self::from_inner(self.inner.byte_sub(count)) }
    }

    /// Like [`NonNull::offset_from`]
    ///
    /// # Safety
    /// - Like [`NonNull::offset_from`]
    #[inline]
    pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::offset_from`'s contract.
        unsafe { self.inner.offset_from(origin) }
    }

    /// Like [`NonNull::byte_offset_from`]
    ///
    /// # Safety
    /// - Like [`NonNull::byte_offset_from`]
    #[inline(always)]
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
        // SAFETY: forwarded to the caller per `NonNull::byte_offset_from`'s
        // contract.
        unsafe { self.inner.byte_offset_from(origin) }
    }

    /// Like [`NonNull::read`]
    ///
    /// # Safety
    /// - Like [`NonNull::read`]
    #[inline]
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::read`'s contract.
        unsafe { self.inner.read() }
    }

    /// Like [`NonNull::read_volatile`]
    ///
    /// # Safety
    /// - Like [`NonNull::read_volatile`]
    #[inline]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::read_volatile`'s
        // contract.
        unsafe { self.inner.read_volatile() }
    }

    /// Like [`NonNull::read_unaligned`]
    ///
    /// # Safety
    /// - Like [`NonNull::read_unaligned`]
    #[inline]
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::read_unaligned`'s
        // contract.
        unsafe { self.inner.read_unaligned() }
    }

    /// Like [`NonNull::copy_to`]
    ///
    /// # Safety
    /// - Like [`NonNull::copy_to`]
    #[inline(always)]
    pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::copy_to`'s contract.
        unsafe { self.inner.copy_to(dest, count) }
    }

    /// Like [`NonNull::copy_to_nonoverlapping`]
    ///
    /// # Safety
    /// - Like [`NonNull::copy_to_nonoverlapping`]
    #[inline(always)]
    pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per
        // `NonNull::copy_to_nonoverlapping`'s contract.
        unsafe { self.inner.copy_to_nonoverlapping(dest, count) }
    }

    /// Like [`NonNull::copy_from`]
    ///
    /// # Safety
    /// - Like [`NonNull::copy_from`]
    #[inline(always)]
    pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::copy_from`'s contract.
        unsafe { self.inner.copy_from(src, count) }
    }

    /// Like [`NonNull::copy_from_nonoverlapping`]
    ///
    /// # Safety
    /// - Like [`NonNull::copy_from_nonoverlapping`]
    #[inline(always)]
    pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per
        // `NonNull::copy_from_nonoverlapping`'s contract.
        unsafe { self.inner.copy_from_nonoverlapping(src, count) }
    }

    /// Like [`NonNull::drop_in_place`]
    ///
    /// # Safety
    /// - Like [`NonNull::drop_in_place`]
    #[inline(always)]
    pub unsafe fn drop_in_place(self) {
        // SAFETY: forwarded to the caller per `NonNull::drop_in_place`'s
        // contract.
        unsafe { self.inner.drop_in_place() }
    }

    /// Like [`NonNull::write`]
    ///
    /// # Safety
    /// - Like [`NonNull::write`]
    #[inline(always)]
    pub const unsafe fn write(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::write`'s contract.
        unsafe { self.inner.write(val) }
    }

    /// Like [`NonNull::write_bytes`]
    ///
    /// # Safety
    /// - Like [`NonNull::write_bytes`]
    #[inline(always)]
    pub const unsafe fn write_bytes(self, val: u8, count: usize)
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::write_bytes`'s contract.
        unsafe { self.inner.write_bytes(val, count) }
    }

    /// Like [`NonNull::write_volatile`]
    ///
    /// # Safety
    /// - Like [`NonNull::write_volatile`]
    #[inline(always)]
    pub unsafe fn write_volatile(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::write_volatile`'s
        // contract.
        unsafe { self.inner.write_volatile(val) }
    }

    /// Like [`NonNull::write_unaligned`]
    ///
    /// # Safety
    /// - Like [`NonNull::write_unaligned`]
    #[inline(always)]
    pub const unsafe fn write_unaligned(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::write_unaligned`'s
        // contract.
        unsafe { self.inner.write_unaligned(val) }
    }

    /// Like [`NonNull::replace`]
    ///
    /// # Safety
    /// - Like [`NonNull::replace`]
    #[inline(always)]
    pub unsafe fn replace(self, src: T) -> T
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::replace`'s contract.
        unsafe { self.inner.replace(src) }
    }

    /// Like [`NonNull::swap`]
    ///
    /// # Safety
    /// - Like [`NonNull::swap`]
    #[inline(always)]
    pub const unsafe fn swap(self, with: NonNull<T>)
    where
        T: Sized,
    {
        // SAFETY: forwarded to the caller per `NonNull::swap`'s contract.
        unsafe { self.inner.swap(with) }
    }

    /// Like [`NonNull::align_offset`]
    #[inline]
    #[must_use]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        self.inner.align_offset(align)
    }

    /// Like [`NonNull::is_aligned`]
    #[inline]
    #[must_use]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        self.inner.is_aligned()
    }
}
435
436impl<T> NonNullMut<[T]> {
437    /// Like [`NonNull::slice_from_raw_parts`]
438    #[inline]
439    #[must_use]
440    pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
441        Self::from_inner(NonNull::slice_from_raw_parts(data, len))
442    }
443
444    /// Like [`NonNull::len`]
445    #[inline]
446    #[must_use]
447    pub const fn len(self) -> usize {
448        self.inner.len()
449    }
450
451    /// Like [`NonNull::is_empty`]
452    #[inline]
453    #[must_use]
454    pub const fn is_empty(self) -> bool {
455        self.inner.is_empty()
456    }
457}
458
459impl<T: ?Sized> Clone for NonNullMut<T> {
460    #[inline(always)]
461    fn clone(&self) -> Self {
462        *self
463    }
464}
465
466impl<T: ?Sized> Copy for NonNullMut<T> {}
467
468impl<T: ?Sized> fmt::Debug for NonNullMut<T> {
469    #[inline(always)]
470    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
471        fmt::Pointer::fmt(&self.as_ptr(), f)
472    }
473}
474
475impl<T: ?Sized> fmt::Pointer for NonNullMut<T> {
476    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
477        fmt::Pointer::fmt(&self.as_ptr(), f)
478    }
479}
480
481impl<T: ?Sized> Eq for NonNullMut<T> {}
482
483#[allow(ambiguous_wide_pointer_comparisons)]
484impl<T: ?Sized> PartialEq for NonNullMut<T> {
485    #[inline]
486    fn eq(&self, other: &Self) -> bool {
487        self.as_ptr() == other.as_ptr()
488    }
489}
490
491#[allow(ambiguous_wide_pointer_comparisons)]
492impl<T: ?Sized> Ord for NonNullMut<T> {
493    #[inline]
494    fn cmp(&self, other: &Self) -> Ordering {
495        self.as_ptr().cmp(&other.as_ptr())
496    }
497}
498
499#[allow(ambiguous_wide_pointer_comparisons)]
500#[allow(clippy::non_canonical_partial_ord_impl)]
501impl<T: ?Sized> PartialOrd for NonNullMut<T> {
502    #[inline]
503    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
504        self.as_ptr().partial_cmp(&other.as_ptr())
505    }
506}
507
508#[allow(ambiguous_wide_pointer_comparisons)]
509impl<T: ?Sized> hash::Hash for NonNullMut<T> {
510    #[inline]
511    fn hash<H: hash::Hasher>(&self, state: &mut H) {
512        self.as_ptr().hash(state)
513    }
514}
515
516impl<T: ?Sized> From<&mut T> for NonNullMut<T> {
517    #[inline]
518    fn from(r: &mut T) -> Self {
519        NonNullMut::from_inner(r.into())
520    }
521}
522
523impl<T: ?Sized> From<&T> for NonNullMut<T> {
524    #[inline]
525    fn from(r: &T) -> Self {
526        NonNullMut::from_inner(r.into())
527    }
528}