#![doc = include_str!("../README.md")]
#![no_std]
#![forbid(unsafe_op_in_unsafe_fn)]

use core::{cmp::Ordering, fmt, hash, marker::PhantomData, num::NonZeroUsize, ptr::NonNull};

#[doc = include_str!("../README.md")]
#[repr(transparent)]
pub struct NonNullMut<T: ?Sized> {
    inner: NonNull<T>,
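    // `NonNull<T>` is covariant over `T`; this `PhantomData<*mut T>` marker
    // makes `NonNullMut<T>` invariant, matching the variance of a raw `*mut T`.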
    _phantom: PhantomData<*mut T>,
}

impl<T: ?Sized> From<NonNull<T>> for NonNullMut<T> {
    fn from(inner: NonNull<T>) -> Self {
        Self {
            inner,
            _phantom: PhantomData,
        }
    }
}

impl<T: ?Sized> From<NonNullMut<T>> for NonNull<T> {
    fn from(value: NonNullMut<T>) -> Self {
        value.inner
    }
}

impl<T> NonNullMut<T> {
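    /// Creates a new `NonNullMut` that is dangling but well-aligned.
    ///
    /// Like [`NonNull::dangling`], the result is useful as a placeholder
    /// and must never be dereferenced.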
    #[inline]
    #[must_use]
    pub const fn dangling() -> Self {
        let inner = NonNull::dangling();
        Self {
            inner,
            _phantom: PhantomData,
        }
    }
}

impl<T: ?Sized> NonNullMut<T> {
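    /// Creates a new `NonNullMut` if `ptr` is non-null, like [`NonNull::new`].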
    #[inline]
    pub const fn new(ptr: *mut T) -> Option<Self> {
        match NonNull::new(ptr) {
            Some(inner) => Some(Self {
                inner,
                _phantom: PhantomData,
            }),
            None => None,
        }
    }

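    /// Creates a new `NonNullMut` without checking for null.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null; see [`NonNull::new_unchecked`].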
    #[inline]
    pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
        let inner = unsafe { NonNull::new_unchecked(ptr) };
        Self {
            inner,
            _phantom: PhantomData,
        }
    }

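    /// Wraps an existing [`NonNull`] in a `NonNullMut`.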
    pub const fn from_inner(inner: NonNull<T>) -> Self {
        Self {
            inner,
            _phantom: PhantomData,
        }
    }

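    /// Gets the address portion of the pointer; see [`NonNull::addr`].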
    #[inline]
    #[must_use]
    pub fn addr(self) -> NonZeroUsize {
        self.inner.addr()
    }

    #[inline]
    #[must_use]
    pub fn with_addr(self, addr: NonZeroUsize) -> Self {
        self.inner.with_addr(addr).into()
    }

    #[inline]
    #[must_use]
    pub fn map_addr(self, f: impl FnOnce(NonZeroUsize) -> NonZeroUsize) -> Self {
        self.inner.map_addr(f).into()
    }

    #[inline(always)]
    #[must_use]
    pub const fn as_ptr(self) -> *mut T {
        self.inner.as_ptr()
    }

    pub const fn as_inner(self) -> NonNull<T> {
        self.inner
    }

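    /// Returns a shared reference to the value.
    ///
    /// # Safety
    ///
    /// The pointer must be valid for reads and properly aligned, and the
    /// aliasing rules must be upheld for the chosen lifetime `'a`;
    /// see [`NonNull::as_ref`].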
    #[inline(always)]
    #[must_use]
    pub const unsafe fn as_ref<'a>(&self) -> &'a T {
        unsafe { self.inner.as_ref() }
    }

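    /// Returns a unique reference to the value.
    ///
    /// # Safety
    ///
    /// The pointer must be valid for reads and writes, properly aligned,
    /// and the returned reference must be unique for the chosen lifetime
    /// `'a`; see [`NonNull::as_mut`].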
    #[inline(always)]
    #[must_use]
    pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
        unsafe { self.inner.as_mut() }
    }

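    /// Casts to a pointer of another type. Delegates to [`NonNull::cast`]
    /// and re-wraps the result so the invariant wrapper is kept.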
    #[inline]
    #[must_use = "this returns the result of the operation, \
                  without modifying the original"]
    pub const fn cast<U>(self) -> NonNullMut<U> {
        NonNullMut::from_inner(self.inner.cast())
    }

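    /// Offsets the pointer by `count` elements of `T`.
    ///
    /// # Safety
    ///
    /// The same conditions as [`NonNull::offset`] apply: the starting and
    /// resulting pointers must be in bounds of the same allocated object,
    /// and the offset in bytes must not overflow an `isize`.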
    #[inline(always)]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn offset(self, count: isize) -> Self
    where
        T: Sized,
    {
        unsafe { Self::from_inner(self.inner.offset(count)) }
    }

    #[inline(always)]
    #[must_use]
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        unsafe { Self::from_inner(self.inner.byte_offset(count)) }
    }

    #[inline(always)]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        unsafe { Self::from_inner(self.inner.add(count)) }
    }

    #[inline(always)]
    #[must_use]
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        unsafe { Self::from_inner(self.inner.byte_add(count)) }
    }

    #[inline(always)]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        unsafe { Self::from_inner(self.inner.sub(count)) }
    }

    #[inline(always)]
    #[must_use]
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        unsafe { Self::from_inner(self.inner.byte_sub(count)) }
    }

    #[inline]
    pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
    where
        T: Sized,
    {
        unsafe { self.inner.offset_from(origin) }
    }

    #[inline(always)]
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
        unsafe { self.inner.byte_offset_from(origin) }
    }

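    /// Reads the value behind the pointer without moving it.
    ///
    /// # Safety
    ///
    /// The same conditions as [`NonNull::read`] apply: the pointer must be
    /// valid for reads and properly aligned.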
    #[inline]
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        unsafe { self.inner.read() }
    }

    #[inline]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        unsafe { self.inner.read_volatile() }
    }

    #[inline]
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        unsafe { self.inner.read_unaligned() }
    }

    #[inline(always)]
    pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
    where
        T: Sized,
    {
        unsafe { self.inner.copy_to(dest, count) }
    }

    #[inline(always)]
    pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
    where
        T: Sized,
    {
        unsafe { self.inner.copy_to_nonoverlapping(dest, count) }
    }

    #[inline(always)]
    pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
    where
        T: Sized,
    {
        unsafe { self.inner.copy_from(src, count) }
    }

    #[inline(always)]
    pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
    where
        T: Sized,
    {
        unsafe { self.inner.copy_from_nonoverlapping(src, count) }
    }

    #[inline(always)]
    pub unsafe fn drop_in_place(self) {
        unsafe { self.inner.drop_in_place() }
    }

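    /// Overwrites the memory location with `val` without reading or
    /// dropping the old value.
    ///
    /// # Safety
    ///
    /// The same conditions as [`NonNull::write`] apply: the pointer must be
    /// valid for writes and properly aligned.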
    #[inline(always)]
    pub const unsafe fn write(self, val: T)
    where
        T: Sized,
    {
        unsafe { self.inner.write(val) }
    }

    #[inline(always)]
    pub const unsafe fn write_bytes(self, val: u8, count: usize)
    where
        T: Sized,
    {
        unsafe { self.inner.write_bytes(val, count) }
    }

    #[inline(always)]
    pub unsafe fn write_volatile(self, val: T)
    where
        T: Sized,
    {
        unsafe { self.inner.write_volatile(val) }
    }

    #[inline(always)]
    pub const unsafe fn write_unaligned(self, val: T)
    where
        T: Sized,
    {
        unsafe { self.inner.write_unaligned(val) }
    }

    #[inline(always)]
    pub unsafe fn replace(self, src: T) -> T
    where
        T: Sized,
    {
        unsafe { self.inner.replace(src) }
    }

    #[inline(always)]
    pub const unsafe fn swap(self, with: NonNull<T>)
    where
        T: Sized,
    {
        unsafe { self.inner.swap(with) }
    }

    #[inline]
    #[must_use]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        self.inner.align_offset(align)
    }

    #[inline]
    #[must_use]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        self.inner.is_aligned()
    }
}

impl<T> NonNullMut<[T]> {
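    /// Creates a non-null raw slice from a thin data pointer and a length,
    /// like [`NonNull::slice_from_raw_parts`]. The pointer is not
    /// dereferenced, and `len` is not checked against the allocation.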
    #[inline]
    #[must_use]
    pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
        Self::from_inner(NonNull::slice_from_raw_parts(data, len))
    }

    #[inline]
    #[must_use]
    pub const fn len(self) -> usize {
        self.inner.len()
    }

    #[inline]
    #[must_use]
    pub const fn is_empty(self) -> bool {
        self.inner.is_empty()
    }
}

impl<T: ?Sized> Clone for NonNullMut<T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: ?Sized> Copy for NonNullMut<T> {}

impl<T: ?Sized> fmt::Debug for NonNullMut<T> {
    #[inline(always)]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

impl<T: ?Sized> fmt::Pointer for NonNullMut<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

impl<T: ?Sized> Eq for NonNullMut<T> {}

#[allow(ambiguous_wide_pointer_comparisons)]
impl<T: ?Sized> PartialEq for NonNullMut<T> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.as_ptr() == other.as_ptr()
    }
}

#[allow(ambiguous_wide_pointer_comparisons)]
impl<T: ?Sized> Ord for NonNullMut<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_ptr().cmp(&other.as_ptr())
    }
}

#[allow(ambiguous_wide_pointer_comparisons)]
#[allow(clippy::non_canonical_partial_ord_impl)]
impl<T: ?Sized> PartialOrd for NonNullMut<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.as_ptr().partial_cmp(&other.as_ptr())
    }
}

#[allow(ambiguous_wide_pointer_comparisons)]
impl<T: ?Sized> hash::Hash for NonNullMut<T> {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.as_ptr().hash(state)
    }
}

impl<T: ?Sized> From<&mut T> for NonNullMut<T> {
    #[inline]
    fn from(r: &mut T) -> Self {
        NonNullMut::from_inner(r.into())
    }
}

impl<T: ?Sized> From<&T> for NonNullMut<T> {
    #[inline]
    fn from(r: &T) -> Self {
        NonNullMut::from_inner(r.into())
    }
}
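
// A minimal sanity-test sketch for the wrapper's round-trip and pointer
// identity behavior. It exercises only API defined above, allocates
// nothing, and dereferences only a pointer to a live local.
#[cfg(test)]
mod tests {
    use super::NonNullMut;
    use core::ptr::NonNull;

    #[test]
    fn round_trips_through_non_null() {
        let mut x = 5_i32;
        let ptr = NonNullMut::from(&mut x);
        // `NonNullMut` <-> `NonNull` conversions preserve the address.
        let inner: NonNull<i32> = ptr.into();
        assert_eq!(NonNullMut::from(inner), ptr);
        assert_eq!(ptr.as_ptr(), &mut x as *mut i32);
    }

    #[test]
    fn new_rejects_null() {
        assert!(NonNullMut::<i32>::new(core::ptr::null_mut()).is_none());
        let mut x = 7_u8;
        let ptr = NonNullMut::new(&mut x as *mut u8).expect("non-null");
        // SAFETY: `ptr` points at the live local `x`, and no other
        // reference to `x` exists while this one is in use.
        assert_eq!(unsafe { *ptr.as_ref() }, 7);
    }
}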