1#![doc = include_str!("../README.md")]
2#![no_std]
3#![forbid(unsafe_op_in_unsafe_fn)]
4
5use core::{cmp::Ordering, fmt, hash, marker::PhantomData, num::NonZeroUsize, ptr::NonNull};
6
#[doc = include_str!("../README.md")]
#[repr(transparent)]
pub struct NonNullMut<T: ?Sized> {
    // The wrapped non-null pointer; every method of this type delegates to it.
    inner: NonNull<T>,
    // Zero-sized marker: `PhantomData<*mut T>` makes `NonNullMut<T>` invariant
    // over `T`, unlike `NonNull<T>` which is covariant. `repr(transparent)`
    // remains valid because `PhantomData` is a 1-ZST.
    _phantom: PhantomData<*mut T>,
}
13
14impl<T: ?Sized> From<NonNull<T>> for NonNullMut<T> {
15 fn from(inner: NonNull<T>) -> Self {
16 Self {
17 inner,
18 _phantom: PhantomData,
19 }
20 }
21}
22
23impl<T: ?Sized> From<NonNullMut<T>> for NonNull<T> {
24 fn from(value: NonNullMut<T>) -> Self {
25 value.inner
26 }
27}
28
29impl<T> NonNullMut<T> {
30 #[inline]
42 #[must_use]
43 pub const fn dangling() -> Self {
44 let inner = NonNull::dangling();
45 Self {
46 inner,
47 _phantom: PhantomData,
48 }
49 }
50}
51
52impl<T: ?Sized> NonNullMut<T> {
53 #[inline]
68 pub const fn new(ptr: *mut T) -> Option<Self> {
69 match NonNull::new(ptr) {
70 Some(inner) => Some(Self {
71 inner,
72 _phantom: PhantomData,
73 }),
74 None => None,
75 }
76 }
77
78 #[inline]
83 pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
84 let inner = unsafe { NonNull::new_unchecked(ptr) };
85 Self {
86 inner,
87 _phantom: PhantomData,
88 }
89 }
90
91 pub const fn from_inner(inner: NonNull<T>) -> Self {
93 Self {
94 inner,
95 _phantom: PhantomData,
96 }
97 }
98
99 #[inline]
101 #[must_use]
102 pub fn addr(self) -> NonZeroUsize {
103 self.inner.addr()
104 }
105
106 #[inline]
108 #[must_use]
109 pub fn with_addr(self, addr: NonZeroUsize) -> Self {
110 self.inner.with_addr(addr).into()
111 }
112
113 #[inline]
115 #[must_use]
116 pub fn map_addr(self, f: impl FnOnce(NonZeroUsize) -> NonZeroUsize) -> Self {
117 self.inner.map_addr(f).into()
118 }
119
120 #[inline(always)]
122 #[must_use]
123 pub const fn as_ptr(self) -> *mut T {
124 self.inner.as_ptr()
125 }
126
127 pub const fn as_inner(self) -> NonNull<T> {
129 self.inner
130 }
131
132 #[inline(always)]
137 #[must_use]
138 pub const unsafe fn as_ref<'a>(&self) -> &'a T {
139 unsafe { self.inner.as_ref() }
140 }
141
142 #[inline(always)]
147 #[must_use]
148 pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
149 unsafe { self.inner.as_mut() }
150 }
151
152 #[inline]
154 #[must_use = "this returns the result of the operation, \
155 without modifying the original"]
156 pub const fn cast<U>(self) -> NonNull<U> {
157 self.inner.cast()
158 }
159
160 #[inline(always)]
165 #[must_use = "returns a new pointer rather than modifying its argument"]
166 pub const unsafe fn offset(self, count: isize) -> Self
167 where
168 T: Sized,
169 {
170 unsafe { Self::from_inner(self.inner.offset(count)) }
171 }
172
173 #[inline(always)]
178 #[must_use]
179 pub const unsafe fn byte_offset(self, count: isize) -> Self {
180 unsafe { Self::from_inner(self.inner.byte_offset(count)) }
181 }
182
183 #[inline(always)]
188 #[must_use = "returns a new pointer rather than modifying its argument"]
189 pub const unsafe fn add(self, count: usize) -> Self
190 where
191 T: Sized,
192 {
193 unsafe { Self::from_inner(self.inner.add(count)) }
194 }
195
196 #[inline(always)]
201 #[must_use]
202 pub const unsafe fn byte_add(self, count: usize) -> Self {
203 unsafe { Self::from_inner(self.inner.byte_add(count)) }
204 }
205
206 #[inline(always)]
211 #[must_use = "returns a new pointer rather than modifying its argument"]
212 pub const unsafe fn sub(self, count: usize) -> Self
213 where
214 T: Sized,
215 {
216 unsafe { Self::from_inner(self.inner.sub(count)) }
217 }
218
219 #[inline(always)]
224 #[must_use]
225 pub const unsafe fn byte_sub(self, count: usize) -> Self {
226 unsafe { Self::from_inner(self.inner.byte_sub(count)) }
227 }
228
229 #[inline]
234 pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
235 where
236 T: Sized,
237 {
238 unsafe { self.inner.offset_from(origin) }
239 }
240
241 #[inline(always)]
246 pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
247 unsafe { self.inner.byte_offset_from(origin) }
248 }
249
250 #[inline]
255 pub const unsafe fn read(self) -> T
256 where
257 T: Sized,
258 {
259 unsafe { self.inner.read() }
260 }
261
262 #[inline]
267 pub unsafe fn read_volatile(self) -> T
268 where
269 T: Sized,
270 {
271 unsafe { self.inner.read_volatile() }
272 }
273
274 #[inline]
279 pub const unsafe fn read_unaligned(self) -> T
280 where
281 T: Sized,
282 {
283 unsafe { self.inner.read_unaligned() }
284 }
285
286 #[inline(always)]
291 pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
292 where
293 T: Sized,
294 {
295 unsafe { self.inner.copy_to(dest, count) }
296 }
297
298 #[inline(always)]
303 pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
304 where
305 T: Sized,
306 {
307 unsafe { self.inner.copy_to_nonoverlapping(dest, count) }
308 }
309
310 #[inline(always)]
315 pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
316 where
317 T: Sized,
318 {
319 unsafe { self.inner.copy_from(src, count) }
320 }
321
322 #[inline(always)]
327 pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
328 where
329 T: Sized,
330 {
331 unsafe { self.inner.copy_from_nonoverlapping(src, count) }
332 }
333
334 #[inline(always)]
339 pub unsafe fn drop_in_place(self) {
340 unsafe { self.inner.drop_in_place() }
341 }
342
343 #[inline(always)]
348 pub const unsafe fn write(self, val: T)
349 where
350 T: Sized,
351 {
352 unsafe { self.inner.write(val) }
353 }
354
355 #[inline(always)]
360 pub const unsafe fn write_bytes(self, val: u8, count: usize)
361 where
362 T: Sized,
363 {
364 unsafe { self.inner.write_bytes(val, count) }
365 }
366
367 #[inline(always)]
372 pub unsafe fn write_volatile(self, val: T)
373 where
374 T: Sized,
375 {
376 unsafe { self.inner.write_volatile(val) }
377 }
378
379 #[inline(always)]
384 pub const unsafe fn write_unaligned(self, val: T)
385 where
386 T: Sized,
387 {
388 unsafe { self.inner.write_unaligned(val) }
389 }
390
391 #[inline(always)]
396 pub unsafe fn replace(self, src: T) -> T
397 where
398 T: Sized,
399 {
400 unsafe { self.inner.replace(src) }
401 }
402
403 #[inline(always)]
408 pub const unsafe fn swap(self, with: NonNull<T>)
409 where
410 T: Sized,
411 {
412 unsafe { self.inner.swap(with) }
413 }
414
415 #[inline]
417 #[must_use]
418 pub fn align_offset(self, align: usize) -> usize
419 where
420 T: Sized,
421 {
422 self.inner.align_offset(align)
423 }
424
425 #[inline]
427 #[must_use]
428 pub fn is_aligned(self) -> bool
429 where
430 T: Sized,
431 {
432 self.inner.is_aligned()
433 }
434}
435
436impl<T> NonNullMut<[T]> {
437 #[inline]
439 #[must_use]
440 pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
441 Self::from_inner(NonNull::slice_from_raw_parts(data, len))
442 }
443
444 #[inline]
446 #[must_use]
447 pub const fn len(self) -> usize {
448 self.inner.len()
449 }
450
451 #[inline]
453 #[must_use]
454 pub const fn is_empty(self) -> bool {
455 self.inner.is_empty()
456 }
457}
458
459impl<T: ?Sized> Clone for NonNullMut<T> {
460 #[inline(always)]
461 fn clone(&self) -> Self {
462 *self
463 }
464}
465
466impl<T: ?Sized> Copy for NonNullMut<T> {}
467
468impl<T: ?Sized> fmt::Debug for NonNullMut<T> {
469 #[inline(always)]
470 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
471 fmt::Pointer::fmt(&self.as_ptr(), f)
472 }
473}
474
475impl<T: ?Sized> fmt::Pointer for NonNullMut<T> {
476 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
477 fmt::Pointer::fmt(&self.as_ptr(), f)
478 }
479}
480
481impl<T: ?Sized> Eq for NonNullMut<T> {}
482
483#[allow(ambiguous_wide_pointer_comparisons)]
484impl<T: ?Sized> PartialEq for NonNullMut<T> {
485 #[inline]
486 fn eq(&self, other: &Self) -> bool {
487 self.as_ptr() == other.as_ptr()
488 }
489}
490
491#[allow(ambiguous_wide_pointer_comparisons)]
492impl<T: ?Sized> Ord for NonNullMut<T> {
493 #[inline]
494 fn cmp(&self, other: &Self) -> Ordering {
495 self.as_ptr().cmp(&other.as_ptr())
496 }
497}
498
499#[allow(ambiguous_wide_pointer_comparisons)]
500#[allow(clippy::non_canonical_partial_ord_impl)]
501impl<T: ?Sized> PartialOrd for NonNullMut<T> {
502 #[inline]
503 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
504 self.as_ptr().partial_cmp(&other.as_ptr())
505 }
506}
507
508#[allow(ambiguous_wide_pointer_comparisons)]
509impl<T: ?Sized> hash::Hash for NonNullMut<T> {
510 #[inline]
511 fn hash<H: hash::Hasher>(&self, state: &mut H) {
512 self.as_ptr().hash(state)
513 }
514}
515
516impl<T: ?Sized> From<&mut T> for NonNullMut<T> {
517 #[inline]
518 fn from(r: &mut T) -> Self {
519 NonNullMut::from_inner(r.into())
520 }
521}
522
523impl<T: ?Sized> From<&T> for NonNullMut<T> {
524 #[inline]
525 fn from(r: &T) -> Self {
526 NonNullMut::from_inner(r.into())
527 }
528}