non_null_const/lib.rs
#![no_std]
#![allow(internal_features)]
#![feature(
    sized_hierarchy,
    negative_impls,
    const_trait_impl,
    pin_coerce_unsized_trait,
    unsize,
    const_destruct,
    const_option_ops,
    intra_doc_pointers,
    const_convert
)]
#![cfg_attr(feature = "ptr_as_uninit", feature(ptr_as_uninit))]
#![cfg_attr(feature = "ptr_cast_array", feature(ptr_cast_array))]
#![cfg_attr(feature = "ptr_metadata", feature(ptr_metadata))]
#![cfg_attr(
    feature = "pointer_try_cast_aligned",
    feature(pointer_try_cast_aligned)
)]
#![cfg_attr(feature = "const_drop_in_place", feature(const_drop_in_place))]
#![cfg_attr(feature = "pointer_is_aligned_to", feature(pointer_is_aligned_to))]
#![cfg_attr(feature = "cast_maybe_uninit", feature(cast_maybe_uninit))]
#![cfg_attr(feature = "slice_ptr_get", feature(slice_ptr_get))]
#![cfg_attr(feature = "coerce_unsized", feature(coerce_unsized))]
#![cfg_attr(feature = "dispatch_from_dyn", feature(dispatch_from_dyn))]
#![cfg_attr(feature = "ptr_internals", feature(ptr_internals))]
#![cfg_attr(feature = "const_index", feature(const_index))]
#![cfg_attr(docsrs, feature(doc_cfg))]

//! This crate provides [`NonNullConst`], a **non-null, covariant** raw **const**
//! pointer type. It is conceptually the `*const` analogue of [`core::ptr::NonNull`].

#[cfg(any(feature = "slice_ptr_get", feature = "ptr_internals"))]
use cfg_tt::cfg_tt;

#[cfg(any(feature = "coerce_unsized", feature = "dispatch_from_dyn"))]
use core::marker::Unsize;
#[cfg(feature = "coerce_unsized")]
use core::ops::CoerceUnsized;
#[cfg(feature = "dispatch_from_dyn")]
use core::ops::DispatchFromDyn;
#[cfg(feature = "ptr_metadata")]
use core::ptr;
#[cfg(feature = "ptr_internals")]
use core::ptr::Unique;
#[cfg(feature = "slice_ptr_get")]
use core::slice::SliceIndex;
use core::{
    cmp::Ordering, fmt, hash, marker::PointeeSized, mem::MaybeUninit, num::NonZero,
    pin::PinCoerceUnsized,
};

pub use core::ptr::NonNull as NonNullMut;

/// `*const T` but non-zero and [covariant].
///
/// This is often the correct thing to use when building data structures using
/// raw pointers, but is ultimately more dangerous to use because of its additional
/// properties. If you're not sure if you should use `NonNullConst<T>`, just use `*const T`!
///
/// Unlike `*const T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<NonNullConst<T>>` has the same size as `*const T`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*const T`, `NonNullConst<T>` is covariant over `T`. This is usually the correct
/// choice for most data structures and safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`,
/// and `LinkedList`.
///
/// # Representation
///
/// Thanks to the [null pointer optimization],
/// `NonNullConst<T>` and `Option<NonNullConst<T>>`
/// are guaranteed to have the same size and alignment:
///
/// ```
/// use non_null_const::NonNullConst;
///
/// assert_eq!(size_of::<NonNullConst<i16>>(), size_of::<Option<NonNullConst<i16>>>());
/// assert_eq!(align_of::<NonNullConst<i16>>(), align_of::<Option<NonNullConst<i16>>>());
///
/// assert_eq!(size_of::<NonNullConst<str>>(), size_of::<Option<NonNullConst<str>>>());
/// assert_eq!(align_of::<NonNullConst<str>>(), align_of::<Option<NonNullConst<str>>>());
/// ```
///
/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
/// [null pointer optimization]: core::option#representation
#[repr(transparent)]
pub struct NonNullConst<T: PointeeSized>(NonNullMut<T>);

/// `NonNullConst` pointers are not `Send` because the data they reference may be aliased.
// N.B., this impl is unnecessary, but should provide better error messages.
impl<T: PointeeSized> !Send for NonNullConst<T> {}

/// `NonNullConst` pointers are not `Sync` because the data they reference may be aliased.
// N.B., this impl is unnecessary, but should provide better error messages.
impl<T: PointeeSized> !Sync for NonNullConst<T> {}

impl<T: Sized> NonNullConst<T> {
    /// Creates a pointer with the given address and no [provenance][core::ptr#provenance].
    ///
    /// For more details, see the equivalent method on a raw pointer,
    /// [`ptr::without_provenance`](core::ptr::without_provenance).
    ///
    /// This is a [Strict Provenance][core::ptr#strict-provenance] API.
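    ///
    /// A minimal sketch (the address here is arbitrary and carries no provenance,
    /// so the pointer must not be dereferenced):
    ///
    /// ```
    /// use core::num::NonZero;
    /// use non_null_const::NonNullConst;
    ///
    /// let ptr = NonNullConst::<u8>::without_provenance(NonZero::new(0x1000).unwrap());
    /// assert_eq!(ptr.addr().get(), 0x1000);
    /// ```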
    #[must_use]
    #[inline]
    pub const fn without_provenance(addr: NonZero<usize>) -> Self {
        Self(NonNullMut::without_provenance(addr))
    }

    /// Creates a new `NonNullConst` that is dangling, but well-aligned.
    ///
    /// This is useful for initializing types which lazily allocate, like
    /// `Vec::new` does.
    ///
    /// Note that the address of the returned pointer may potentially
    /// be that of a valid pointer, which means this must not be used
    /// as a "not yet initialized" sentinel value.
    /// Types that lazily allocate must track initialization by some other means.
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let ptr = NonNullConst::<u32>::dangling();
    /// // Important: don't try to access the value of `ptr` without
    /// // initializing it first! The pointer is not null but isn't valid either!
    /// ```
    #[must_use]
    #[inline]
    pub const fn dangling() -> Self {
        Self(NonNullMut::dangling())
    }

    /// Converts an address back to an immutable pointer, picking up some previously 'exposed'
    /// [provenance][core::ptr#provenance].
    ///
    /// For more details, see the equivalent method on a raw pointer,
    /// [`ptr::with_exposed_provenance`](core::ptr::with_exposed_provenance).
    ///
    /// This is an [Exposed Provenance][core::ptr#exposed-provenance] API.
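    ///
    /// A round-trip sketch (sound because the provenance is exposed first):
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let x = 5u32;
    /// let ptr = NonNullConst::from_ref(&x);
    /// // Expose the provenance and keep only the address...
    /// let addr = ptr.expose_provenance();
    /// // ...then reconstitute a usable pointer from that address.
    /// let ptr2 = NonNullConst::<u32>::with_exposed_provenance(addr);
    /// assert_eq!(unsafe { ptr2.read() }, 5);
    /// ```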
    #[inline]
    pub fn with_exposed_provenance(addr: NonZero<usize>) -> Self {
        Self(NonNullMut::with_exposed_provenance(addr))
    }

    /// Returns a shared reference to the value. In contrast to [`as_ref`], this does not
    /// require the value to be initialized.
    ///
    /// [`as_ref`]: NonNullConst::as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](core::ptr#pointer-to-reference-conversion).
    /// Note that because the created reference is to `MaybeUninit<T>`, the
    /// source pointer can point to uninitialized memory.
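    ///
    /// # Examples
    ///
    /// A minimal sketch (requires the `ptr_as_uninit` feature):
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use core::mem::MaybeUninit;
    /// use non_null_const::NonNullConst;
    ///
    /// let x = MaybeUninit::<u32>::uninit();
    /// let ptr = NonNullConst::from_ref(&x).cast::<u32>();
    /// // Unlike `as_ref`, this is sound even though `x` is uninitialized.
    /// let _uninit: &MaybeUninit<u32> = unsafe { ptr.as_uninit_ref() };
    /// ```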
    #[inline]
    #[must_use]
    #[cfg(feature = "ptr_as_uninit")]
    pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
        unsafe { self.0.as_uninit_ref() }
    }

    /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
    #[inline]
    #[cfg(feature = "ptr_cast_array")]
    pub const fn cast_array<const N: usize>(self) -> NonNullConst<[T; N]> {
        NonNullConst(self.0.cast())
    }
}

impl<T: PointeeSized> NonNullConst<T> {
    /// Creates a new `NonNullConst`.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let x = 0u32;
    /// let ptr = unsafe { NonNullConst::new_unchecked(&x as *const _) };
    /// ```
    ///
    /// *Incorrect* usage of this function:
    ///
    /// ```rust,no_run
    /// use non_null_const::NonNullConst;
    ///
    /// // NEVER DO THAT!!! This is undefined behavior. ⚠️
    /// let ptr = unsafe { NonNullConst::<u32>::new_unchecked(std::ptr::null()) };
    /// ```
    #[inline]
    #[track_caller]
    pub const unsafe fn new_unchecked(ptr: *const T) -> Self {
        unsafe { Self(NonNullMut::new_unchecked(ptr as *mut _)) }
    }

    /// Creates a new `NonNullConst` if `ptr` is non-null.
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let x = 0u32;
    /// let ptr = NonNullConst::<u32>::new(&x as *const _).expect("ptr is null!");
    ///
    /// if let Some(ptr) = NonNullConst::<u32>::new(std::ptr::null()) {
    ///     unreachable!();
    /// }
    /// ```
    #[inline]
    pub const fn new(ptr: *const T) -> Option<Self> {
        NonNullMut::new(ptr as *mut _).map(Self)
    }

    /// Converts a reference to a `NonNullConst` pointer.
    #[inline]
    pub const fn from_ref(r: &T) -> Self {
        Self(NonNullMut::from_ref(r))
    }

    /// Converts a mutable reference to a `NonNullConst` pointer.
    #[inline]
    pub const fn from_mut(r: &mut T) -> Self {
        Self(NonNullMut::from_mut(r))
    }

    /// Performs the same functionality as [`std::ptr::from_raw_parts`], except that a
    /// `NonNullConst` pointer is returned, as opposed to a raw `*const` pointer.
    ///
    /// See the documentation of [`std::ptr::from_raw_parts`] for more details.
    ///
    /// [`std::ptr::from_raw_parts`]: core::ptr::from_raw_parts
    #[cfg(feature = "ptr_metadata")]
    #[inline]
    pub const fn from_raw_parts(
        data_pointer: NonNullConst<impl ptr::Thin>,
        metadata: <T as ptr::Pointee>::Metadata,
    ) -> NonNullConst<T> {
        Self(NonNullMut::from_raw_parts(data_pointer.0, metadata))
    }

    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can be later reconstructed with [`NonNullConst::from_raw_parts`].
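    ///
    /// A round-trip sketch (requires the `ptr_metadata` feature):
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// use non_null_const::NonNullConst;
    ///
    /// let s: &[u8] = &[1, 2, 3];
    /// let ptr: NonNullConst<[u8]> = NonNullConst::from_ref(s);
    /// let (data, len) = ptr.to_raw_parts();
    /// // For slices, the metadata is the element count.
    /// assert_eq!(len, 3);
    /// assert_eq!(NonNullConst::<[u8]>::from_raw_parts(data, len), ptr);
    /// ```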
    #[cfg(feature = "ptr_metadata")]
    #[must_use = "this returns the result of the operation, \
                  without modifying the original"]
    #[inline]
    pub const fn to_raw_parts(self) -> (NonNullConst<()>, <T as ptr::Pointee>::Metadata) {
        let (data_pointer, metadata) = self.0.to_raw_parts();
        (NonNullConst::<()>(data_pointer), metadata)
    }

    /// Gets the "address" portion of the pointer.
    ///
    /// For more details, see the equivalent method on a raw pointer, [`pointer::addr`].
    ///
    /// This is a [Strict Provenance][core::ptr#strict-provenance] API.
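    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let x = 0u8;
    /// let ptr = NonNullConst::from_ref(&x);
    /// // The address of a live value is never zero, hence the `NonZero` return type.
    /// assert_ne!(ptr.addr().get(), 0);
    /// ```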
    #[must_use]
    #[inline]
    pub fn addr(self) -> NonZero<usize> {
        self.0.addr()
    }

    /// Exposes the ["provenance"][core::ptr#provenance] part of the pointer for future use in
    /// [`with_exposed_provenance`][NonNullConst::with_exposed_provenance] and returns the "address" portion.
    ///
    /// For more details, see the equivalent method on a raw pointer, [`pointer::expose_provenance`].
    ///
    /// This is an [Exposed Provenance][core::ptr#exposed-provenance] API.
    pub fn expose_provenance(self) -> NonZero<usize> {
        self.0.expose_provenance()
    }

    /// Creates a new pointer with the given address and the [provenance][core::ptr#provenance] of
    /// `self`.
    ///
    /// For more details, see the equivalent method on a raw pointer, [`pointer::with_addr`].
    ///
    /// This is a [Strict Provenance][core::ptr#strict-provenance] API.
    #[must_use]
    #[inline]
    pub fn with_addr(self, addr: NonZero<usize>) -> Self {
        Self(self.0.with_addr(addr))
    }

    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
    /// [provenance][core::ptr#provenance] of `self`.
    ///
    /// For more details, see the equivalent method on a raw pointer, [`pointer::map_addr`].
    ///
    /// This is a [Strict Provenance][core::ptr#strict-provenance] API.
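    ///
    /// For instance, an address can be rounded down to an alignment boundary while
    /// keeping the original provenance (a sketch; the result is only valid to use
    /// while it stays inside the same allocation):
    ///
    /// ```
    /// use core::num::NonZero;
    /// use non_null_const::NonNullConst;
    ///
    /// let x = 0u64;
    /// let ptr = NonNullConst::from_ref(&x);
    /// // Round the address down to a multiple of 8.
    /// let aligned = ptr.map_addr(|a| NonZero::new(a.get() & !7).unwrap());
    /// // `&x` is already 8-aligned, so nothing moved.
    /// assert_eq!(aligned, ptr);
    /// ```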
    #[must_use]
    #[inline]
    pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
        Self(self.0.map_addr(f))
    }

    /// Acquires the underlying `*const` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let x = 0u32;
    /// let ptr = NonNullConst::new(&x).expect("ptr is null!");
    ///
    /// let x_value = unsafe { *ptr.as_ptr() };
    /// assert_eq!(x_value, 0);
    /// ```
    #[must_use]
    #[inline(always)]
    pub const fn as_ptr(self) -> *const T {
        self.0.as_ptr()
    }

    /// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// [`as_uninit_ref`]: NonNullConst::as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](core::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let x = 0u32;
    /// let ptr = NonNullConst::new(&x as *const _).expect("ptr is null!");
    ///
    /// let ref_x = unsafe { ptr.as_ref() };
    /// println!("{ref_x}");
    /// ```
    #[must_use]
    #[inline(always)]
    pub const unsafe fn as_ref<'a>(&self) -> &'a T {
        unsafe { self.0.as_ref() }
    }

    /// Casts to a pointer of another type.
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let x = 0u32;
    /// let ptr = NonNullConst::new(&x as *const _).expect("null pointer");
    ///
    /// let casted_ptr = ptr.cast::<i8>();
    /// let raw_ptr: *const i8 = casted_ptr.as_ptr();
    /// ```
    #[must_use = "this returns the result of the operation, \
                  without modifying the original"]
    #[inline]
    pub const fn cast<U>(self) -> NonNullConst<U> {
        NonNullConst(self.0.cast())
    }

    /// Try to cast to a pointer of another type by checking alignment.
    ///
    /// If the pointer is properly aligned to the target type, it will be
    /// cast to the target type. Otherwise, `None` is returned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(pointer_try_cast_aligned)]
    /// use non_null_const::NonNullConst;
    ///
    /// let x = 0u64;
    ///
    /// let aligned = NonNullConst::from_ref(&x);
    /// let unaligned = unsafe { aligned.byte_add(1) };
    ///
    /// assert!(aligned.try_cast_aligned::<u32>().is_some());
    /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
    /// ```
    #[cfg(feature = "pointer_try_cast_aligned")]
    #[must_use = "this returns the result of the operation, \
                  without modifying the original"]
    #[inline]
    pub fn try_cast_aligned<U>(self) -> Option<NonNullConst<U>> {
        self.0.try_cast_aligned().map(NonNullConst::<U>)
    }

    /// Adds an offset to a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// [allocation]: core::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let s = [1, 2, 3];
    /// let ptr: NonNullConst<u32> = NonNullConst::new(s.as_ptr()).unwrap();
    ///
    /// unsafe {
    ///     println!("{}", ptr.offset(1).read());
    ///     println!("{}", ptr.offset(2).read());
    /// }
    /// ```
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn offset(self, count: isize) -> Self
    where
        T: Sized,
    {
        unsafe { Self(self.0.offset(count)) }
    }

    /// Calculates the offset from a pointer in bytes.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset`][NonNullConst::offset] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::missing_safety_doc)]
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        unsafe { Self(self.0.byte_offset(count)) }
    }

    /// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// [allocation]: core::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let s: &str = "123";
    /// let ptr: NonNullConst<u8> = NonNullConst::new(s.as_ptr().cast()).unwrap();
    ///
    /// unsafe {
    ///     println!("{}", ptr.add(1).read() as char);
    ///     println!("{}", ptr.add(2).read() as char);
    /// }
    /// ```
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        unsafe { Self(self.0.add(count)) }
    }

    /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`add`][NonNullConst::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
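    ///
    /// For example:
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let x = [1u8, 2, 3, 4];
    /// let ptr = NonNullConst::from_ref(&x).cast::<u8>();
    /// // Step one byte forward within the same allocation.
    /// assert_eq!(unsafe { ptr.byte_add(1).read() }, 2);
    /// ```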
    #[must_use]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::missing_safety_doc)]
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        unsafe { Self(self.0.byte_add(count)) }
    }

    /// Subtracts an offset from a pointer (convenience for
    /// `.offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// [allocation]: core::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: NonNullConst<u8> = NonNullConst::new(s.as_ptr().cast()).unwrap().add(3);
    ///     println!("{}", end.sub(1).read() as char);
    ///     println!("{}", end.sub(2).read() as char);
    /// }
    /// ```
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        unsafe { Self(self.0.sub(count)) }
    }

    /// Calculates the offset from a pointer in bytes (convenience for
    /// `.byte_offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`sub`][NonNullConst::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::missing_safety_doc)]
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        unsafe { Self(self.0.byte_sub(count)) }
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this use case.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be *derived from* a pointer to the same [allocation], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocation can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocation is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [allocation]: core::ptr#allocation
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let a = [0; 5];
    /// let ptr1: NonNullConst<u32> = NonNullConst::from(&a[1]);
    /// let ptr2: NonNullConst<u32> = NonNullConst::from(&a[3]);
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// use non_null_const::NonNullConst;
    ///
    /// let ptr1 = NonNullConst::new(Box::into_raw(Box::new(0u8))).unwrap();
    /// let ptr2 = NonNullConst::new(Box::into_raw(Box::new(1u8))).unwrap();
    /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let diff_plus_1 = diff.wrapping_add(1);
    /// let ptr2_other = NonNullConst::new(ptr1.as_ptr().wrapping_byte_offset(diff_plus_1)).unwrap();
    /// assert_eq!(ptr2.addr().get() + 1, ptr2_other.addr().get());
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    ///
    /// let one = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior! ⚠️
    /// ```
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from(self, origin: NonNullConst<T>) -> isize
    where
        T: Sized,
    {
        unsafe { self.0.offset_from(origin.0) }
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from`][NonNullConst::offset_from] on it. See that method for
    /// documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
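    ///
    /// For example:
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let a = [0u32; 4];
    /// let start = NonNullConst::from_ref(&a[0]);
    /// let end = NonNullConst::from_ref(&a[3]);
    /// // Three `u32` elements apart, i.e. 12 bytes.
    /// assert_eq!(unsafe { end.byte_offset_from(start) }, 12);
    /// ```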
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::missing_safety_doc)]
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNullConst<U>) -> isize {
        unsafe { self.0.byte_offset_from(origin.0) }
    }

    // N.B. `wrapping_offset`, `wrapping_add`, etc. are not implemented because they can wrap to null.

    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative. This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)). The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # unsafe fn blah(ptr: non_null_const::NonNullConst<u32>, origin: non_null_const::NonNullConst<u32>, count: usize) -> bool { unsafe {
    /// ptr.offset_from_unsigned(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # } }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`)
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*. As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let a = [0; 5];
    /// let ptr1: NonNullConst<u32> = NonNullConst::from(&a[1]);
    /// let ptr2: NonNullConst<u32> = NonNullConst::from(&a[3]);
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.offset_from_unsigned(ptr2)
    /// ```
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from_unsigned(self, subtracted: NonNullConst<T>) -> usize
    where
        T: Sized,
    {
        unsafe { self.0.offset_from_unsigned(subtracted.0) }
    }

    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from_unsigned`][NonNullConst::offset_from_unsigned] on it.
    /// See that method for documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::missing_safety_doc)]
    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(
        self,
        origin: NonNullConst<U>,
    ) -> usize {
        unsafe { self.0.byte_offset_from_unsigned(origin.0) }
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: core::ptr::read()
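    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let x = 12u32;
    /// let ptr = NonNullConst::from_ref(&x);
    /// // Reading through the pointer leaves `x` untouched.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```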
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::missing_safety_doc)]
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        unsafe { self.0.read() }
    }

    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: core::ptr::read_volatile()
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::missing_safety_doc)]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        unsafe { self.0.read_volatile() }
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: core::ptr::read_unaligned()
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::missing_safety_doc)]
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        unsafe { self.0.read_unaligned() }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: core::ptr::copy()
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::missing_safety_doc)]
    pub const unsafe fn copy_to(self, dest: NonNullConst<T>, count: usize)
    where
        T: Sized,
    {
        unsafe {
            self.0.copy_to(dest.0, count);
        }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: core::ptr::copy_nonoverlapping()
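    ///
    /// A sketch; note that even though `dest` has const type, it must point to
    /// memory that is valid for writes:
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// let src = [1u32, 2, 3];
    /// let mut dst = [0u32; 3];
    /// let src_ptr = NonNullConst::new(src.as_ptr()).unwrap();
    /// // `as_mut_ptr` keeps write provenance for the whole array.
    /// let dst_ptr = NonNullConst::new(dst.as_mut_ptr().cast_const()).unwrap();
    /// unsafe { src_ptr.copy_to_nonoverlapping(dst_ptr, 3) };
    /// assert_eq!(dst, src);
    /// ```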
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::missing_safety_doc)]
    pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNullConst<T>, count: usize)
    where
        T: Sized,
    {
        unsafe {
            self.0.copy_to_nonoverlapping(dest.0, count);
        }
    }

    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// When this is called during compile-time evaluation (which is unstable), the implementation
    /// may return `usize::MAX` in cases where that can never happen at runtime. This is because the
    /// actual alignment of pointers is not known yet during compile-time, so an offset with
    /// guaranteed alignment can sometimes not be computed. For example, a buffer declared as `[u8;
    /// N]` might be allocated at an odd or an even address, but at compile-time this is not yet
    /// known, so the execution has to be correct for either choice. It is therefore impossible to
    /// find an offset that is guaranteed to be 2-aligned. (This behavior is subject to change, as usual
    /// for unstable APIs.)
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// # unsafe {
    /// let x = [5_u8, 6, 7, 8, 9];
    /// let ptr = NonNullConst::new(x.as_ptr() as *const u8).unwrap();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7]));
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[inline]
    #[must_use]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        self.0.align_offset(align)
    }

    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// use non_null_const::NonNullConst;
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = NonNullConst::<AlignedI32>::from(&data);
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!NonNullConst::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned());
    /// ```
    #[inline]
    #[must_use]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        self.0.is_aligned()
    }

    /// Returns whether the pointer is aligned to `align`.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointer,
    /// ignoring the metadata.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two (this includes 0).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned_to)]
    /// use non_null_const::NonNullConst;
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = NonNullConst::<AlignedI32>::from(&data);
    ///
    /// assert!(ptr.is_aligned_to(1));
    /// assert!(ptr.is_aligned_to(2));
    /// assert!(ptr.is_aligned_to(4));
    ///
    /// assert!(NonNullConst::new(ptr.as_ptr().wrapping_byte_add(2)).unwrap().is_aligned_to(2));
    /// assert!(!NonNullConst::new(ptr.as_ptr().wrapping_byte_add(2)).unwrap().is_aligned_to(4));
    ///
    /// assert_ne!(ptr.is_aligned_to(8), NonNullConst::new(ptr.as_ptr().wrapping_add(1)).unwrap().is_aligned_to(8));
    /// ```
    #[inline]
    #[must_use]
    #[cfg(feature = "pointer_is_aligned_to")]
    pub fn is_aligned_to(self, align: usize) -> bool {
        self.0.is_aligned_to(align)
    }
}

impl<T> NonNullConst<T> {
    /// Casts from a type to its maybe-uninitialized version.
    #[must_use]
    #[inline(always)]
    #[cfg(feature = "cast_maybe_uninit")]
    pub const fn cast_uninit(self) -> NonNullConst<MaybeUninit<T>> {
        NonNullConst(self.0.cast())
    }
}

impl<T> NonNullConst<MaybeUninit<T>> {
    /// Casts from a maybe-uninitialized type to its initialized version.
    ///
    /// The cast itself is always safe; UB can only occur if the pointed-to value
    /// is read before it has been initialized.
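    ///
    /// A minimal sketch (requires the `cast_maybe_uninit` feature):
    ///
    /// ```
    /// #![feature(cast_maybe_uninit)]
    /// use core::mem::MaybeUninit;
    /// use non_null_const::NonNullConst;
    ///
    /// let x = MaybeUninit::new(7u32);
    /// let ptr = NonNullConst::from_ref(&x);
    /// // `x` was initialized above, so reading through the cast pointer is sound.
    /// assert_eq!(unsafe { ptr.cast_init().read() }, 7);
    /// ```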
    #[must_use]
    #[inline(always)]
    #[cfg(feature = "cast_maybe_uninit")]
    pub const fn cast_init(self) -> NonNullConst<T> {
        NonNullConst(self.0.cast())
    }
}

impl<T> NonNullConst<[T]> {
    /// Creates a non-null raw slice from a thin pointer and a length.
    ///
    /// The `len` argument is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, but dereferencing the return value is unsafe.
    /// See the documentation of [`slice::from_raw_parts`](core::slice::from_raw_parts) for slice safety requirements.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use non_null_const::NonNullConst;
    ///
    /// // create a slice pointer when starting out with a pointer to the first element
    /// let x = [5, 6, 7];
    /// let nonnullconst_pointer = NonNullConst::new(x.as_ptr()).unwrap();
    /// let slice = NonNullConst::slice_from_raw_parts(nonnullconst_pointer, 3);
    /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
    /// ```
    ///
    /// (Note that this example artificially demonstrates a use of this method,
    /// but `let slice = NonNullConst::from(&x[..]);` would be a better way to write code like this.)
    #[must_use]
    #[inline]
    pub const fn slice_from_raw_parts(data: NonNullConst<T>, len: usize) -> Self {
        Self(NonNullMut::slice_from_raw_parts(data.0, len))
    }

    /// Returns the length of a non-null raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the non-null raw slice cannot be dereferenced to a slice
    /// because the pointer does not have a valid address.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use non_null_const::NonNullConst;
    ///
    /// let slice: NonNullConst<[i8]> = NonNullConst::slice_from_raw_parts(NonNullConst::dangling(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[must_use]
    #[inline]
    pub const fn len(self) -> usize {
        self.0.len()
    }

    /// Returns `true` if the non-null raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use non_null_const::NonNullConst;
    ///
    /// let slice: NonNullConst<[i8]> = NonNullConst::slice_from_raw_parts(NonNullConst::dangling(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[must_use]
    #[inline]
    pub const fn is_empty(self) -> bool {
        self.0.is_empty()
    }

    /// Returns a non-null pointer to the slice's buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use non_null_const::NonNullConst;
    ///
    /// let slice: NonNullConst<[i8]> = NonNullConst::slice_from_raw_parts(NonNullConst::dangling(), 3);
    /// assert_eq!(slice.as_non_null_ptr(), NonNullConst::<i8>::dangling());
    /// ```
    #[inline]
    #[must_use]
    #[cfg(feature = "slice_ptr_get")]
    pub const fn as_non_null_ptr(self) -> NonNullConst<T> {
        NonNullConst(self.0.as_non_null_ptr())
    }

    /// Returns a shared reference to a slice of possibly uninitialized values. In contrast to
    /// [`as_ref`], this does not require the values to be initialized.
    ///
    /// [`as_ref`]: NonNullConst::as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single allocation!
    ///       Slices can never span across multiple allocations.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNullConst::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`](core::slice::from_raw_parts).
    ///
    /// [valid]: core::ptr#safety
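    ///
    /// # Examples
    ///
    /// A minimal sketch (requires the `ptr_as_uninit` feature):
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use non_null_const::NonNullConst;
    ///
    /// let x = [1u8, 2, 3];
    /// let ptr = NonNullConst::slice_from_raw_parts(NonNullConst::new(x.as_ptr()).unwrap(), 3);
    /// // The elements happen to be initialized here, but the `MaybeUninit`
    /// // view would be fine even if they were not.
    /// let uninit = unsafe { ptr.as_uninit_slice() };
    /// assert_eq!(uninit.len(), 3);
    /// ```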
    #[inline]
    #[must_use]
    #[cfg(feature = "ptr_as_uninit")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> &'a [MaybeUninit<T>] {
        unsafe { self.0.as_uninit_slice() }
    }

    #[cfg(feature = "slice_ptr_get")]
    cfg_tt! {
        /// Returns a raw pointer to an element or subslice, without doing bounds
        /// checking.
        ///
        /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
        /// is *[undefined behavior]* even if the resulting pointer is not used.
        ///
        /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
        ///
        /// # Examples
        ///
        /// ```
        /// #![feature(slice_ptr_get)]
        /// use non_null_const::NonNullConst;
        ///
        /// let x = &[1, 2, 4];
        /// let x = NonNullConst::slice_from_raw_parts(NonNullConst::new(x.as_ptr()).unwrap(), x.len());
        ///
        /// unsafe {
        ///     assert_eq!(x.get_unchecked(1).as_ptr(), x.as_non_null_ptr().as_ptr().add(1));
        /// }
        /// ```
        #[inline]
        pub #[cfg(feature = "const_index")] const unsafe fn get_unchecked<I>(self, index: I) -> NonNullConst<I::Output>
        where
            I: #[cfg(feature = "const_index")]([const]) SliceIndex<[T]>,
        {
            unsafe { NonNullConst(self.0.get_unchecked_mut(index)) }
        }
    }
}

#[allow(clippy::non_canonical_clone_impl)]
impl<T: PointeeSized> Clone for NonNullConst<T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        Self(self.0)
    }
}

impl<T: PointeeSized> Copy for NonNullConst<T> {}

#[cfg(feature = "coerce_unsized")]
impl<T: PointeeSized, U: PointeeSized> CoerceUnsized<NonNullConst<U>> for NonNullConst<T> where
    T: Unsize<U>
{
}

#[cfg(feature = "dispatch_from_dyn")]
impl<T: PointeeSized, U: PointeeSized> DispatchFromDyn<NonNullConst<U>> for NonNullConst<T> where
    T: Unsize<U>
{
}

unsafe impl<T: PointeeSized> PinCoerceUnsized for NonNullConst<T> {}

impl<T: PointeeSized> fmt::Debug for NonNullConst<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl<T: PointeeSized> fmt::Pointer for NonNullConst<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl<T: PointeeSized> Eq for NonNullConst<T> {}

impl<T: PointeeSized> PartialEq for NonNullConst<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &Self) -> bool {
        self.0.eq(&other.0)
    }
}

impl<T: PointeeSized> Ord for NonNullConst<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.cmp(&other.0)
    }
}

#[allow(clippy::non_canonical_partial_ord_impl)]
impl<T: PointeeSized> PartialOrd for NonNullConst<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.0.partial_cmp(&other.0)
    }
}

impl<T: PointeeSized> hash::Hash for NonNullConst<T> {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

#[cfg(feature = "ptr_internals")]
cfg_tt! {
impl<T: PointeeSized> #[cfg(feature = "const_convert")] const From<Unique<T>> for NonNullConst<T> {
    #[inline]
    fn from(unique: Unique<T>) -> Self {
        Self(NonNullMut::from(unique))
    }
}
}

impl<T: PointeeSized> const From<&mut T> for NonNullConst<T> {
    /// Converts a `&mut T` to a `NonNullConst<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
    #[inline]
    fn from(r: &mut T) -> Self {
        Self(NonNullMut::from(r))
    }
}

impl<T: PointeeSized> const From<&T> for NonNullConst<T> {
    /// Converts a `&T` to a `NonNullConst<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
    #[inline]
    fn from(r: &T) -> Self {
        Self(NonNullMut::from(r))
    }
}

// --- Extra traits and methods ---
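/// Converts a mutable `NonNull` pointer into its `NonNullConst` counterpart.
///
/// Dropping type-level mutability this way is always fine; the address and
/// provenance are unchanged. A minimal sketch:
///
/// ```
/// use non_null_const::{NonNullConst, NonNullMut};
///
/// let mut x = 3u32;
/// let ptr_mut = NonNullMut::from(&mut x);
/// let ptr_const = NonNullConst::from(ptr_mut);
/// assert_eq!(ptr_const.as_ptr(), ptr_mut.as_ptr().cast_const());
/// ```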
impl<T: PointeeSized> const From<NonNullMut<T>> for NonNullConst<T> {
    fn from(ptr: NonNullMut<T>) -> Self {
        Self(ptr)
    }
}

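/// Converts a `NonNullConst` pointer back into the `NonNull` it wraps.
///
/// Note that this recovers only type-level mutability: whether writing through
/// the result is allowed still depends on how the original pointer was derived.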
impl<T: PointeeSized> const From<NonNullConst<T>> for NonNullMut<T> {
    fn from(ptr: NonNullConst<T>) -> Self {
        ptr.0
    }
}