use super::NumBits;
use core::cmp::Ordering;
use core::hash::{Hash, Hasher};
use core::marker::PhantomData;
use core::ptr::NonNull;
/// Checks that `align_offset` actually produced an offset.
///
/// `pointer::align_offset` is permitted to return `usize::MAX` to signal
/// that it could not compute an offset; this crate depends on it
/// succeeding, so that case is reported as an internal error.
fn validate_align_offset(offset: usize) {
    if offset == usize::MAX {
        panic!(
            "error: align_offset returned usize::MAX\n\n\
            This should not happen; please file an issue at:\n{}",
            env!("CARGO_PKG_REPOSITORY"),
        );
    }
}
/// Type-erased storage for a tagged pointer.
///
/// The single `NonNull<u8>` holds the pointer's byte address with the tag
/// folded into it (the constructors add the tag to the address).
/// `#[repr(transparent)]` guarantees this struct has exactly the layout of
/// `NonNull<u8>`.
///
/// The `PhantomData<(NonNull<T>, fn() -> B)>` marker carries the type
/// parameters without storing any data: `NonNull<T>` keeps the type
/// covariant in `T`, like a pointer to `T`, while `fn() -> B` mentions `B`
/// without letting `B` affect auto traits such as `Send`/`Sync`
/// (function pointers implement both).
#[repr(transparent)]
pub(super) struct PtrImpl<T, B = PhantomData<T>>(
    NonNull<u8>,
    #[allow(clippy::type_complexity)] PhantomData<(NonNull<T>, fn() -> B)>,
);
impl<T, B: NumBits> PtrImpl<T, B> {
    /// Creates a tagged pointer, verifying `ptr`'s alignment at runtime.
    ///
    /// Only the low bits of `tag` are kept (`tag & Self::MASK`); any
    /// excess high bits are silently discarded.
    ///
    /// # Panics
    ///
    /// Panics if `ptr` is not aligned to at least `Self::ALIGNMENT`.
    pub fn new(ptr: NonNull<T>, tag: usize) -> Self {
        Self::assert();
        let byte_ptr = ptr.as_ptr().cast::<u8>();
        // Distance (in bytes) from `byte_ptr` up to the next multiple of
        // `ALIGNMENT`; 0 means already aligned. `usize::MAX` means
        // `align_offset` failed, which is treated as an internal error.
        let offset = byte_ptr.align_offset(Self::ALIGNMENT);
        validate_align_offset(offset);
        // For a `u8` pointer the offset is below `ALIGNMENT`, so checking
        // its low bits against `MASK` is equivalent to `offset == 0`.
        assert!(offset & Self::MASK == 0, "`ptr` is not aligned enough");
        // SAFETY: alignment was checked just above, and `tag & MASK` is
        // strictly less than `ALIGNMENT`, as `new_unchecked` requires.
        unsafe { Self::new_unchecked(ptr, tag & Self::MASK) }
    }
    /// Creates a tagged pointer without any runtime alignment check.
    ///
    /// The tag is folded in by adding it to the byte address with
    /// `wrapping_add`, which stays sound for pointers that may not be
    /// dereferenceable.
    ///
    /// # Safety
    ///
    /// `ptr` must be aligned to at least `Self::ALIGNMENT`, and `tag`
    /// must be less than `Self::ALIGNMENT`; only then can `get` recover
    /// both values from the combined address.
    pub unsafe fn new_unchecked(ptr: NonNull<T>, tag: usize) -> Self {
        Self::assert();
        debug_assert!(tag < Self::ALIGNMENT);
        let tagged = ptr.as_ptr().cast::<u8>().wrapping_add(tag);
        // SAFETY: given the alignment precondition, a non-null aligned
        // address plus a tag smaller than `ALIGNMENT` cannot wrap to null.
        Self(unsafe { NonNull::new_unchecked(tagged) }, PhantomData)
    }
    /// Like [`Self::new_unchecked`], but uses `add` instead of
    /// `wrapping_add`, which presumably enables better optimization for
    /// pointers known to be dereferenceable.
    ///
    /// # Safety
    ///
    /// Same contract as [`Self::new_unchecked`], and additionally
    /// `ptr + tag` must satisfy `pointer::add`'s requirement of staying
    /// within (or one past) the same allocated object.
    pub unsafe fn new_unchecked_dereferenceable(
        ptr: NonNull<T>,
        tag: usize,
    ) -> Self {
        Self::assert();
        debug_assert!(tag < Self::ALIGNMENT);
        let tagged = unsafe { ptr.as_ptr().cast::<u8>().add(tag) };
        Self(unsafe { NonNull::new_unchecked(tagged) }, PhantomData)
    }
    /// Splits the tagged pointer back into the original pointer and tag.
    pub fn get(self) -> (NonNull<T>, usize) {
        let ptr = self.0.as_ptr();
        // Distance up to the next `ALIGNMENT` boundary. Because the
        // original pointer was aligned, the stored tag equals
        // `ALIGNMENT - offset`, except an offset of 0 (tag 0) must map
        // back to 0 — hence the final `& MASK`.
        let offset = ptr.align_offset(Self::ALIGNMENT);
        validate_align_offset(offset);
        let tag = Self::ALIGNMENT.wrapping_sub(offset) & Self::MASK;
        // Remove the tag to restore the original, aligned pointer.
        let ptr = ptr.wrapping_sub(tag).cast::<T>();
        // Subtracting the tag only undoes the add performed at
        // construction, so the result is the original non-null pointer.
        debug_assert!(!ptr.is_null());
        (unsafe { NonNull::new_unchecked(ptr) }, tag)
    }
}
impl<T, B> PartialEq for PtrImpl<T, B> {
fn eq(&self, other: &Self) -> bool {
self.0 == other.0
}
}
impl<T, B> Ord for PtrImpl<T, B> {
fn cmp(&self, other: &Self) -> Ordering {
self.0.cmp(&other.0)
}
}
impl<T, B> Hash for PtrImpl<T, B> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.hash(state);
}
}