// Polyfill for the unstable strict-provenance pointer APIs, plus helpers
// for packing tag bits into the unused low bits of aligned pointers.
use std::mem::align_of;
use std::sync::atomic::{AtomicPtr, Ordering};

/// A polyfill for the strict-provenance pointer APIs, extended with tag unpacking.
#[allow(clippy::missing_safety_doc)]
#[allow(dead_code)]
pub unsafe trait StrictProvenance<T>: Sized {
    /// Returns the address portion of this pointer.
    fn addr(self) -> usize;
    /// Returns a pointer with its address replaced by `f(self.addr())`.
    fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self;
    /// Splits this pointer into its untagged pointer and tag bits.
    fn unpack(self) -> Tagged<T>
    where
        T: Unpack;
}

/// A type whose pointers can carry tag bits in their unused low bits.
pub trait Unpack: Sized {
    /// The bits of the address that point to the value; the inverted mask is the tag space.
    const MASK: usize;
    /// Asserts, at compile time, that the type's alignment leaves the tag bits free.
    const ASSERT_ALIGNMENT: () = assert!(align_of::<Self>() > !Self::MASK);
}

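// Illustrative sketch: a hypothetical 8-byte-aligned entry type whose
// pointers reserve the three low alignment bits for tags. The `ExampleEntry`
// name and mask value are assumptions used only by the tests at the bottom
// of this file, not part of the original API.
#[cfg(test)]
#[repr(align(8))]
struct ExampleEntry {
    _value: u64,
}

#[cfg(test)]
impl Unpack for ExampleEntry {
    // All bits except the three low (alignment) bits address the entry;
    // the inverted mask (0b111) is the space available for tag bits.
    const MASK: usize = !0b111;
}
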
unsafe impl<T> StrictProvenance<T> for *mut T {
    #[inline(always)]
    fn addr(self) -> usize {
        self as usize
    }

    #[inline(always)]
    fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
        // Note: this round-trips through `usize`, so it does not preserve
        // strict provenance; it is a stable-compatible polyfill.
        f(self.addr()) as Self
    }

    #[inline(always)]
    fn unpack(self) -> Tagged<T>
    where
        T: Unpack,
    {
        // Force evaluation of the alignment assertion for this type.
        let () = T::ASSERT_ALIGNMENT;

        Tagged {
            raw: self,
            ptr: self.map_addr(|addr| addr & T::MASK),
        }
    }
}

/// A tagged pointer: the raw pointer alongside its untagged form.
pub struct Tagged<T> {
    /// The raw pointer, including any tag bits.
    pub raw: *mut T,
    /// The pointer with the tag bits masked out.
    pub ptr: *mut T,
}

/// Constructs a `Tagged` from a pointer that carries no tag bits.
#[inline]
pub fn untagged<T>(value: *mut T) -> Tagged<T> {
    Tagged {
        raw: value,
        ptr: value,
    }
}

impl<T> Tagged<T>
where
    T: Unpack,
{
    /// Returns the tag bits stored in the raw pointer.
    #[inline]
    pub fn tag(self) -> usize {
        self.raw.addr() & !T::MASK
    }

    /// Applies `f` to the raw pointer's address, leaving the untagged
    /// pointer unchanged.
    #[inline]
    pub fn map_tag(self, f: impl FnOnce(usize) -> usize) -> Self {
        Tagged {
            raw: self.raw.map_addr(f),
            ptr: self.ptr,
        }
    }
}

impl<T> Copy for Tagged<T> {}

impl<T> Clone for Tagged<T> {
    fn clone(&self) -> Self {
        *self
    }
}

/// An extension trait adding `fetch_or` to `AtomicPtr`.
#[allow(dead_code)]
pub trait AtomicPtrFetchOps<T> {
    /// Bitwise "or"s `value` into the address, returning the previous pointer.
    fn fetch_or(&self, value: usize, ordering: Ordering) -> *mut T;
}

impl<T> AtomicPtrFetchOps<T> for AtomicPtr<T> {
    #[inline]
    fn fetch_or(&self, value: usize, ordering: Ordering) -> *mut T {
        #[cfg(not(miri))]
        {
            use std::sync::atomic::AtomicUsize;

            // Reinterpret the `AtomicPtr` as an `AtomicUsize` and perform
            // the `fetch_or` directly on the address bits.
            unsafe { &*(self as *const AtomicPtr<T> as *const AtomicUsize) }
                .fetch_or(value, ordering) as *mut T
        }

        #[cfg(miri)]
        {
            // Miri is stricter about the integer cast above, so fall back
            // to a compare-exchange loop via `fetch_update`.
            const fn read_ordering(ordering: Ordering) -> Ordering {
                // Derive a valid failure ordering for `fetch_update`.
                match ordering {
                    Ordering::SeqCst => Ordering::SeqCst,
                    Ordering::AcqRel => Ordering::Acquire,
                    _ => Ordering::Relaxed,
                }
            }

            // The closure always returns `Some`, so the update cannot fail.
            self.fetch_update(ordering, read_ordering(ordering), |ptr| {
                Some(ptr.map_addr(|addr| addr | value))
            })
            .unwrap()
        }
    }
}

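// A minimal sketch of how these helpers compose, assuming the hypothetical
// `ExampleEntry` type defined above; the tests illustrate expected behavior
// rather than any particular caller of this module.
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::atomic::{AtomicPtr, Ordering};

    #[test]
    fn tag_round_trip() {
        // An 8-byte-aligned value leaves the three low address bits free.
        let mut entry = ExampleEntry { _value: 0 };
        let ptr: *mut ExampleEntry = &mut entry;

        // A freshly unpacked, untagged pointer carries no tag bits.
        let tagged = ptr.unpack();
        assert_eq!(tagged.tag(), 0);
        assert_eq!(tagged.ptr, ptr);

        // Setting tag bits leaves the untagged pointer untouched.
        let tagged = tagged.map_tag(|addr| addr | 0b101);
        assert_eq!(tagged.tag(), 0b101);
        assert_eq!(tagged.ptr, ptr);

        // Unpacking the raw (tagged) pointer recovers the original address.
        assert_eq!(tagged.raw.unpack().ptr, ptr);
    }

    #[test]
    fn fetch_or_sets_tag_bits() {
        let mut entry = ExampleEntry { _value: 0 };
        let ptr: *mut ExampleEntry = &mut entry;
        let atomic = AtomicPtr::new(ptr);

        // `fetch_or` returns the previous pointer, which was untagged...
        assert_eq!(atomic.fetch_or(0b1, Ordering::AcqRel), ptr);
        // ...and the stored pointer now carries the tag bit.
        assert_eq!(atomic.load(Ordering::Acquire).unpack().tag(), 0b1);
    }
}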