use crate::alloc::{MemPool, PmemUsage};
use crate::cell::VCell;
use crate::clone::*;
use crate::ptr::Ptr;
use crate::stm::*;
use crate::utils::SpinLock;
use crate::*;
use std::clone::Clone as StdClone;
use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use std::mem::MaybeUninit;
use std::ops::Deref;
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::sync::atomic::{self, AtomicBool, Ordering::*};
use std::*;
const MAX_REFCOUNT: usize = (isize::MAX) as usize;
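/// Strong and weak reference counts for a [`Parc`], guarded by a volatile
/// spin lock. The lock byte lives in a [`VCell`], so it is reset to its
/// default (unlocked) state when the pool is reopened after a crash.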
struct Counter<A: MemPool> {
strong: usize,
weak: usize,
lock: VCell<u8, A>,
}
unsafe impl<A: MemPool> PSafe for Counter<A> {}
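/// The persistent allocation behind a [`Parc`]: the reference counters, the
/// list of volatile weak references ([`VWeak`]) attached to it, and the
/// value itself.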
pub struct ParcInner<T: ?Sized, A: MemPool> {
counter: Counter<A>,
#[cfg(not(feature = "no_volatile_pointers"))]
vlist: VCell<VWeakList, A>,
marker: PhantomData<A>,
value: T,
}
unsafe impl<T: PSafe + ?Sized, A: MemPool> PSafe for ParcInner<T, A> {}
impl<T: ?Sized, A: MemPool> !VSafe for ParcInner<T, A> {}
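// Overwrites only the data-pointer half of a (potentially fat) pointer,
// preserving any metadata such as a slice length or a vtable pointer.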
unsafe fn set_data_ptr<T, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
std::ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
ptr
}
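/// A persistent, thread-safe reference-counting pointer residing in pool `A`.
///
/// `Parc` is the persistent analogue of [`std::sync::Arc`]: the reference
/// counts live in persistent memory and every count update is logged in the
/// active [`Journal`], so the counts remain consistent across crashes. Unlike
/// `Arc`, it is `!Send` and can only be created and cloned inside a
/// transaction.
///
/// A minimal usage sketch (assumes a pool type `P` implementing [`MemPool`]
/// with a transactional entry point `P::transaction`; these names are
/// illustrative, not defined in this module):
///
/// ```ignore
/// P::transaction(|j| {
///     let p = Parc::<i32, P>::new(10, j);
///     let q = p.pclone(j); // logged strong-count increment
///     assert_eq!(*q, 10);
///     assert_eq!(Parc::strong_count(&p), 2);
/// }).unwrap();
/// ```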
pub struct Parc<T: PSafe + ?Sized, A: MemPool> {
ptr: Ptr<ParcInner<T, A>, A>,
phantom: PhantomData<T>,
}
impl<T: ?Sized, A: MemPool> !TxOutSafe for Parc<T, A> {}
impl<T: ?Sized, A: MemPool> !Send for Parc<T, A> {}
impl<T: ?Sized, A: MemPool> !VSafe for Parc<T, A> {}
impl<T: PSafe + ?Sized, A: MemPool> UnwindSafe for Parc<T, A> {}
impl<T: PSafe + ?Sized, A: MemPool> RefUnwindSafe for Parc<T, A> {}
unsafe impl<T: PSafe + ?Sized, A: MemPool> TxInSafe for Parc<T, A> {}
impl<T: PSafe, A: MemPool> Parc<T, A> {
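    /// Constructs a new `Parc<T>` in pool `A`, logging the allocation in
    /// `journal` so that it is rolled back if the enclosing transaction
    /// aborts.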
pub fn new(value: T, journal: &Journal<A>) -> Parc<T, A> {
unsafe {
let ptr = Ptr::new_unchecked(A::new(
ParcInner::<T, A> {
counter: Counter {
strong: 1,
weak: 1,
lock: VCell::new(0),
},
#[cfg(not(feature = "no_volatile_pointers"))]
vlist: VCell::new(VWeakList::default()),
marker: PhantomData,
value,
},
journal,
));
Self::from_inner(ptr)
}
}
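    /// Constructs a new `Parc` with uninitialized contents.
    ///
    /// A sketch of the intended pattern (assumes a pool `P: MemPool` and a
    /// `P::transaction` entry point; names are illustrative):
    ///
    /// ```ignore
    /// P::transaction(|j| {
    ///     let mut five = Parc::<u32, P>::new_uninit(j);
    ///     Parc::get_mut(&mut five).unwrap().write(5);
    ///     let five = unsafe { five.assume_init() };
    ///     assert_eq!(*five, 5);
    /// }).unwrap();
    /// ```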
pub fn new_uninit(journal: &Journal<A>) -> Parc<MaybeUninit<T>, A> {
unsafe {
Parc::from_inner(Ptr::from_mut(A::new(
ParcInner {
counter: Counter {
strong: 1,
weak: 1,
lock: VCell::new(0),
},
#[cfg(not(feature = "no_volatile_pointers"))]
vlist: VCell::new(VWeakList::default()),
marker: PhantomData,
value: MaybeUninit::<T>::uninit(),
},
journal,
)))
}
}
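    /// Constructs a new `Parc` with zeroed contents; `assume_init` is then
    /// sound for types where the all-zero bit pattern is a valid value
    /// (e.g. integers).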
    pub fn new_zeroed(journal: &Journal<A>) -> Parc<MaybeUninit<T>, A> {
        unsafe {
            let mut uninit = Self::new_uninit(journal);
            std::ptr::write_bytes::<T>(Parc::get_mut_unchecked(&mut uninit).as_mut_ptr(), 0, 1);
            uninit
        }
    }
}
impl<T: PSafe + ?Sized, A: MemPool> Parc<T, A> {
#[inline]
fn from_inner(ptr: Ptr<ParcInner<T, A>, A>) -> Self {
Parc {
ptr,
phantom: PhantomData,
}
}
#[inline(always)]
fn inner(&self) -> &mut ParcInner<T, A> {
self.ptr.get_mut()
}
#[allow(clippy::missing_safety_doc)]
unsafe fn from_ptr(ptr: *mut ParcInner<T, A>, j: &Journal<A>) -> Self {
let off = A::off_unchecked(ptr);
let res = Self::from_inner(Ptr::from_off_unchecked(off));
fetch_inc((*ptr).counter.lock.as_mut(), &mut (*ptr).counter.strong, j);
res
}
#[inline(never)]
    unsafe fn drop_slow(&mut self, j: &Journal<A>) {
        // Destroy the contained value now; the allocation itself stays
        // alive as long as weak references remain.
        std::ptr::drop_in_place(&mut self.ptr.as_mut().value);
        let inner = self.inner();
        if fetch_dec(inner.counter.lock.as_mut(), &mut inner.counter.weak, j) == 1 {
            atomic::fence(Acquire);
            // Drop the volatile weak list *before* freeing the allocation;
            // freeing first would leave `vlist` pointing into reclaimed
            // memory.
            #[cfg(not(feature = "no_volatile_pointers"))]
            std::ptr::drop_in_place(self.ptr.as_mut().vlist.as_mut());
            A::free(self.ptr.as_mut());
        }
    }
}
impl<T: PSafe, A: MemPool> Parc<MaybeUninit<T>, A> {
    #[inline]
    pub unsafe fn assume_init(self) -> Parc<T, A> {
        Parc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
    }
    #[inline]
    pub fn get_mut(this: &mut Self) -> Option<&mut MaybeUninit<T>> {
        if Parc::is_unique(this) {
            unsafe { Some(Parc::get_mut_unchecked(this)) }
        } else {
            None
        }
    }
    #[inline]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut MaybeUninit<T> {
        &mut this.ptr.value
    }
}
impl<T: PSafe + ?Sized, A: MemPool> Parc<T, A> {
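    /// Creates a persistent [`Weak`] reference to this allocation, logging
    /// the weak-count increment in `j`.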
pub fn downgrade(this: &Self, j: &Journal<A>) -> Weak<T, A> {
let inner = this.inner();
let _lock = SpinLock::acquire(inner.counter.lock.as_mut());
lock_free_fetch_inc(&mut inner.counter.weak, j);
Weak {
ptr: this.ptr.clone(),
}
}
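    /// Creates a volatile [`VWeak`] reference to this allocation. Unlike
    /// [`Self::downgrade`], this is not transactional and is therefore
    /// rejected inside a running transaction; [`Self::unsafe_demote`]
    /// skips that check.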
pub fn demote(&self) -> VWeak<T, A> {
debug_assert!(!self.ptr.is_dangling());
assert!(
!Journal::<A>::is_running(),
"Parc::demote() cannot be called from a transaction"
);
VWeak::new(self)
}
pub unsafe fn unsafe_demote(&self) -> VWeak<T, A> {
debug_assert!(!self.ptr.is_dangling());
VWeak::new(self)
}
#[inline]
    pub fn weak_count(this: &Self) -> usize {
        let inner = this.inner();
        let cnt = load(inner.counter.lock.as_mut(), &inner.counter.weak);
        if cnt == usize::MAX {
            0
        } else {
            cnt - 1
        }
    }
#[inline]
pub fn strong_count(this: &Self) -> usize {
let inner = this.inner();
load(inner.counter.lock.as_mut(), &inner.counter.strong)
}
#[inline]
fn is_unique(this: &Self) -> bool {
Parc::weak_count(this) == 0 && Parc::strong_count(this) == 1
}
#[inline]
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
this.ptr.off() == other.ptr.off()
}
}
impl<T: PSafe, A: MemPool> PmemUsage for Parc<T, A> {
default fn size_of() -> usize {
Ptr::<ParcInner<T, A>, A>::size_of()
}
}
impl<T: PSafe + PmemUsage + ?Sized, A: MemPool> PmemUsage for Parc<T, A> {
fn size_of() -> usize {
Ptr::<ParcInner<T, A>, A>::size_of() + T::size_of()
}
}
impl<T: PSafe + ?Sized, A: MemPool> Deref for Parc<T, A> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &T {
&self.inner().value
}
}
impl<T: PSafe, A: MemPool> Parc<T, A> {
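    /// Initializes `arc` with `value` exactly once, outside a transaction,
    /// using a low-level atomic allocation (`A::atomic_new`) plus redo
    /// logging so that the assignment is crash-atomic. Fails if `arc` is
    /// already initialized or does not reside in the pool.
    ///
    /// A sketch (assumes `root` is a reference to an `Option<Parc<i32, P>>`
    /// residing in the pool, e.g. a field of the pool's root object, and no
    /// transaction is open):
    ///
    /// ```ignore
    /// Parc::initialize(root, 42)?;
    /// ```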
pub fn initialize(arc: &Option<Parc<T, A>>, value: T) -> crate::result::Result<()> {
assert!(
!Journal::<A>::is_running(),
"Parc::initialize() cannot be used inside a transaction"
);
        match arc {
            Some(_) => Err("already initialized".to_string()),
            None => {
                if A::valid(arc) {
                    unsafe {
                        let new = A::atomic_new(ParcInner::<T, A> {
                            counter: Counter {
                                strong: 1,
                                weak: 1,
                                lock: VCell::new(0),
                            },
                            #[cfg(not(feature = "no_volatile_pointers"))]
                            vlist: VCell::new(VWeakList::default()),
                            marker: PhantomData,
                            value,
                        });
                        let pnew = Some(Parc::<T, A>::from_inner(Ptr::from_off_unchecked(new.1)));
                        let src = crate::utils::as_slice64(&pnew);
                        let mut base = A::off_unchecked(arc);
                        // Crash-atomically copy the new `Parc` over `arc`,
                        // one 64-bit word at a time, then commit the log.
                        for i in src {
                            A::log64(base, *i, new.3);
                            base += 8;
                        }
                        A::perform(new.3);
                    }
                    Ok(())
                } else {
                    Err("The object is not in the PM".to_string())
                }
            }
        }
}
}
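// Dropping a `Parc` is itself a logged operation: the strong-count decrement
// goes through the current journal (obtained via `Journal::current(true)`),
// and the value plus allocation are reclaimed in `drop_slow` only once both
// counts reach zero.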
unsafe impl<#[may_dangle] T: PSafe + ?Sized, A: MemPool> Drop for Parc<T, A> {
fn drop(&mut self) {
unsafe {
let journal = &*Journal::<A>::current(true).unwrap().0;
let inner = self.inner();
if fetch_dec(inner.counter.lock.as_mut(),
&mut inner.counter.strong, journal) != 1
{
return;
}
self.drop_slow(journal);
}
}
}
impl<T: PSafe + ?Sized, A: MemPool> PClone<A> for Parc<T, A> {
#[inline]
fn pclone(&self, j: &Journal<A>) -> Parc<T, A> {
let inner = self.inner();
let old_size = fetch_inc(inner.counter.lock.as_mut(),
&mut inner.counter.strong, j);
if old_size > MAX_REFCOUNT {
std::process::abort();
}
Self::from_inner(self.ptr)
}
}
impl<T: RootObj<A> + PSafe, A: MemPool> RootObj<A> for Parc<T, A> {
#[inline]
default fn init(journal: &Journal<A>) -> Parc<T, A> {
Parc::new(T::init(journal), journal)
}
}
trait RcEqIdent<T: PartialEq + PSafe + ?Sized, A: MemPool> {
fn eq(&self, other: &Parc<T, A>) -> bool;
fn ne(&self, other: &Parc<T, A>) -> bool;
}
impl<T: PartialEq + PSafe + ?Sized, A: MemPool> RcEqIdent<T, A> for Parc<T, A> {
#[inline]
fn eq(&self, other: &Parc<T, A>) -> bool {
**self == **other
}
#[inline]
fn ne(&self, other: &Parc<T, A>) -> bool {
**self != **other
}
}
impl<T: PartialEq + PSafe + ?Sized, A: MemPool> PartialEq for Parc<T, A> {
#[inline]
fn eq(&self, other: &Parc<T, A>) -> bool {
RcEqIdent::eq(self, other)
}
}
impl<T: Eq + PSafe + ?Sized, A: MemPool> Eq for Parc<T, A> {}
impl<T: PartialOrd + PSafe + ?Sized, A: MemPool> PartialOrd for Parc<T, A> {
#[inline(always)]
fn partial_cmp(&self, other: &Parc<T, A>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
#[inline(always)]
fn lt(&self, other: &Parc<T, A>) -> bool {
**self < **other
}
#[inline(always)]
fn le(&self, other: &Parc<T, A>) -> bool {
**self <= **other
}
#[inline(always)]
fn gt(&self, other: &Parc<T, A>) -> bool {
**self > **other
}
#[inline(always)]
fn ge(&self, other: &Parc<T, A>) -> bool {
**self >= **other
}
}
impl<T: Ord + PSafe + ?Sized, A: MemPool> Ord for Parc<T, A> {
#[inline]
fn cmp(&self, other: &Parc<T, A>) -> Ordering {
(**self).cmp(&**other)
}
}
impl<T: Hash + PSafe, A: MemPool> Hash for Parc<T, A> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
impl<T: fmt::Display + PSafe, A: MemPool> fmt::Display for Parc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl<T: fmt::Debug + PSafe, A: MemPool> fmt::Debug for Parc<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
impl<T: PSafe + ?Sized, A: MemPool> fmt::Pointer for Parc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&(&**self as *const T), f)
}
}
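/// `Weak` is the persistent analogue of [`std::sync::Weak`]: a non-owning
/// reference to a [`Parc`] allocation. It keeps the counter block alive but
/// not the value, and it must be upgraded inside a transaction.
///
/// A sketch (assumes a pool `P: MemPool` with a `P::transaction` entry
/// point; names are illustrative):
///
/// ```ignore
/// P::transaction(|j| {
///     let strong = Parc::<i32, P>::new(7, j);
///     let weak = Parc::downgrade(&strong, j);
///     assert_eq!(*weak.upgrade(j).unwrap(), 7);
///     drop(strong);
///     assert!(weak.upgrade(j).is_none());
/// }).unwrap();
/// ```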
pub struct Weak<T: PSafe + ?Sized, A: MemPool> {
ptr: Ptr<ParcInner<T, A>, A>,
}
impl<T: ?Sized, A: MemPool> !TxOutSafe for Weak<T, A> {}
impl<T: ?Sized, A: MemPool> !Sync for Weak<T, A> {}
impl<T: ?Sized, A: MemPool> !Send for Weak<T, A> {}
impl<T: ?Sized, A: MemPool> !VSafe for Weak<T, A> {}
impl<T: PSafe, A: MemPool> Weak<T, A> {
pub fn as_raw(&self) -> *const T {
match self.inner() {
None => std::ptr::null(),
Some(inner) => {
let offset = data_offset_sized::<T, A>();
let ptr = inner as *const ParcInner<T, A>;
let ptr = unsafe { (ptr as *const u8).offset(offset) };
ptr as *const T
}
}
}
pub fn into_raw(self) -> *const T {
let result = self.as_raw();
mem::forget(self);
result
}
#[allow(clippy::missing_safety_doc)]
pub unsafe fn from_raw(ptr: *const T) -> Self {
if ptr.is_null() {
Self::new()
} else {
let offset = data_offset::<T, A>(ptr);
let fake_ptr = ptr as *mut ParcInner<T, A>;
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Weak {
ptr: Ptr::from_raw(ptr),
}
}
}
}
impl<T: PSafe + ?Sized, A: MemPool> Weak<T, A> {
pub fn new() -> Weak<T, A> {
Weak {
ptr: Ptr::dangling(),
}
}
fn is_dangling(&self) -> bool {
self.ptr.is_dangling()
}
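    /// Attempts to promote this `Weak` to a [`Parc`], returning `None` if
    /// the value has already been dropped (strong count is zero). The
    /// increment is performed under the spin lock and logged in `j`.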
pub fn upgrade(&self, j: &Journal<A>) -> Option<Parc<T, A>> {
let inner = self.inner()?;
let _lock = SpinLock::acquire(inner.counter.lock.as_mut());
let n = inner.counter.strong;
if n == 0 {
return None;
}
if n > MAX_REFCOUNT {
std::process::abort();
}
lock_free_fetch_inc(&mut inner.counter.strong, j);
Some(Parc::from_inner(self.ptr))
}
pub fn strong_count(&self) -> usize {
if let Some(inner) = self.inner() {
load(inner.counter.lock.as_mut(), &inner.counter.strong)
} else {
0
}
}
pub fn weak_count(&self) -> usize {
self.inner()
.map(|inner| {
let weak = load(inner.counter.lock.as_mut(), &inner.counter.weak);
let strong = load(inner.counter.lock.as_mut(), &inner.counter.strong);
if strong == 0 {
0
} else {
weak - 1
}
})
.unwrap_or(0)
}
#[inline]
fn inner(&self) -> Option<&mut ParcInner<T, A>> {
if self.ptr.is_dangling() {
None
} else {
Some(self.ptr.get_mut())
}
}
#[inline]
pub fn ptr_eq(&self, other: &Self) -> bool {
self.ptr == other.ptr
}
}
impl<T: PSafe + ?Sized, A: MemPool> Drop for Weak<T, A> {
    fn drop(&mut self) {
        // A dangling `Weak` owns nothing. Otherwise, log the weak-count
        // decrement and free the allocation once it reaches zero.
        if let Some(inner) = self.inner() {
            let j = unsafe { &*Journal::<A>::current(true).unwrap().0 };
            if fetch_dec(inner.counter.lock.as_mut(), &mut inner.counter.weak, j) == 1 {
                unsafe {
                    A::free(self.ptr.as_mut());
                }
            }
        }
    }
}
impl<T: PSafe + ?Sized, A: MemPool> PClone<A> for Weak<T, A> {
#[inline]
fn pclone(&self, j: &Journal<A>) -> Weak<T, A> {
let inner = if let Some(inner) = self.inner() {
inner
} else {
return Weak { ptr: self.ptr };
};
let old_size = fetch_inc(inner.counter.lock.as_mut(),
&mut inner.counter.weak, j);
if old_size > MAX_REFCOUNT {
std::process::abort();
}
Weak { ptr: self.ptr }
}
}
impl<T: PSafe + ?Sized + fmt::Debug, A: MemPool> fmt::Debug for Weak<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "(Weak)")
}
}
impl<T: PSafe + ?Sized, A: MemPool> Default for Weak<T, A> {
fn default() -> Self {
Weak::new()
}
}
trait ParcBoxPtr<T: PSafe + ?Sized, A: MemPool> {
fn count(&self) -> &Counter<A>;
}
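// Counter helpers. Each update takes the per-allocation spin lock and, unless
// the `no_log_rc` feature is enabled, records a `recount_on_failure` log in
// the journal before mutating the count, so that a crash mid-update can be
// recovered. `lock_free_fetch_inc` follows the same logging protocol without
// taking the lock; its callers must already hold it.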
#[inline]
fn load(lock: *mut u8, cnt: &usize) -> usize {
let _lock = SpinLock::acquire(lock);
*cnt
}
#[inline]
fn lock_free_fetch_inc<A: MemPool>(cnt: &mut usize, journal: &Journal<A>) -> usize {
unsafe {
let mut log = if cfg!(not(feature = "no_log_rc")) {
if A::valid(cnt) {
Log::recount_on_failure(u64::MAX, false, journal)
} else {
Ptr::dangling()
}
} else {
Ptr::dangling()
};
let res = *cnt;
if log.is_dangling() {
*cnt += 1;
} else {
let off = A::off_unchecked(cnt);
let z = A::zone(off);
A::prepare(z);
A::log64(off, res as u64 + 1, z);
log.set(off, 1, z);
A::perform(z);
}
res
}
}
#[inline]
fn fetch_inc<A: MemPool>(lock: *mut u8, cnt: &mut usize, journal: &Journal<A>) -> usize {
unsafe {
let _lock = SpinLock::acquire(lock);
let mut log = if cfg!(not(feature = "no_log_rc")) {
if A::valid(cnt) {
Log::recount_on_failure(u64::MAX, false, journal)
} else {
Ptr::dangling()
}
} else {
Ptr::dangling()
};
let res = *cnt;
if log.is_dangling() {
*cnt += 1;
} else {
let off = A::off_unchecked(cnt);
let z = A::zone(off);
A::prepare(z);
A::log64(off, res as u64 + 1, z);
log.set(off, 1, z);
A::perform(z);
}
res
}
}
#[inline]
fn fetch_dec<A: MemPool>(lock: *mut u8, cnt: &mut usize, journal: &Journal<A>) -> usize {
unsafe {
let _lock = SpinLock::acquire(lock);
let mut log = if cfg!(not(feature = "no_log_rc")) {
if A::valid(cnt) {
Log::recount_on_failure(u64::MAX, true, journal)
} else {
Ptr::dangling()
}
} else {
Ptr::dangling()
};
let res = *cnt;
if log.is_dangling() {
*cnt -= 1;
} else {
let off = A::off_unchecked(cnt);
let z = A::zone(off);
A::prepare(z);
A::log64(off, res as u64 - 1, z);
log.set(off, 1, z);
A::perform(z);
}
res
}
}
impl<T: PSafe + ?Sized, A: MemPool> ParcBoxPtr<T, A> for Parc<T, A> {
#[inline(always)]
fn count(&self) -> &Counter<A> {
&self.ptr.counter
}
}
impl<T: PSafe + ?Sized, A: MemPool> ParcBoxPtr<T, A> for ParcInner<T, A> {
#[inline(always)]
fn count(&self) -> &Counter<A> {
&self.counter
}
}
impl<T: PSafe + ?Sized, A: MemPool> borrow::Borrow<T> for Parc<T, A> {
fn borrow(&self) -> &T {
&self.inner().value
}
}
impl<T: PSafe + ?Sized, A: MemPool> AsRef<T> for Parc<T, A> {
fn as_ref(&self) -> &T {
&self.inner().value
}
}
impl<T: PSafe + ?Sized, A: MemPool> Unpin for Parc<T, A> {}
unsafe fn data_offset<T, A: MemPool>(ptr: *const T) -> isize {
data_offset_align::<A>(mem::align_of_val(&*ptr))
}
fn data_offset_sized<T, A: MemPool>() -> isize {
data_offset_align::<A>(mem::align_of::<T>())
}
#[inline]
fn data_offset_align<A: MemPool>(align: usize) -> isize {
let layout = std::alloc::Layout::new::<ParcInner<(), A>>();
(layout.size() + layout.padding_needed_for(align)) as isize
}
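/// `VWeak` is a *volatile* weak reference to a [`Parc`]: it lives in ordinary
/// transient memory, is `Send`/`Sync` for `PSend` payloads, and is
/// invalidated automatically when the allocation is freed or the pool is
/// reopened (tracked via the `VWeakValid` flag and the pool generation).
///
/// A sketch (assumes `some_parc: Parc<i32, P>` and a pool `P: MemPool`;
/// names are illustrative):
///
/// ```ignore
/// // Outside any transaction:
/// let v = some_parc.demote();
/// // Later, possibly on another thread:
/// P::transaction(|j| {
///     if let Some(p) = v.promote(j) {
///         // still alive, and the pool generation matches
///         assert_eq!(*p, 10);
///     }
/// }).unwrap();
/// ```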
pub struct VWeak<T: ?Sized, A: MemPool> {
ptr: *mut ParcInner<T, A>,
valid: *mut VWeakValid,
gen: u32,
}
impl<T: ?Sized, A: MemPool> UnwindSafe for VWeak<T, A> {}
impl<T: ?Sized, A: MemPool> RefUnwindSafe for VWeak<T, A> {}
unsafe impl<T: PSend + ?Sized, A: MemPool> Send for VWeak<T, A> {}
unsafe impl<T: PSend + ?Sized, A: MemPool> Sync for VWeak<T, A> {}
unsafe impl<T: ?Sized, A: MemPool> TxInSafe for VWeak<T, A> {}
unsafe impl<T: ?Sized, A: MemPool> TxOutSafe for VWeak<T, A> {}
unsafe impl<T: ?Sized, A: MemPool> PSafe for VWeak<T, A> {}
impl<T: PSafe + ?Sized, A: MemPool> VWeak<T, A> {
fn new(parc: &Parc<T, A>) -> VWeak<T, A> {
let list = parc.ptr.vlist.as_mut();
VWeak {
ptr: parc.ptr.get_mut_ptr(),
valid: list.append(),
gen: A::gen(),
}
}
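    /// Attempts to obtain a [`Parc`] from this volatile reference. Fails if
    /// the pool generation has changed, the allocation has been invalidated,
    /// or the strong count has dropped to zero.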
pub fn promote(&self, j: &Journal<A>) -> Option<Parc<T, A>> {
let inner = self.inner()?;
let _lock = SpinLock::acquire(inner.counter.lock.as_mut());
let n = inner.counter.strong;
if n == 0 {
return None;
}
if n > MAX_REFCOUNT {
std::process::abort();
}
lock_free_fetch_inc(&mut inner.counter.strong, j);
Some(Parc::from_inner(unsafe { Ptr::from_raw(self.ptr) }))
}
#[inline]
fn inner(&self) -> Option<&mut ParcInner<T, A>> {
unsafe {
if !(*self.valid).valid.load(Acquire) || self.gen != A::gen() {
None
} else {
Some(&mut *self.ptr)
}
}
}
}
impl<T: PSafe + ?Sized, A: MemPool> Clone for VWeak<T, A> {
fn clone(&self) -> Self {
if self.gen == A::gen() {
unsafe {
if (*self.valid).valid.load(Acquire) {
let list = (*self.ptr).vlist.as_mut();
return VWeak {
ptr: self.ptr,
valid: list.append(),
gen: self.gen,
};
}
}
}
VWeak {
ptr: self.ptr,
valid: self.valid,
gen: self.gen,
}
}
}
impl<T: ?Sized, A: MemPool> Drop for VWeak<T, A> {
fn drop(&mut self) {
unsafe {
let this = &mut *self.valid;
if !this.list.is_null() {
let mut head = match (*this.list).head.lock() {
Ok(g) => g,
Err(p) => p.into_inner(),
};
if this.prev.is_null() {
*head = this.next;
} else {
(*this.prev).next = this.next;
}
if !this.next.is_null() {
(*this.next).prev = this.prev;
}
}
}
}
}
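// Bookkeeping for volatile weak references: every `VWeak` owns a
// heap-allocated `VWeakValid` node linked into its allocation's `VWeakList`.
// Dropping the list (when the `ParcInner` goes away) marks all nodes invalid
// rather than freeing them; the nodes are unlinked by `VWeak::drop`.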
struct VWeakValid {
valid: AtomicBool,
next: *mut VWeakValid,
prev: *mut VWeakValid,
list: *mut VWeakList,
}
use std::sync::Mutex as StdMutex;
struct VWeakList {
head: StdMutex<*mut VWeakValid>,
}
impl VWeakList {
fn append(&mut self) -> *mut VWeakValid {
let list = self as *mut Self;
let mut head = match self.head.lock() {
Ok(g) => g,
Err(p) => p.into_inner(),
};
let new = Box::into_raw(Box::new(VWeakValid {
valid: AtomicBool::new(true),
next: *head,
prev: std::ptr::null_mut(),
list,
}));
if !(*head).is_null() {
unsafe {
(**head).prev = new;
}
}
*head = new;
new
}
}
impl Default for VWeakList {
fn default() -> Self {
VWeakList {
head: StdMutex::new(std::ptr::null_mut()),
}
}
}
impl Drop for VWeakList {
fn drop(&mut self) {
let head = match self.head.lock() {
Ok(g) => g,
Err(p) => p.into_inner(),
};
unsafe {
let mut curr = *head;
while !curr.is_null() {
(*curr).valid.store(false, Release);
(*curr).list = std::ptr::null_mut();
curr = (*curr).next;
}
}
}
}