use std::panic::RefUnwindSafe;
use std::panic::UnwindSafe;
use crate::alloc::{MemPool, PmemUsage};
use crate::cell::VCell;
use crate::clone::*;
use crate::ptr::Ptr;
use crate::stm::*;
use crate::*;
use std::cmp::Ordering;
use std::hash::Hash;
use std::hash::Hasher;
use std::marker::PhantomData;
use std::mem::MaybeUninit;
use std::ops::Deref;
use std::*;
/// Reference counts stored inline in every [`PrcBox`] allocation.
#[derive(Debug)]
struct Counter {
    // Number of `Prc` handles; the payload is dropped when this reaches zero.
    strong: usize,
    // Number of `Weak` handles, plus one implicit weak collectively owned by
    // all strong handles (see `Prc::weak_count`).
    weak: usize,
    // 0 until this counter has been logged in the current transaction;
    // compiled out entirely when the `no_log_rc` feature is enabled.
    #[cfg(not(feature = "no_log_rc"))]
    has_log: u8,
}
/// In-pool layout of a `Prc` allocation: counters and bookkeeping precede
/// the stored value.
pub struct PrcBox<T: ?Sized, A: MemPool> {
    counter: Counter,
    /// Volatile list of `VWeak` observers; its `Drop` invalidates them.
    #[cfg(not(feature = "no_volatile_pointers"))]
    vlist: VCell<VWeakList, A>,
    /// Zero-sized field tying the layout to the pool type `A`.
    dummy: [A; 0],
    /// Kept last so `T: ?Sized` (unsized payloads) is allowed.
    value: T,
}
// Marker wiring for the box itself: it may live in persistent memory
// (PSafe), may be used inside transactions and across unwind boundaries,
// but must never be captured by volatile code (!VSafe).
unsafe impl<T: ?Sized, A: MemPool> PSafe for PrcBox<T, A> {}
unsafe impl<T: ?Sized, A: MemPool> TxInSafe for PrcBox<T, A> {}
impl<T: ?Sized, A: MemPool> UnwindSafe for PrcBox<T, A> {}
impl<T: ?Sized, A: MemPool> RefUnwindSafe for PrcBox<T, A> {}
impl<T: ?Sized, A: MemPool> !VSafe for PrcBox<T, A> {}
/// Overwrites only the data-address word of `ptr`, preserving whatever
/// metadata a fat pointer carries (slice length, vtable).
///
/// # Safety
/// Relies on the layout of (fat) raw pointers placing the data address
/// first; `data` must be a valid address for the resulting pointer's use.
unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
    let slot = &mut ptr as *mut *mut T as *mut *mut u8;
    slot.write(data as *mut u8);
    ptr
}
/// A single-threaded persistent reference-counted pointer into pool `A`,
/// analogous to `std::rc::Rc` but addressed by a pool offset (`Ptr`).
pub struct Prc<T: PSafe + ?Sized, A: MemPool> {
    ptr: Ptr<PrcBox<T, A>, A>,
    // Tells the compiler this type logically owns a `T` (drop check / variance).
    phantom: PhantomData<T>,
}
// `Prc` is strictly single-threaded (no Send/Sync), must not escape a
// transaction (!TxOutSafe), and may not be kept in volatile memory (!VSafe).
impl<T: ?Sized, A: MemPool> !TxOutSafe for Prc<T, A> {}
impl<T: ?Sized, A: MemPool> !Send for Prc<T, A> {}
impl<T: ?Sized, A: MemPool> !Sync for Prc<T, A> {}
impl<T: ?Sized, A: MemPool> !VSafe for Prc<T, A> {}
impl<T: PSafe, A: MemPool> Prc<T, A> {
    /// Allocates a new `PrcBox` holding `value` in pool `A`, logged in the
    /// given transaction journal, and returns a strong handle to it.
    pub fn new(value: T, journal: &Journal<A>) -> Prc<T, A> {
        unsafe {
            let ptr = Ptr::new_unchecked(A::new(
                PrcBox::<T, A> {
                    counter: Counter {
                        strong: 1,
                        // The implicit weak reference collectively owned by
                        // all strong handles.
                        weak: 1,
                        #[cfg(not(feature = "no_log_rc"))]
                        has_log: 0,
                    },
                    #[cfg(not(feature = "no_volatile_pointers"))]
                    vlist: VCell::new(VWeakList::default()),
                    dummy: [],
                    value,
                },
                journal,
            ));
            Self::from_inner(ptr)
        }
    }
    /// Like [`new`](Self::new) but leaves the payload uninitialized.
    pub fn new_uninit(journal: &Journal<A>) -> Prc<MaybeUninit<T>, A> {
        unsafe {
            Prc::from_inner(Ptr::from_mut(A::new(
                PrcBox {
                    counter: Counter {
                        strong: 1,
                        weak: 1,
                        #[cfg(not(feature = "no_log_rc"))]
                        has_log: 0,
                    },
                    #[cfg(not(feature = "no_volatile_pointers"))]
                    vlist: VCell::new(VWeakList::default()),
                    dummy: [],
                    value: MaybeUninit::<T>::uninit(),
                },
                journal,
            )))
        }
    }
    /// Allocates and zero-fills the payload. Still returns `MaybeUninit`
    /// because an all-zero bit pattern is not necessarily a valid `T`.
    pub fn new_zeroed(journal: &Journal<A>) -> Prc<mem::MaybeUninit<T>, A> {
        unsafe {
            let mut uninit = Self::new_uninit(journal);
            std::ptr::write_bytes::<T>(Prc::get_mut_unchecked(&mut uninit).as_mut_ptr(), 0, 1);
            uninit
        }
    }
    /// Takes over `p`'s strong reference without touching the counters:
    /// `p` is forgotten so its destructor never runs a decrement.
    pub fn from(p: Prc<T, A>) -> Self {
        let res = Self::from_inner(p.ptr);
        mem::forget(p);
        res
    }
}
impl<T: PSafe + ?Sized, A: MemPool> Prc<T, A> {
    /// Wraps a box pointer without changing any reference count.
    #[inline]
    fn from_inner(ptr: Ptr<PrcBox<T, A>, A>) -> Self {
        Prc {
            ptr,
            phantom: PhantomData,
        }
    }
    /// Shared view of the underlying `PrcBox`.
    #[inline(always)]
    fn inner(&self) -> &PrcBox<T, A> {
        self.ptr.as_ref()
    }
    /// Builds a handle from a raw box pointer and bumps the strong count.
    /// Caller must guarantee `ptr` points at a live `PrcBox` inside pool `A`.
    #[allow(clippy::missing_safety_doc)]
    unsafe fn from_ptr(ptr: *mut PrcBox<T, A>, j: &Journal<A>) -> Self {
        let off = A::off_unchecked(ptr);
        let res = Self::from_inner(Ptr::from_off_unchecked(off));
        res.inc_strong(j);
        res
    }
}
impl<T: PSafe, A: MemPool> Prc<mem::MaybeUninit<T>, A> {
    /// Converts `Prc<MaybeUninit<T>>` into `Prc<T>` once the payload has
    /// been written.
    ///
    /// # Safety
    /// The caller must have fully initialized the payload as a valid `T`.
    #[inline]
    pub unsafe fn assume_init(self) -> Prc<T, A> {
        // Prevent `self`'s destructor from running; the returned handle
        // takes over the same counts.
        let this = mem::ManuallyDrop::new(self);
        Prc::from_inner(this.ptr.cast())
    }
}
impl<T: PSafe, A: MemPool> Prc<MaybeUninit<T>, A> {
    /// Mutable access to the (uninitialized) slot, granted only while this
    /// handle is the sole owner — no other strong or weak references exist.
    #[inline]
    pub fn get_mut(this: &mut Self) -> Option<&mut MaybeUninit<T>> {
        if !Prc::is_unique(this) {
            return None;
        }
        unsafe { Some(Prc::get_mut_unchecked(this)) }
    }
    /// Mutable access without the uniqueness check.
    ///
    /// # Safety
    /// The caller must ensure no other handle aliases the payload.
    #[inline]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut MaybeUninit<T> {
        &mut this.ptr.value
    }
}
impl<T: PSafe + ?Sized, A: MemPool> Prc<T, A> {
    /// Creates a persistent weak handle, bumping the weak count in `journal`.
    pub fn downgrade(this: &Self, journal: &Journal<A>) -> Weak<T, A> {
        this.inc_weak(journal);
        debug_assert!(!this.ptr.is_dangling());
        Weak { ptr: this.ptr }
    }
    /// Creates a volatile (non-persistent) weak observer of this handle.
    pub fn volatile(this: &Self) -> VWeak<T, A> {
        debug_assert!(!this.ptr.is_dangling());
        VWeak::new(this)
    }
    /// Number of `Weak` handles, excluding the implicit weak reference
    /// collectively owned by the strong handles.
    #[inline]
    pub fn weak_count(this: &Self) -> usize {
        this.weak() - 1
    }
    /// Number of `Prc` handles to this allocation.
    #[inline]
    pub fn strong_count(this: &Self) -> usize {
        this.strong()
    }
    /// True when `this` is the only handle of any kind.
    #[inline]
    fn is_unique(this: &Self) -> bool {
        Prc::strong_count(this) == 1 && Prc::weak_count(this) == 0
    }
    /// Pointer identity: do both handles name the same pool offset?
    #[inline]
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.ptr.off() == other.ptr.off()
    }
}
impl<T: PSafe, A: MemPool> PmemUsage for Prc<T, A> {
    /// Fallback (specializable) persistent footprint when `T`'s own usage
    /// is unknown; refined below for `T: PmemUsage`.
    default fn size_of() -> usize {
        Ptr::<PrcBox<T, A>, A>::size_of()
    }
}
impl<T: PSafe + PmemUsage + ?Sized, A: MemPool> PmemUsage for Prc<T, A> {
    /// Specialization: adds the payload's own reported persistent usage to
    /// the box/pointer overhead.
    fn size_of() -> usize {
        Ptr::<PrcBox<T, A>, A>::size_of() + T::size_of()
    }
}
impl<T: PSafe + ?Sized, A: MemPool> Deref for Prc<T, A> {
    type Target = T;
    /// Borrows the payload stored in the persistent box.
    #[inline(always)]
    fn deref(&self) -> &T {
        let boxed = self.inner();
        &boxed.value
    }
}
unsafe impl<#[may_dangle] T: PSafe + ?Sized, A: MemPool> Drop for Prc<T, A> {
    /// Decrements the strong count inside the current transaction; when it
    /// hits zero the payload is dropped, and once the weak count also hits
    /// zero the box is released back to the pool.
    fn drop(&mut self) {
        unsafe {
            // Dropping requires an open transaction so the counter
            // mutations can be logged.
            let journal = Journal::<A>::current(true).unwrap();
            self.dec_strong(journal.0);
            if self.strong() == 0 {
                std::ptr::drop_in_place(&mut self.ptr.as_mut().value);
                // Release the implicit weak reference shared by all strong
                // handles.
                self.dec_weak(journal.0);
                if self.weak() == 0 {
                    A::free(self.ptr.as_mut());
                    // NOTE(review): `vlist` is dropped *after* `A::free`;
                    // presumably sound because pool deallocation is logged
                    // and deferred until commit — confirm with `MemPool::free`.
                    #[cfg(not(feature = "no_volatile_pointers"))]
                    std::ptr::drop_in_place(&mut self.ptr.as_mut().vlist);
                }
            }
        }
    }
}
impl<T: PSafe + ?Sized, A: MemPool> PClone<A> for Prc<T, A> {
#[inline]
fn pclone(&self, journal: &Journal<A>) -> Prc<T, A> {
self.inc_strong(journal);
Self::from_inner(self.ptr)
}
}
impl<T: RootObj<A> + PSafe, A: MemPool> RootObj<A> for Prc<T, A> {
    /// Root-object initialization: recursively initializes the payload
    /// (specializable; see the `T: Default` impl below).
    #[inline]
    default fn init(journal: &Journal<A>) -> Prc<T, A> {
        Prc::new(T::init(journal), journal)
    }
}
impl<T: Default + PSafe + ?Sized, A: MemPool> RootObj<A> for Prc<T, A> {
    /// Specialization: uses `T::default()` for payloads that implement
    /// `Default`.
    #[inline]
    default fn init(journal: &Journal<A>) -> Prc<T, A> {
        Prc::new(T::default(), journal)
    }
}
/// Internal by-value equality for `Prc` (compares payloads, not pointers),
/// mirroring the helper trait used by `std::rc::Rc`.
trait RcEqIdent<T: PartialEq + PSafe + ?Sized, A: MemPool> {
    fn eq(&self, other: &Prc<T, A>) -> bool;
    fn ne(&self, other: &Prc<T, A>) -> bool;
}
impl<T: PartialEq + PSafe + ?Sized, A: MemPool> RcEqIdent<T, A> for Prc<T, A> {
    /// Delegates to the payloads' `PartialEq`.
    #[inline]
    fn eq(&self, other: &Prc<T, A>) -> bool {
        PartialEq::eq(&**self, &**other)
    }
    #[inline]
    fn ne(&self, other: &Prc<T, A>) -> bool {
        PartialEq::ne(&**self, &**other)
    }
}
impl<T: PartialEq + PSafe + ?Sized, A: MemPool> PartialEq for Prc<T, A> {
#[inline]
fn eq(&self, other: &Prc<T, A>) -> bool {
RcEqIdent::eq(self, other)
}
}
/// `Prc<T>` is `Eq` whenever the payload is.
impl<T: Eq + PSafe + ?Sized, A: MemPool> Eq for Prc<T, A> {}
impl<T: PartialOrd + PSafe + ?Sized, A: MemPool> PartialOrd for Prc<T, A> {
    /// All ordering operators compare the pointed-to payloads.
    #[inline(always)]
    fn partial_cmp(&self, other: &Prc<T, A>) -> Option<Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
    #[inline(always)]
    fn lt(&self, other: &Prc<T, A>) -> bool {
        PartialOrd::lt(&**self, &**other)
    }
    #[inline(always)]
    fn le(&self, other: &Prc<T, A>) -> bool {
        PartialOrd::le(&**self, &**other)
    }
    #[inline(always)]
    fn gt(&self, other: &Prc<T, A>) -> bool {
        PartialOrd::gt(&**self, &**other)
    }
    #[inline(always)]
    fn ge(&self, other: &Prc<T, A>) -> bool {
        PartialOrd::ge(&**self, &**other)
    }
}
impl<T: Ord + PSafe + ?Sized, A: MemPool> Ord for Prc<T, A> {
    /// Total order of the payloads.
    #[inline]
    fn cmp(&self, other: &Prc<T, A>) -> Ordering {
        Ord::cmp(&**self, &**other)
    }
}
impl<T: Hash + PSafe + ?Sized, A: MemPool> Hash for Prc<T, A> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
impl<T: fmt::Display + PSafe + ?Sized, A: MemPool> fmt::Display for Prc<T, A> {
    /// Formats the payload transparently, forwarding the caller's
    /// formatter flags (width, precision, …).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}
impl<T: fmt::Debug + PSafe + ?Sized, A: MemPool> fmt::Debug for Prc<T, A> {
    /// Debug-formats the payload, forwarding the caller's formatter flags.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
impl<T: PSafe + ?Sized, A: MemPool> fmt::Pointer for Prc<T, A> {
    /// Formats the in-memory address of the payload.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let raw: *const T = &**self;
        fmt::Pointer::fmt(&raw, f)
    }
}
/// Persistent weak counterpart of [`Prc`]: keeps the allocation's metadata
/// alive (weak count) without keeping the payload alive.
pub struct Weak<T: PSafe + ?Sized, A: MemPool> {
    ptr: Ptr<PrcBox<T, A>, A>,
}
// Same restrictions as `Prc`: single-threaded, transaction-confined, and
// never stored in volatile memory.
impl<T: ?Sized, A: MemPool> !TxOutSafe for Weak<T, A> {}
impl<T: ?Sized, A: MemPool> !Send for Weak<T, A> {}
impl<T: ?Sized, A: MemPool> !Sync for Weak<T, A> {}
impl<T: ?Sized, A: MemPool> !VSafe for Weak<T, A> {}
impl<T: PSafe, A: MemPool> Weak<T, A> {
    /// Raw pointer to where the payload lives, or null for a dangling weak.
    pub fn as_raw(&self) -> *const T {
        match self.inner() {
            None => std::ptr::null(),
            Some(inner) => {
                // Step past the PrcBox header to the `value` field.
                let offset = data_offset_sized::<T, A>();
                let ptr = inner as *const PrcBox<T, A>;
                let ptr = unsafe { (ptr as *const u8).offset(offset) };
                ptr as *const T
            }
        }
    }
    /// Leaks this handle (the weak count stays bumped) and returns the
    /// payload pointer; reconstruct with [`from_raw`](Self::from_raw).
    pub fn into_raw(self) -> *const T {
        let result = self.as_raw();
        mem::forget(self);
        result
    }
    /// Rebuilds a `Weak` from a pointer previously produced by
    /// `into_raw`/`as_raw` for the same pool; null yields a dangling handle.
    #[allow(clippy::missing_safety_doc)]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        if ptr.is_null() {
            Self::new()
        } else {
            // Walk back from the payload to the start of its PrcBox,
            // preserving fat-pointer metadata via `set_data_ptr`.
            let offset = data_offset::<T, A>(ptr);
            let fake_ptr = ptr as *mut PrcBox<T, A>;
            let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
            Weak {
                ptr: Ptr::from_raw(ptr),
            }
        }
    }
}
impl<T: PSafe + ?Sized, A: MemPool> Weak<T, A> {
    /// A dangling weak handle that can never upgrade.
    pub fn new() -> Weak<T, A> {
        Weak {
            ptr: Ptr::dangling(),
        }
    }
    /// Attempts to obtain a strong handle; fails once the payload is gone.
    pub fn upgrade(&self, journal: &Journal<A>) -> Option<Prc<T, A>> {
        let inner = self.inner()?;
        match inner.strong() {
            0 => None,
            _ => {
                inner.inc_strong(journal);
                Some(Prc::from_inner(self.ptr))
            }
        }
    }
    /// Strong count of the target allocation, or 0 when dangling.
    pub fn strong_count(&self) -> usize {
        self.inner().map_or(0, |inner| inner.strong())
    }
    /// Weak count of the target, or `None` when dangling. While strong
    /// handles exist, their shared implicit weak reference is excluded.
    pub fn weak_count(&self) -> Option<usize> {
        let inner = self.inner()?;
        Some(if inner.strong() > 0 {
            inner.weak() - 1
        } else {
            inner.weak()
        })
    }
    #[inline]
    fn inner(&self) -> Option<&PrcBox<T, A>> {
        match self.ptr.is_dangling() {
            true => None,
            false => Some(self.ptr.get_mut()),
        }
    }
    /// Pointer identity test.
    #[inline]
    pub fn ptr_eq(&self, other: &Self) -> bool {
        self.ptr == other.ptr
    }
}
impl<T: PSafe + ?Sized, A: MemPool> Drop for Weak<T, A> {
    /// Drops the weak reference inside the current transaction; releases
    /// the box once the last weak reference is gone.
    fn drop(&mut self) {
        if let Some(inner) = self.inner() {
            // Requires an open transaction to log the counter change.
            let journal = Journal::<A>::current(true).unwrap();
            inner.dec_weak(journal.0);
            if inner.weak() == 0 {
                unsafe {
                    A::free(self.ptr.as_mut());
                    // NOTE(review): `vlist` is dropped after `A::free`, same
                    // ordering as `Prc::drop` — presumably sound because the
                    // pool defers reclamation until commit; confirm.
                    #[cfg(not(feature = "no_volatile_pointers"))]
                    std::ptr::drop_in_place(&mut self.ptr.as_mut().vlist);
                }
            }
        }
    }
}
impl<T: PSafe + ?Sized, A: MemPool> PClone<A> for Weak<T, A> {
    /// Clones the handle, bumping the weak count unless dangling.
    #[inline]
    fn pclone(&self, journal: &Journal<A>) -> Weak<T, A> {
        match self.inner() {
            Some(inner) => inner.inc_weak(journal),
            None => {}
        }
        Weak { ptr: self.ptr }
    }
}
impl<T: PSafe + fmt::Debug + ?Sized, A: MemPool> fmt::Debug for Weak<T, A> {
    /// Opaque representation; never dereferences the (possibly dead) target.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("(Weak)")
    }
}
impl<T: PSafe + ?Sized, A: MemPool> RootObj<A> for Weak<T, A> {
    /// A root-object weak handle starts out dangling; no journal needed.
    fn init(_: &Journal<A>) -> Weak<T, A> {
        Self::new()
    }
}
/// Internal counter plumbing shared by `Prc`, `Weak`, and `PrcBox` itself.
trait PrcBoxPtr<T: PSafe + ?Sized, A: MemPool> {
    /// Mutable access to the shared counter through `&self`; implementors
    /// rely on single-threaded use (`!Send`/`!Sync`) for soundness.
    #[allow(clippy::mut_from_ref)]
    fn count(&self) -> &mut Counter;
    /// Current strong count.
    #[inline]
    fn strong(&self) -> usize {
        self.count().strong
    }
    /// Logs the counter in the journal at most once per transaction,
    /// tracked via `has_log` (compiled out under `no_log_rc`).
    #[inline]
    #[cfg(not(feature = "no_log_rc"))]
    fn log_count(&self, journal: &Journal<A>) {
        let inner = self.count();
        if inner.has_log == 0 {
            unsafe {
                inner.take_log(journal, Notifier::NonAtomic(Ptr::from_ref(&inner.has_log)));
            }
        }
    }
    /// Increments the strong count; aborts the process on overflow or when
    /// the value is already dead (count 0), like `std::rc` does.
    #[inline]
    fn inc_strong(&self, _journal: &Journal<A>) {
        let inner = self.count();
        let strong = inner.strong;
        if strong == 0 || strong == usize::max_value() {
            std::process::abort();
        }
        // Log before mutating so the transaction can roll the counter back.
        #[cfg(not(feature = "no_log_rc"))]
        self.log_count(_journal);
        inner.strong += 1;
    }
    /// Decrements the strong count (logged first).
    #[inline]
    fn dec_strong(&self, _journal: &Journal<A>) {
        #[cfg(not(feature = "no_log_rc"))]
        self.log_count(_journal);
        self.count().strong -= 1;
    }
    /// Current weak count (includes the implicit one owned by strong refs).
    #[inline]
    fn weak(&self) -> usize {
        self.count().weak
    }
    /// Increments the weak count; aborts on overflow or when already zero.
    #[inline]
    fn inc_weak(&self, _journal: &Journal<A>) {
        let weak = self.weak();
        if weak == 0 || weak == usize::max_value() {
            std::process::abort();
        }
        #[cfg(not(feature = "no_log_rc"))]
        self.log_count(_journal);
        self.count().weak += 1;
    }
    /// Decrements the weak count (logged first).
    #[inline]
    fn dec_weak(&self, _journal: &Journal<A>) {
        #[cfg(not(feature = "no_log_rc"))]
        self.log_count(_journal);
        self.count().weak -= 1;
    }
}
impl<T: PSafe + ?Sized, A: MemPool> PrcBoxPtr<T, A> for Prc<T, A> {
    /// Mutable counter access through a shared handle; relies on `Prc`
    /// being single-threaded (`!Send`/`!Sync`) for soundness.
    #[inline(always)]
    fn count(&self) -> &mut Counter {
        &mut self.ptr.get_mut().counter
    }
}
impl<T: PSafe + ?Sized, A: MemPool> PrcBoxPtr<T, A> for PrcBox<T, A> {
    /// Mutable counter access from a shared box reference.
    // Casting away constness is relied upon being sound because the
    // handles to this box are confined to a single thread (`!Send`/`!Sync`).
    #[inline(always)]
    fn count(&self) -> &mut Counter {
        unsafe {
            let this = self as *const Self as *mut Self;
            &mut (*this).counter
        }
    }
}
impl<T: PSafe + ?Sized, A: MemPool> borrow::Borrow<T> for Prc<T, A> {
    /// Borrows the payload, mirroring `Deref`.
    fn borrow(&self) -> &T {
        &**self
    }
}
impl<T: PSafe + ?Sized, A: MemPool> AsRef<T> for Prc<T, A> {
    /// Cheap reference conversion to the payload.
    fn as_ref(&self) -> &T {
        let boxed = self.inner();
        &boxed.value
    }
}
// `Prc` is just an offset-based handle, so pinning it imposes no extra invariant.
impl<T: PSafe + ?Sized, A: MemPool> Unpin for Prc<T, A> {}
/// Byte offset from the start of a `PrcBox` to a payload with the alignment
/// of `*ptr`. Safety: `ptr` must be dereferenceable so `align_of_val` can
/// inspect it.
unsafe fn data_offset<T: ?Sized, A: MemPool>(ptr: *const T) -> isize {
    data_offset_align::<A>(mem::align_of_val(&*ptr))
}
/// Same as [`data_offset`] but for sized `T`, using its static alignment.
fn data_offset_sized<T, A: MemPool>() -> isize {
    data_offset_align::<A>(mem::align_of::<T>())
}
/// Offset of the `value` field: the `PrcBox<(), A>` header size rounded up
/// to `align`. (`Layout::padding_needed_for` is a nightly API.)
#[inline]
fn data_offset_align<A: MemPool>(align: usize) -> isize {
    let layout = std::alloc::Layout::new::<PrcBox<(), A>>();
    (layout.size() + layout.padding_needed_for(align)) as isize
}
/// Debug/test helper: returns the `(strong, weak)` counters of `ptr`.
pub fn ws<T: PSafe, A: MemPool>(ptr: &Prc<T, A>) -> (usize, usize) {
    let counters = ptr.inner();
    let strong = counters.strong();
    let weak = counters.weak();
    (strong, weak)
}
/// A *volatile* weak reference: lives in ordinary memory, does not touch
/// the persistent counters, and turns dangling when the target drops or the
/// pool generation changes.
pub struct VWeak<T: ?Sized, A: MemPool> {
    ptr: *const PrcBox<T, A>,
    // Node in the target's `vlist`; flagged invalid when the target dies.
    valid: *mut VWeakValid,
    // Generation stamp from `A::gen()` at creation; compared on every access.
    gen: u32,
}
// `VWeak` stays on one thread, but since it is purely volatile bookkeeping
// it is safe across transaction and unwind boundaries, and is marked PSafe
// so it can be embedded in persistent objects.
impl<T: ?Sized, A: MemPool> !Send for VWeak<T, A> {}
impl<T: ?Sized, A: MemPool> !Sync for VWeak<T, A> {}
impl<T: ?Sized, A: MemPool> UnwindSafe for VWeak<T, A> {}
impl<T: ?Sized, A: MemPool> RefUnwindSafe for VWeak<T, A> {}
unsafe impl<T: ?Sized, A: MemPool> TxInSafe for VWeak<T, A> {}
unsafe impl<T: ?Sized, A: MemPool> TxOutSafe for VWeak<T, A> {}
unsafe impl<T: ?Sized, A: MemPool> PSafe for VWeak<T, A> {}
impl<T: PSafe + ?Sized, A: MemPool> VWeak<T, A> {
    /// Registers a fresh observer node in the target's volatile list.
    fn new(prc: &Prc<T, A>) -> VWeak<T, A> {
        let list = prc.ptr.vlist.as_mut();
        VWeak {
            ptr: prc.ptr.as_ref(),
            valid: list.append(),
            gen: A::gen(),
        }
    }
    /// A handle that can never upgrade (null target, impossible generation).
    pub fn null() -> VWeak<T, A>
    where
        T: Sized,
    {
        VWeak {
            ptr: std::ptr::null(),
            valid: std::ptr::null_mut(),
            gen: u32::MAX,
        }
    }
    /// Tries to obtain a strong `Prc`; `None` once the target is dead or
    /// the pool was reopened under a different generation.
    pub fn upgrade(&self, journal: &Journal<A>) -> Option<Prc<T, A>> {
        let inner = self.inner()?;
        if inner.counter.strong == 0 {
            return None;
        }
        unsafe { Some(Prc::from_ptr(self.ptr as *const _ as *mut _, journal)) }
    }
    #[inline]
    fn inner(&self) -> Option<&PrcBox<T, A>> {
        unsafe {
            // Short-circuit: never touch `valid` after a generation change.
            if self.gen == A::gen() && (*self.valid).valid {
                Some(&*self.ptr)
            } else {
                None
            }
        }
    }
}
impl<T: PSafe + ?Sized, A: MemPool> Clone for VWeak<T, A> {
    /// While the target is still alive (same pool generation, node still
    /// valid), a clone registers its own node in the target's list;
    /// otherwise it just copies the dead handle verbatim.
    fn clone(&self) -> Self {
        unsafe {
            if self.gen == A::gen() && (*self.valid).valid {
                let list = (*self.ptr).vlist.as_mut();
                return VWeak {
                    ptr: self.ptr,
                    valid: list.append(),
                    gen: self.gen,
                };
            }
        }
        VWeak {
            ptr: self.ptr,
            valid: self.valid,
            gen: self.gen,
        }
    }
}
impl<T: ?Sized, A: MemPool> Drop for VWeak<T, A> {
    /// Unlinks this observer node from the target's volatile list (when the
    /// pool generation still matches and the list is still alive).
    fn drop(&mut self) {
        // FIX: a handle built by `VWeak::null()` carries `valid == null`;
        // the previous code created `&mut *self.valid` unconditionally,
        // which is undefined behavior for a null pointer. Bail out first.
        if self.valid.is_null() {
            return;
        }
        unsafe {
            if self.gen == A::gen() {
                let this = &mut *self.valid;
                if !this.list.is_null() {
                    // Standard doubly-linked-list unlink.
                    let head = &mut (*this.list).head;
                    if this.prev.is_null() {
                        *head = this.next;
                    } else {
                        (*this.prev).next = this.next;
                    }
                    if !this.next.is_null() {
                        (*this.next).prev = this.prev;
                    }
                }
            }
            // NOTE(review): the boxed `VWeakValid` node is never freed here;
            // presumably intentional, since `Clone` can hand the same node
            // pointer to several handles once the list is dead — confirm the
            // leak is accepted by design.
        }
    }
}
/// Volatile per-observer node: one per live `VWeak`, linked into the list
/// owned by the observed `PrcBox`.
struct VWeakValid {
    valid: bool,
    next: *mut VWeakValid,
    prev: *mut VWeakValid,
    list: *mut VWeakList,
}
/// Intrusive doubly-linked list of the volatile observers of one `PrcBox`.
struct VWeakList {
    head: *mut VWeakValid,
}
impl VWeakList {
    /// Allocates a fresh node, pushes it at the front, and returns it.
    fn append(&mut self) -> *mut VWeakValid {
        let node = Box::into_raw(Box::new(VWeakValid {
            valid: true,
            next: self.head,
            prev: std::ptr::null_mut(),
            list: self as *mut Self,
        }));
        unsafe {
            if let Some(old_head) = self.head.as_mut() {
                old_head.prev = node;
            }
        }
        self.head = node;
        node
    }
}
impl Default for VWeakList {
    /// An empty list.
    fn default() -> Self {
        VWeakList {
            head: std::ptr::null_mut(),
        }
    }
}
impl Drop for VWeakList {
    /// Marks every outstanding node dead (and detaches it from this list)
    /// so dangling `VWeak` handles observe the invalidation; the nodes
    /// themselves stay allocated because those handles still point at them.
    fn drop(&mut self) {
        let mut cursor = self.head;
        while let Some(node) = unsafe { cursor.as_mut() } {
            node.valid = false;
            node.list = std::ptr::null_mut();
            cursor = node.next;
        }
    }
}