use alloc::rc::Rc;
use core::{mem, ptr};
use core::ptr::{drop_in_place, NonNull};
#[cfg(feature = "nightly")]
use core::{
marker::Unsize,
ops::CoerceUnsized,
};
use core::fmt::{self, Debug, Formatter};
use core::mem::MaybeUninit;
use core::marker::PhantomData;
use crate::cc::{BoxedMetadata, CcBox};
use crate::state::try_state;
use crate::{Cc, Context, Finalize, Trace};
use crate::utils::{cc_dealloc, dealloc_other};
use crate::weak::weak_counter_marker::WeakCounterMarker;
pub(crate) mod weak_counter_marker;
/// A non-owning reference to a [`Cc`] allocation, analogous to `std::rc::Weak`.
///
/// A `Weak` does not keep the pointed-to value alive: it must be upgraded into
/// a [`Cc`] (see `Weak::upgrade`) before the value can be accessed, and the
/// upgrade fails once the value has been dropped.
pub struct Weak<T: ?Sized + Trace + 'static> {
    // `metadata` holds the shared weak counter. It is `None` only for a `Weak`
    // created by `Weak::new()`, in which case `cc` is a dangling pointer and
    // must never be dereferenced. `Option<NonNull<_>>` is niche-optimized, so
    // this costs no extra space.
    metadata: Option<NonNull<BoxedMetadata>>, cc: NonNull<CcBox<T>>,
    // PhantomData<Rc<T>> gives Weak<T> the same auto-trait opt-outs
    // (!Send/!Sync) and variance as Rc<T>.
    _phantom: PhantomData<Rc<T>>, }
// Nightly-only: allows unsizing coercions such as
// `Weak<Concrete> -> Weak<dyn SomeTrait>`, matching the coercions available
// on the strong `Cc` pointer.
#[cfg(feature = "nightly")]
impl<T, U> CoerceUnsized<Weak<U>> for Weak<T>
where
    T: ?Sized + Trace + Unsize<U> + 'static,
    U: ?Sized + Trace + 'static,
{
}
impl<T: Trace> Weak<T> {
    /// Creates a `Weak` that points to no allocation.
    ///
    /// Upgrading the returned value always yields `None`, and its weak count
    /// is always 0.
    #[inline]
    pub fn new() -> Self {
        // No metadata means "no allocation"; the dangling cc pointer is never
        // dereferenced in that state.
        Self {
            metadata: None,
            cc: NonNull::dangling(),
            _phantom: PhantomData,
        }
    }
}
impl<T: ?Sized + Trace> Weak<T> {
    /// Attempts to upgrade this `Weak` into a strong [`Cc`].
    ///
    /// Returns `None` when the pointed-to value has been dropped or its strong
    /// count is otherwise reported as 0 (see [`Weak::strong_count`]); otherwise
    /// increments the strong counter and returns a new `Cc` marked alive.
    ///
    /// # Panics
    /// Panics if called while the collector is tracing (debug builds only), or
    /// if the strong counter would overflow.
    #[inline]
    #[must_use = "newly created Cc is immediately dropped"]
    #[track_caller]
    pub fn upgrade(&self) -> Option<Cc<T>> {
        #[cfg(debug_assertions)]
        if crate::state::state(|state| state.is_tracing()) {
            panic!("Cannot upgrade while tracing!");
        }
        if self.strong_count() == 0 {
            None
        } else {
            // Value is still alive: take a new strong reference before handing
            // out the Cc. strong_count() != 0 implies the allocation is
            // accessible, so dereferencing `cc` is sound here.
            if unsafe { self.cc.as_ref() }.counter_marker().increment_counter().is_err() {
                panic!("Too many references has been created to a single Cc");
            }
            let upgraded = Cc::__new_internal(self.cc);
            upgraded.mark_alive();
            Some(upgraded)
        }
    }

    /// Returns `true` if the two `Weak`s point to the same allocation, or if
    /// both were created by [`Weak::new`] (no allocation at all).
    #[inline]
    pub fn ptr_eq(this: &Weak<T>, other: &Weak<T>) -> bool {
        // Compare metadata pointers: all Weaks to the same CcBox share one
        // BoxedMetadata allocation, so pointer identity of the metadata is
        // pointer identity of the target.
        match (this.metadata, other.metadata) {
            (None, None) => true,
            (None, Some(_)) => false,
            (Some(_), None) => false,
            (Some(m1), Some(m2)) => ptr::eq(m1.as_ptr() as *const (), m2.as_ptr() as *const ()),
        }
    }

    /// Returns the number of strong [`Cc`] references to the value, or 0 if the
    /// value has been dropped or cannot (or must not) be upgraded anymore.
    #[inline]
    pub fn strong_count(&self) -> u32 {
        // NOTE(review): `is_accessible()` appears to report whether the strong
        // allocation still exists (it also gates metadata deallocation in
        // Drop for Weak) — confirm in weak_counter_marker.
        if self.weak_counter_marker().map_or(false, |wcm| wcm.is_accessible()) {
            let counter_marker = unsafe { self.cc.as_ref() }.counter_marker();
            let counter = counter_marker.counter();
            // Report 0 when the value is already dropped, or when it sits in a
            // collector list/queue while a drop pass is running (it is about to
            // be destroyed). try_state failing is treated conservatively as
            // "dropping" via unwrap_or(true).
            if counter == 0 || counter_marker.is_dropped() || (
                counter_marker.is_in_list_or_queue() && try_state(|state| state.is_dropping()).unwrap_or(true)
            ) {
                0
            } else {
                counter as u32
            }
        } else {
            0
        }
    }

    /// Returns the number of `Weak` references to the allocation
    /// (0 for a `Weak` created by [`Weak::new`]).
    #[inline]
    pub fn weak_count(&self) -> u32 {
        self.weak_counter_marker().map_or(0, |wcm| wcm.counter() as u32)
    }

    /// Borrows the weak counter stored in the shared metadata, or `None` for a
    /// metadata-less `Weak` (created by `Weak::new`). The `?` inside the
    /// expression short-circuits the `None` case before the unsafe deref.
    #[inline]
    fn weak_counter_marker(&self) -> Option<&WeakCounterMarker> {
        Some(unsafe { &self.metadata?.as_ref().weak_counter_marker })
    }
}
impl<T: ?Sized + Trace> Clone for Weak<T> {
    /// Makes a copy of this `Weak`, bumping the weak counter when the `Weak`
    /// actually points at an allocation.
    ///
    /// # Panics
    /// Panics if called while the collector is tracing (debug builds only), or
    /// if the weak counter would overflow.
    #[inline]
    #[track_caller]
    fn clone(&self) -> Self {
        #[cfg(debug_assertions)]
        if crate::state::state(|state| state.is_tracing()) {
            panic!("Cannot clone while tracing!");
        }
        // A Weak from Weak::new() has no metadata, hence no counter to bump.
        let bump = self.weak_counter_marker().map(|wcm| wcm.increment_counter());
        if let Some(Err(_)) = bump {
            panic!("Too many references has been created to a single Weak");
        }
        Self {
            metadata: self.metadata,
            cc: self.cc,
            _phantom: PhantomData,
        }
    }
}
impl<T: ?Sized + Trace> Drop for Weak<T> {
    #[inline]
    fn drop(&mut self) {
        // A Weak created by Weak::new() has no metadata, so nothing to release.
        let Some(metadata) = self.metadata else { return; };
        unsafe {
            let res = metadata.as_ref().weak_counter_marker.decrement_counter();
            debug_assert!(res.is_ok());
            // Free the shared metadata only when this was the last Weak AND the
            // allocation is no longer accessible (NOTE(review): presumably the
            // strong side has already been destroyed — while it is accessible,
            // the CcBox is expected to deallocate the metadata instead;
            // confirm against the CcBox drop path).
            if metadata.as_ref().weak_counter_marker.counter() == 0 && !metadata.as_ref().weak_counter_marker.is_accessible() {
                dealloc_other(metadata);
            }
        }
    }
}
unsafe impl<T: ?Sized + Trace> Trace for Weak<T> {
    /// Intentionally a no-op: a `Weak` holds no strong reference, so it
    /// contributes nothing to the reachability graph.
    #[inline(always)]
    fn trace(&self, _: &mut Context<'_>) {
    }
}
// Weak needs no finalization logic; the default (empty) Finalize impl suffices.
impl<T: ?Sized + Trace> Finalize for Weak<T> {
}
impl<T: Trace> Cc<T> {
    /// Creates a `Cc<T>` whose value can hold a [`Weak`] pointing back to
    /// itself, enabling self-referential (cyclic) structures.
    ///
    /// The closure `f` receives a `Weak` to the allocation under construction.
    /// Upgrading that `Weak` inside `f` yields `None`, since the value does not
    /// exist yet (the strong counter is temporarily 0, see below).
    ///
    /// # Panics
    /// Panics if called while the collector is tracing (debug builds only).
    /// If `f` itself panics, the partially-built allocation is cleaned up
    /// before the panic propagates.
    #[cfg_attr(
        feature = "derive",
        doc = r"```rust"
    )]
    #[cfg_attr(
        not(feature = "derive"),
        doc = r"```rust,ignore"
    )]
    #[doc = r"# use rust_cc::*;
# use rust_cc::*;
# use rust_cc::weak::*;
# use rust_cc_derive::*;
#[derive(Trace, Finalize)]
struct Cyclic {
cyclic: Weak<Self>,
}
let cyclic = Cc::new_cyclic(|weak| {
Cyclic {
cyclic: weak.clone(),
}
});
```"]
    #[must_use = "newly created Cc is immediately dropped"]
    #[track_caller]
    pub fn new_cyclic<F>(f: F) -> Cc<T>
    where
        F: FnOnce(&Weak<T>) -> T,
    {
        #[cfg(debug_assertions)]
        if crate::state::state(|state| state.is_tracing()) {
            panic!("Cannot create a new Cc while tracing!");
        }
        // Allocate the CcBox with a (deliberately) uninitialized payload.
        let cc = Cc::new(NewCyclicWrapper::new());
        let invalid_cc: NonNull<CcBox<_>> = cc.inner_ptr();
        // Keep the allocation alive via the raw pointer only: dropping `cc`
        // here would run Drop on the still-uninitialized wrapper.
        mem::forget(cc);
        let metadata: NonNull<BoxedMetadata> = unsafe { invalid_cc.as_ref() }.get_or_init_metadata();
        // Weak count for the `weak` handed to `f` below. The Result is ignored
        // since the metadata was just obtained for a brand-new allocation.
        let _ = unsafe { metadata.as_ref() }.weak_counter_marker.increment_counter();
        {
            // Lower the strong count (1, from Cc::new) to 0 so that upgrading
            // `weak` inside `f` returns None (see Weak::strong_count).
            let counter_marker = unsafe { invalid_cc.as_ref() }.counter_marker();
            debug_assert_eq!(1, counter_marker.counter());
            let _ = counter_marker.decrement_counter();
        }
        let weak: Weak<T> = Weak {
            metadata: Some(metadata),
            cc: invalid_cc.cast(), _phantom: PhantomData,
        };
        // If `f` panics, this guard releases the metadata and frees the CcBox
        // WITHOUT dropping the payload (which was never initialized).
        struct PanicGuard<T: Trace + 'static> {
            invalid_cc: NonNull<CcBox<NewCyclicWrapper<T>>>,
        }
        impl<T: Trace> Drop for PanicGuard<T> {
            fn drop(&mut self) {
                unsafe {
                    let layout = self.invalid_cc.as_ref().layout();
                    self.invalid_cc.as_ref().drop_metadata();
                    let _ = try_state(|state| {
                        cc_dealloc(self.invalid_cc, layout, state);
                    });
                }
            }
        }
        let panic_guard = PanicGuard { invalid_cc };
        let to_write = f(&weak);
        // `f` returned normally: disarm the guard before initializing the slot.
        mem::forget(panic_guard);
        unsafe {
            (*invalid_cc.as_ref().get_elem_mut()).inner.write(to_write);
        }
        // Restore the strong count to 1 for the Cc returned below; cannot fail
        // since the counter was explicitly lowered to 0 above.
        let _ = unsafe { invalid_cc.as_ref() }.counter_marker().increment_counter();
        // NewCyclicWrapper is repr(transparent) over MaybeUninit<T>, making the
        // CcBox<NewCyclicWrapper<T>> -> CcBox<T> cast sound now that the value
        // is initialized.
        let cc: Cc<T> = Cc::__new_internal(invalid_cc.cast());
        debug_assert_eq!(1, cc.inner().counter_marker().counter());
        cc
    }
}
impl<T: ?Sized + Trace> Cc<T> {
    /// Creates a new [`Weak`] reference to this allocation, incrementing the
    /// weak counter (the shared metadata is allocated lazily on first use).
    ///
    /// # Panics
    /// Panics if called while the collector is tracing (debug builds only), or
    /// if the weak counter would overflow.
    #[inline]
    #[must_use = "newly created Weak is immediately dropped"]
    #[track_caller]
    pub fn downgrade(&self) -> Weak<T> {
        #[cfg(debug_assertions)]
        if crate::state::state(|state| state.is_tracing()) {
            panic!("Cannot downgrade while tracing!");
        }
        let metadata = self.inner().get_or_init_metadata();
        if unsafe { metadata.as_ref() }.weak_counter_marker.increment_counter().is_err() {
            panic!("Too many references has been created to a single Weak");
        }
        self.mark_alive();
        Weak {
            metadata: Some(metadata),
            cc: self.inner_ptr(),
            _phantom: PhantomData,
        }
    }

    /// Returns the number of [`Weak`] references to this allocation
    /// (0 when the metadata was never allocated, i.e. never downgraded).
    #[inline]
    pub fn weak_count(&self) -> u32 {
        // Check has_allocated_for_metadata first: get_metadata_unchecked must
        // not be called before the metadata exists.
        if self.inner().counter_marker().has_allocated_for_metadata() {
            unsafe { self.inner().get_metadata_unchecked().as_ref() }.weak_counter_marker.counter() as u32
        } else {
            0
        }
    }
}
/// Helper payload for [`Cc::new_cyclic`]: wraps a possibly-uninitialized `T`
/// so the CcBox can be allocated before the value itself exists.
///
/// `repr(transparent)` guarantees the same layout as `MaybeUninit<T>` (and thus
/// `T`), which is what makes the `CcBox<NewCyclicWrapper<T>> -> CcBox<T>` cast
/// in `new_cyclic` possible.
#[repr(transparent)]
struct NewCyclicWrapper<T: Trace + 'static> {
    inner: MaybeUninit<T>,
}
impl<T: Trace> NewCyclicWrapper<T> {
    /// Creates a wrapper whose payload has not been initialized yet.
    #[inline(always)]
    fn new() -> NewCyclicWrapper<T> {
        Self { inner: MaybeUninit::uninit() }
    }
}
unsafe impl<T: Trace> Trace for NewCyclicWrapper<T> {
    #[inline]
    fn trace(&self, ctx: &mut Context<'_>) {
        // SAFETY assumption: tracing only reaches this wrapper after
        // new_cyclic has written the value (the strong counter stays 0 until
        // then) — TODO confirm the collector cannot trace it earlier.
        unsafe {
            self.inner.assume_init_ref().trace(ctx);
        }
    }
}
impl<T: Trace> Finalize for NewCyclicWrapper<T> {
    #[inline]
    fn finalize(&self) {
        // SAFETY assumption: finalization only happens on a fully-constructed
        // allocation, so `inner` is initialized — TODO confirm against the
        // collector's finalize path.
        unsafe {
            self.inner.assume_init_ref().finalize();
        }
    }
}
impl<T: Trace> Drop for NewCyclicWrapper<T> {
    #[inline]
    fn drop(&mut self) {
        // Manually drop the wrapped value: MaybeUninit never drops its
        // contents on its own.
        // SAFETY assumption: this Drop only runs after new_cyclic initialized
        // the payload (a panic in the closure deallocates through PanicGuard
        // without dropping the wrapper) — TODO confirm cc_dealloc skips Drop.
        unsafe {
            let ptr = self.inner.assume_init_mut() as *mut T;
            drop_in_place(ptr);
        }
    }
}
impl<T: Trace> Default for Weak<T> {
    /// Equivalent to [`Weak::new`]: a `Weak` pointing to no allocation.
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
impl<T: ?Sized + Trace + Debug> Debug for Weak<T> {
    /// Formats as the opaque placeholder `(Weak)`; the target value is never
    /// accessed (it may already have been dropped).
    #[inline]
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.write_str("(Weak)")
    }
}