1use std::{
2 cmp::Ordering,
3 fmt::{self, Debug, Formatter},
4 hash::{Hash, Hasher},
5 ops::Deref,
6 sync::{Arc as StdArc, Weak},
7};
8
9use consume_on_drop::ConsumeOnDrop;
10use derive_where::derive_where;
11use parking_lot::{RwLock, RwLockUpgradableReadGuard};
12
13use self::inner::{ArcInner, ArcPoolInner};
14
15pub use self::inner::ArcIndex;
16
17mod inner;
18
/// A pool that allocates values of type `T` and hands out pooled [`Arc`]
/// handles to them.
///
/// Holds the current allocation chunk (`ArcPoolInner`) behind an `RwLock`;
/// [`ArcPool::alloc`] swaps in a larger chunk when the current one fills up.
pub struct ArcPool<T>(RwLock<StdArc<ArcPoolInner<T>>>);
21
// SAFETY: NOTE(review) — these manual impls assert the pool may be sent and
// shared across threads whenever `T: Send + Sync`. The field is
// `RwLock<StdArc<ArcPoolInner<T>>>`, so presumably `ArcPoolInner` contains
// something (e.g. raw pointers or interior-mutable cells) that suppresses the
// auto traits; confirm against the `inner` module that `T: Send + Sync` is
// sufficient for thread safety of the chunk storage.
unsafe impl<T: Send + Sync> Send for ArcPool<T> {}
unsafe impl<T: Send + Sync> Sync for ArcPool<T> {}
24
25impl<T> Default for ArcPool<T> {
26 fn default() -> Self {
27 Self::new()
28 }
29}
30
impl<T> ArcPool<T> {
    /// Creates an empty pool with zero initial capacity; the first `alloc`
    /// will grow it.
    pub fn new() -> Self {
        Self::with_capacity(0)
    }

    /// Delegates to the current chunk's `is_empty`.
    ///
    /// NOTE(review): this only consults the most recent chunk behind the
    /// `RwLock`; whether older, superseded chunks can still hold live values
    /// depends on `ArcPoolInner::is_empty` — confirm against the `inner`
    /// module.
    pub fn is_empty(&self) -> bool {
        self.0.read().is_empty()
    }

    /// Creates a pool whose first chunk can hold `cap` values, starting at
    /// offset 0 with no predecessor chunk.
    pub fn with_capacity(cap: usize) -> Self {
        Self(RwLock::new(StdArc::new(ArcPoolInner::with_capacity(
            cap,
            0,
            Weak::new(),
        ))))
    }

    /// Allocates `value` in the pool and returns a pooled [`Arc`] handle to it.
    pub fn alloc(&self, mut value: T) -> Arc<T> {
        // Fast path: try the current chunk under a shared read lock.
        // `try_alloc` hands the value back in `Err` when the chunk is full.
        let read_guard = self.0.read();
        value = match read_guard.try_alloc(value) {
            Ok(arc_inner) => return Arc(ConsumeOnDrop::new(arc_inner)),
            Err(value) => value,
        };
        drop(read_guard);
        // Slow path: take the upgradable read lock (parking_lot allows only
        // one upgradable reader at a time, so chunk growth is serialized).
        //
        // NOTE(review): a concurrent `alloc` may have installed a fresh chunk
        // between `drop(read_guard)` and here; we do not retry `try_alloc`
        // on it before growing, so we may chain a new chunk even though free
        // space exists — wasteful but not incorrect. Confirm this is intended.
        let read_guard = self.0.upgradable_read();
        // New chunk: doubled capacity (minimum 1), offset continuing where the
        // current chunk's index range ends, and a weak back-reference to the
        // chunk it supersedes.
        let inner = StdArc::new(ArcPoolInner::with_capacity(
            (read_guard.capacity() * 2).max(1),
            read_guard.offset() + read_guard.capacity(),
            StdArc::downgrade(&read_guard),
        ));
        // Link old -> new before publishing the new chunk as current.
        read_guard.set_next(inner.clone());
        // A freshly created chunk with capacity >= 1 cannot be full.
        let arc_inner = inner.try_alloc(value).unwrap_or_else(|_| unreachable!());
        // Publish: upgrade to the write lock and swap in the new chunk.
        *RwLockUpgradableReadGuard::upgrade(read_guard) = inner;
        Arc(ConsumeOnDrop::new(arc_inner))
    }
}
67
/// A reference-counted handle to a value allocated in an [`ArcPool`].
///
/// Wraps the pool-internal `ArcInner` in [`ConsumeOnDrop`] so the handle's
/// drop logic runs by consuming the inner value. `Clone` is generated via
/// `derive_where` rather than `#[derive(Clone)]` — presumably so the bound is
/// taken from the field rather than requiring `T: Clone`; confirm against the
/// `derive_where` usage conventions in this crate.
#[derive_where(Clone)]
pub struct Arc<T>(ConsumeOnDrop<ArcInner<T>>);
72
73impl<T> Arc<T> {
74 pub fn get_mut(this: &mut Self) -> Option<&mut T> {
75 this.0.get_mut()
76 }
77
78 pub fn into_inner(Self(inner): Self) -> Option<T> {
79 ConsumeOnDrop::into_inner(inner).into_inner()
80 }
81
82 pub fn try_unwrap(Self(inner): Self) -> Result<T, Self> {
83 match ConsumeOnDrop::into_inner(inner).try_unwrap() {
84 Ok(value) => Ok(value),
85 Err(inner) => Err(Self(ConsumeOnDrop::new(inner))),
86 }
87 }
88
89 pub fn into_index(this: Self) -> ArcIndex<T> {
90 ArcInner::into_index(ConsumeOnDrop::into_inner(this.0))
91 }
92
93 pub fn ptr_eq(this: &Self, other: &Self) -> bool {
94 ArcInner::ptr_eq(&this.0, &other.0)
95 }
96
97 pub unsafe fn from_index(pool: &ArcPool<T>, index: ArcIndex<T>) -> Self {
100 Self(ConsumeOnDrop::new(ArcInner::from_index(
101 pool.0.read().clone(),
102 index,
103 )))
104 }
105
106 pub unsafe fn clone_from_index(pool: &ArcPool<T>, index: &ArcIndex<T>) -> Self {
109 Self(ConsumeOnDrop::new(ArcInner::clone_from_index(
110 pool.0.read().clone(),
111 index,
112 )))
113 }
114
115 pub fn ref_count(this: &Self) -> usize {
116 this.0.ref_count()
117 }
118
119 pub fn is_unique(this: &Self) -> bool {
123 this.0.is_unique()
124 }
125}
126
127impl<T> Deref for Arc<T> {
128 type Target = T;
129
130 fn deref(&self) -> &Self::Target {
131 &self.0
132 }
133}
134
135impl<T: Debug> Debug for Arc<T> {
136 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
137 (**self).fmt(f)
138 }
139}
140
141impl<T: Hash> Hash for Arc<T> {
142 fn hash<H: Hasher>(&self, state: &mut H) {
143 (**self).hash(state)
144 }
145}
146
147impl<T: PartialEq> PartialEq for Arc<T> {
148 fn eq(&self, other: &Self) -> bool {
149 **self == **other
150 }
151}
152
// Marker impl: sound because `PartialEq` for `Arc<T>` delegates to the pooled
// values, which form a total equivalence whenever `T: Eq`.
impl<T: Eq> Eq for Arc<T> {}
154
155impl<T: PartialOrd> PartialOrd for Arc<T> {
156 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
157 (**self).partial_cmp(&**other)
158 }
159}
160
161impl<T: Ord> Ord for Arc<T> {
162 fn cmp(&self, other: &Self) -> Ordering {
163 (**self).cmp(&**other)
164 }
165}