1use std::{cmp::PartialEq, fmt, ops::Deref};
2
3use crate::{CellRef, RefOverflow};
4
/// An immutable borrow of a value, backed by a [`CellRef`].
///
/// The inner [`CellRef`] carries an atomic `flag` that counts outstanding
/// borrows: `clone` and [`Ref::try_clone`] increment it (see the tests in
/// this module). Presumably dropping the `Ref` decrements it — the `Drop`
/// logic lives in `CellRef`, outside this file.
pub struct Ref<'a, V>
where
    V: 'a,
{
    // The tracked borrow this wrapper forwards to.
    pub(crate) inner: CellRef<'a, V>,
}
12
13impl<'a, V> Ref<'a, V> {
14 pub fn new(inner: CellRef<'a, V>) -> Self {
16 Self { inner }
17 }
18
19 #[allow(clippy::doc_overindented_list_items)]
34 pub fn try_clone(&self) -> Result<Self, RefOverflow> {
35 self.inner.try_clone().map(Self::new)
36 }
37}
38
39impl<'a, V> Deref for Ref<'a, V> {
40 type Target = V;
41
42 fn deref(&self) -> &V {
43 &self.inner
44 }
45}
46
47impl<'a, V> fmt::Debug for Ref<'a, V>
48where
49 V: fmt::Debug + 'a,
50{
51 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
52 let inner: &V = self;
53 f.debug_struct("Ref").field("inner", inner).finish()
54 }
55}
56
57impl<'a, V> PartialEq for Ref<'a, V>
58where
59 V: PartialEq + 'a,
60{
61 fn eq(&self, other: &Self) -> bool {
62 let r_self: &V = self;
63 let r_other: &V = other;
64 r_self == r_other
65 }
66}
67
68impl<'a, V> Clone for Ref<'a, V> {
69 #[allow(clippy::doc_overindented_list_items)]
84 fn clone(&self) -> Self {
85 Ref {
86 inner: self.inner.clone(),
87 }
88 }
89}
90
#[cfg(test)]
mod tests {
    use std::sync::atomic::{AtomicUsize, Ordering};

    use crate::{cell_ref::REF_LIMIT_MAX, CellRef, RefOverflow};

    use super::Ref;

    #[test]
    fn debug_includes_inner_field() {
        let flag = AtomicUsize::new(0);
        let value = A(1);
        let r#ref = Ref::new(CellRef {
            flag: &flag,
            value: &value,
        });

        // `format!` exercises the `fmt::Debug` impl directly; no need for a
        // pre-sized `String` and `write!`, nor for the test to return a
        // `fmt::Result`.
        assert_eq!("Ref { inner: A(1) }", format!("{:?}", r#ref));
    }

    #[test]
    fn partial_eq_compares_value() {
        let flag = AtomicUsize::new(0);
        let value = A(1);
        let r#ref = Ref::new(CellRef {
            flag: &flag,
            value: &value,
        });

        // Equal when the pointed-at values are equal...
        assert_eq!(
            Ref::new(CellRef {
                flag: &flag,
                value: &value,
            }),
            r#ref
        );
        // ...and unequal when they differ, regardless of shared bookkeeping.
        assert_ne!(
            Ref::new(CellRef {
                flag: &flag,
                value: &A(2),
            }),
            r#ref
        );
    }

    #[test]
    fn try_clone_returns_ok_when_ref_count_less_than_usize_max() {
        let flag = &AtomicUsize::new(1);
        let value = &A(1);
        let ref_0 = Ref::new(CellRef { flag, value });

        assert_eq!(1, ref_0.inner.flag.load(Ordering::SeqCst));

        let try_clone_result = ref_0.try_clone();

        let ref_1 = try_clone_result.expect("try_clone_result to be ok");
        // Both handles observe the incremented count.
        assert_eq!(2, ref_0.inner.flag.load(Ordering::SeqCst));
        assert_eq!(2, ref_1.inner.flag.load(Ordering::SeqCst));
    }

    #[test]
    fn try_clone_returns_err_when_ref_count_equals_usize_max() {
        let flag = &AtomicUsize::new(REF_LIMIT_MAX);
        let value = &A(1);
        let ref_0 = Ref::new(CellRef { flag, value });

        assert_eq!(REF_LIMIT_MAX, ref_0.inner.flag.load(Ordering::SeqCst));

        let try_clone_result = ref_0.try_clone();

        let e = try_clone_result.expect_err("try_clone_result to be err");
        assert_eq!(RefOverflow, e);

        // A failed clone must leave the count untouched.
        assert_eq!(REF_LIMIT_MAX, ref_0.inner.flag.load(Ordering::SeqCst));
    }

    #[test]
    fn clone_increments_cell_ref_count() {
        let flag = &AtomicUsize::new(1);
        let value = &A(1);
        let ref_0 = Ref::new(CellRef { flag, value });

        assert_eq!(1, ref_0.inner.flag.load(Ordering::SeqCst));

        let ref_1 = ref_0.clone();

        assert_eq!(2, ref_0.inner.flag.load(Ordering::SeqCst));
        assert_eq!(2, ref_1.inner.flag.load(Ordering::SeqCst));
    }

    #[test]
    #[should_panic(expected = "Failed to clone `CellRef`: Ref count exceeded `isize::MAX`")]
    fn clone_panics_when_ref_count_equals_usize_max() {
        let flag = &AtomicUsize::new(REF_LIMIT_MAX);
        let value = &A(1);
        let ref_0 = Ref::new(CellRef { flag, value });

        assert_eq!(REF_LIMIT_MAX, ref_0.inner.flag.load(Ordering::SeqCst));

        let _cloned = ref_0.clone();
    }

    /// Simple `Debug + PartialEq` payload for the tests above.
    #[derive(Debug, Clone, PartialEq)]
    struct A(usize);
}