unique_pointer/refcounter.rs

use std::alloc::Layout;
use std::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd};
use std::convert::{AsMut, AsRef};
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use std::ops::{AddAssign, Deref, DerefMut, SubAssign};
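
// The count lives behind a raw heap pointer so that every clone of a
// RefCounter observes and mutates the same value; a null pointer means
// "not allocated yet" and reads as zero.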
pub struct RefCounter {
    data: *mut usize,
}

impl RefCounter {
    pub fn null() -> RefCounter {
        RefCounter {
            data: std::ptr::null_mut::<usize>(),
        }
    }
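
    // A fresh counter starts at 1.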
    pub fn new() -> RefCounter {
        let mut ref_counter = RefCounter::null();
        ref_counter.incr();
        ref_counter
    }
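
    // The mutating helpers below take `&self` yet still write to the count:
    // they all go through `meta_mut`, which casts the immutability away (see
    // the `#[allow(invalid_reference_casting)]` block near the end of the file).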
    pub fn reset(&self) {
        let mut up = unsafe { self.meta_mut() };
        up.write(1);
    }

    pub fn incr(&self) {
        let mut up = unsafe { self.meta_mut() };
        up.incr_by(1);
    }

    pub fn incr_by(&self, by: usize) {
        let mut up = unsafe { self.meta_mut() };
        up.write(up.read() + by);
    }

    pub fn decr(&self) {
        let mut up = unsafe { self.meta_mut() };
        up.decr_by(1);
    }
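
    // Decrements only when the count is at least `by`, so it can never wrap
    // below zero.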
    pub fn decr_by(&self, by: usize) {
        let mut up = unsafe { self.meta_mut() };
        let data = up.read();
        if data >= by {
            up.write(data - by);
        }
    }
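
    // Called from `Drop`: drops the counter value in place. The allocation
    // made by `alloc` is not returned to the global allocator here, and the
    // `alloc` call below is a no-op while `data` is non-null.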
    pub fn drain(&mut self) {
        if !self.data.is_null() {
            unsafe {
                self.data.drop_in_place();
                self.alloc();
            }
        }
    }
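
    // Reads the current count; a null pointer reads as zero.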
    pub fn read(&self) -> usize {
        if self.data.is_null() {
            0
        } else {
            let mut ptr = self.cast_const();
            unsafe { ptr.read() }
        }
    }
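
    // Lazily allocates space for the count on first use and initializes it to 1.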
    fn alloc(&self) {
        if !self.data.is_null() {
            return;
        }

        let layout = Layout::new::<usize>();
        let ptr = unsafe {
            let ptr = std::alloc::alloc(layout);
            if ptr.is_null() {
                std::alloc::handle_alloc_error(layout);
            }
            ptr as *mut usize
        };
        let mut up = unsafe { self.meta_mut() };
        up.data = ptr;
        up.write(1);
    }
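
    // Writes a new count, allocating on first use.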
    pub fn write(&mut self, data: usize) {
        self.alloc();
        let mut ptr = self.cast_mut();
        unsafe {
            ptr.write(data);
        }
    }
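
    // Hands out a shared reference with a caller-chosen lifetime; when the
    // pointer is null this is a reference to the promoted constant `0`.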
    pub fn inner_ref<'c>(&self) -> &'c usize {
        if self.data.is_null() {
            &0
        } else {
            let ptr = self.cast_const();
            unsafe { &*ptr }
        }
    }
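
    // Mutable counterpart: allocates a zeroed count first if necessary.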
    pub fn inner_mut<'c>(&mut self) -> &'c mut usize {
        if self.data.is_null() {
            self.write(0);
        }
        let mut ptr = self.cast_mut();
        unsafe { &mut *ptr }
    }
}
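
// Raw-pointer views over the counter's allocation.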
impl RefCounter {
    fn cast_mut(&self) -> *mut usize {
        self.data
    }

    fn cast_const(&self) -> *const usize {
        self.data.cast_const()
    }
}
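
// The conversion and std trait impls below delegate to the accessors above,
// so a RefCounter can be used much like a plain usize.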
impl From<usize> for RefCounter {
    fn from(refs: usize) -> RefCounter {
        let mut ref_counter = RefCounter::new();
        ref_counter.write(refs);
        ref_counter
    }
}

impl AsRef<usize> for RefCounter {
    fn as_ref(&self) -> &usize {
        self.inner_ref()
    }
}

impl AsMut<usize> for RefCounter {
    fn as_mut(&mut self) -> &mut usize {
        if self.data.is_null() {
            self.write(0);
        }
        let mut ptr = self.cast_mut();
        unsafe { &mut *ptr }
    }
}

impl Deref for RefCounter {
    type Target = usize;

    fn deref(&self) -> &usize {
        self.inner_ref()
    }
}

impl DerefMut for RefCounter {
    fn deref_mut(&mut self) -> &mut usize {
        self.inner_mut()
    }
}

impl Drop for RefCounter {
    fn drop(&mut self) {
        self.drain()
    }
}
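
// Cloning shares the allocation: only the pointer is copied, not the value,
// so every clone reads and writes the same count.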
impl Clone for RefCounter {
    fn clone(&self) -> RefCounter {
        // Start from a null counter so no throwaway allocation is made before
        // the pointer is overwritten with the shared one.
        let mut clone = RefCounter::null();
        clone.data = self.data;
        clone
    }
}

impl std::fmt::Debug for RefCounter {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "{}",
            [
                format!("RefCounter@"),
                format!("{:016x}", self.data.addr()),
                format!("[data={}]", self.read()),
            ]
            .join("")
        )
    }
}

impl std::fmt::Display for RefCounter {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}", self.read())
    }
}
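
// The escape hatch behind the `&self` mutators: `meta_mut` turns a shared
// reference into a mutable one by round-tripping through a raw pointer,
// which is why the `invalid_reference_casting` lint is silenced for this block.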
#[allow(invalid_reference_casting)]
impl<'c> RefCounter {
    unsafe fn meta_mut(&'c self) -> &'c mut RefCounter {
        unsafe {
            let ptr = self.meta_mut_ptr();
            let mut up = &mut *ptr;
            std::mem::transmute::<&mut RefCounter, &'c mut RefCounter>(up)
        }
    }

    unsafe fn meta_mut_ptr(&self) -> *mut RefCounter {
        let ptr = self as *const RefCounter;
        unsafe {
            let ptr: *mut RefCounter =
                std::mem::transmute::<*const RefCounter, *mut RefCounter>(ptr);
            ptr
        }
    }
}
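
// The operator and comparison impls are thin wrappers over the methods above,
// so a count can be adjusted by and compared with plain usize values.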
impl AddAssign<usize> for RefCounter {
    fn add_assign(&mut self, other: usize) {
        self.incr_by(other)
    }
}

impl SubAssign<usize> for RefCounter {
    fn sub_assign(&mut self, other: usize) {
        self.decr_by(other)
    }
}

impl PartialOrd<usize> for RefCounter {
    fn partial_cmp(&self, other: &usize) -> Option<Ordering> {
        self.read().partial_cmp(other)
    }
}

impl PartialEq<usize> for RefCounter {
    fn eq(&self, other: &usize) -> bool {
        self.read().eq(other)
    }
}

impl PartialOrd for RefCounter {
    fn partial_cmp(&self, other: &RefCounter) -> Option<Ordering> {
        self.read().partial_cmp(other.inner_ref())
    }
}

impl Ord for RefCounter {
    fn cmp(&self, other: &RefCounter) -> Ordering {
        self.read().cmp(other.inner_ref())
    }
}

impl PartialEq for RefCounter {
    fn eq(&self, other: &RefCounter) -> bool {
        self.read().eq(other.inner_ref())
    }
}

impl Eq for RefCounter {}

impl Hash for RefCounter {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.read().hash(state)
    }
}
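
// A minimal usage sketch, not part of the original listing: it exercises the
// shared-count behavior through `Clone` and the operator impls, assuming this
// file is compiled as a module of a crate and run with `cargo test`.
#[cfg(test)]
mod tests {
    use super::RefCounter;

    #[test]
    fn clones_share_the_same_count() {
        let counter = RefCounter::new();
        assert_eq!(counter.read(), 1);

        // The clone points at the same allocation, so writes through either
        // handle are visible through the other.
        let mut shared = counter.clone();
        shared += 2; // AddAssign<usize> -> incr_by
        assert_eq!(counter.read(), 3);

        shared -= 1; // SubAssign<usize> -> decr_by
        assert_eq!(counter, 2usize); // PartialEq<usize>
    }

    #[test]
    fn null_counter_reads_as_zero() {
        let counter = RefCounter::null();
        assert_eq!(counter.read(), 0);
    }
}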