1#![allow(unsafe_code)]
2
3use std::mem::ManuallyDrop;
4use std::sync::Arc;
5
/// A self-referential pair: a `value` that may hold references into the
/// bytes of `backing`, stored together so the borrow can never dangle.
///
/// Both fields are wrapped in `ManuallyDrop` so the custom `Drop` impl
/// can enforce the required destruction order: `value` (the borrower)
/// strictly before `backing` (the storage).
pub struct SelfRef<T: 'static> {
    // The (possibly borrowing) value; must be dropped before `backing`.
    value: ManuallyDrop<T>,

    // The byte storage `value` may reference.
    backing: ManuallyDrop<Backing>,
}
22
/// Reference-counted byte storage usable as the backing of a [`SelfRef`].
pub trait SharedBacking: Send + Sync + 'static {
    /// Returns the backing bytes.
    ///
    /// NOTE(review): `SelfRef` manufactures `'static` references into this
    /// slice via `slice::from_raw_parts`, so implementations must return a
    /// stably-addressed slice that never moves or reallocates for the life
    /// of `self` — confirm against each implementor.
    fn as_bytes(&self) -> &[u8];
}
28
/// The owned byte storage a [`SelfRef`] value may borrow from.
pub enum Backing {
    /// Uniquely-owned heap bytes (the slice data stays put when the
    /// `Box` handle itself is moved).
    Boxed(Box<[u8]>),
    /// Reference-counted shared storage.
    Shared(Arc<dyn SharedBacking>),
}
37
38impl Backing {
39 pub fn shared(shared: Arc<dyn SharedBacking>) -> Self {
41 Self::Shared(shared)
42 }
43
44 pub fn as_bytes(&self) -> &[u8] {
46 match self {
47 Backing::Boxed(b) => b,
48 Backing::Shared(s) => s.as_bytes(),
49 }
50 }
51}
52
impl<T: 'static> Drop for SelfRef<T> {
    fn drop(&mut self) {
        // SAFETY: each `ManuallyDrop` is dropped exactly once, here, and
        // `self` is being destroyed so neither field is touched again.
        // The order is load-bearing: `value` may hold references into
        // `backing`'s bytes, so it must be destroyed first.
        unsafe {
            ManuallyDrop::drop(&mut self.value);
            ManuallyDrop::drop(&mut self.backing);
        }
    }
}
62
63impl<T: 'static> SelfRef<T> {
64 pub fn try_new<E>(
70 backing: Backing,
71 builder: impl FnOnce(&'static [u8]) -> Result<T, E>,
72 ) -> Result<Self, E> {
73 let bytes: &'static [u8] = unsafe {
78 let b = backing.as_bytes();
79 std::slice::from_raw_parts(b.as_ptr(), b.len())
80 };
81
82 let value = builder(bytes)?;
83
84 Ok(Self {
85 value: ManuallyDrop::new(value),
86 backing: ManuallyDrop::new(backing),
87 })
88 }
89
90 pub fn new(backing: Backing, builder: impl FnOnce(&'static [u8]) -> T) -> Self {
92 Self::try_new(backing, |bytes| {
93 Ok::<_, std::convert::Infallible>(builder(bytes))
94 })
95 .unwrap_or_else(|e: std::convert::Infallible| match e {})
96 }
97 pub fn owning(backing: Backing, value: T) -> Self {
103 Self {
104 value: ManuallyDrop::new(value),
105 backing: ManuallyDrop::new(backing),
106 }
107 }
108
109 pub fn try_repack<U: 'static, E>(
120 mut self,
121 f: impl FnOnce(T, &'static [u8]) -> Result<U, E>,
122 ) -> Result<SelfRef<U>, E> {
123 let value = unsafe { ManuallyDrop::take(&mut self.value) };
124 let backing = unsafe { ManuallyDrop::take(&mut self.backing) };
125 core::mem::forget(self);
126
127 let bytes: &'static [u8] = unsafe {
128 let b = backing.as_bytes();
129 std::slice::from_raw_parts(b.as_ptr(), b.len())
130 };
131
132 match f(value, bytes) {
133 Ok(u) => Ok(SelfRef {
134 value: ManuallyDrop::new(u),
135 backing: ManuallyDrop::new(backing),
136 }),
137 Err(e) => Err(e),
138 }
139 }
140
141 pub fn try_map<U: 'static, E>(
142 mut self,
143 f: impl FnOnce(T) -> Result<U, E>,
144 ) -> Result<SelfRef<U>, E> {
145 let value = unsafe { ManuallyDrop::take(&mut self.value) };
146 let backing = unsafe { ManuallyDrop::take(&mut self.backing) };
147 core::mem::forget(self);
148
149 match f(value) {
150 Ok(u) => Ok(SelfRef {
151 value: ManuallyDrop::new(u),
152 backing: ManuallyDrop::new(backing),
153 }),
154 Err(e) => Err(e),
155 }
156 }
157
158 pub fn map<U: 'static>(mut self, f: impl FnOnce(T) -> U) -> SelfRef<U> {
159 let value = unsafe { ManuallyDrop::take(&mut self.value) };
162 let backing = unsafe { ManuallyDrop::take(&mut self.backing) };
163 core::mem::forget(self);
164
165 SelfRef {
166 value: ManuallyDrop::new(f(value)),
167 backing: ManuallyDrop::new(backing),
168 }
169 }
170}
171
172impl<T: 'static> core::ops::Deref for SelfRef<T> {
173 type Target = T;
174 fn deref(&self) -> &T {
175 &self.value
176 }
177}
178
/// Matches on an enum-typed field of a [`SelfRef`]'s value, rebinding the
/// matched variant's payload as a new `SelfRef` (same backing) bound to
/// `$binding` inside the arm's body.
///
/// Notes on the expansion:
/// - Each arm first tests the variant with `matches!` (through `Deref`),
///   then calls `.map` to move the payload out; the `unreachable!()` arm
///   inside `map` is guarded by that preceding `matches!` check.
/// - The arms expand to an `if`/`else if` cascade whose final `else`
///   block evaluates to `()`, so every `$body` must also evaluate to `()`.
/// - If no arm matches, the `SelfRef` is simply dropped.
#[macro_export]
macro_rules! selfref_match {
    (
        $selfref:expr, $field:ident {
            $( $first:ident $(:: $rest:ident)* ($binding:tt) => $body:block )*
        }
    ) => {{
        let __sref = $selfref;
        $(
            if ::core::matches!(&__sref.$field, $first$(::$rest)*(_)) {
                #[allow(unused_variables)]
                let $binding = __sref.map(|__v| match __v.$field {
                    $first$(::$rest)*(__inner) => __inner,
                    // Guarded by the `matches!` check above.
                    _ => unreachable!(),
                });
                $body
            } else
        )*
        {
            // No arm matched: consume (drop) the SelfRef.
            let _ = __sref;
        }
    }};
}
223
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::atomic::{AtomicBool, Ordering};

    /// Shared backing that records (via `dropped`) when it is destroyed.
    struct TestSharedBacking {
        bytes: Vec<u8>,
        dropped: Arc<AtomicBool>,
    }

    impl SharedBacking for TestSharedBacking {
        fn as_bytes(&self) -> &[u8] {
            &self.bytes
        }
    }

    impl Drop for TestSharedBacking {
        fn drop(&mut self) {
            self.dropped.store(true, Ordering::Release);
        }
    }

    /// Value whose destructor records whether the backing was still alive
    /// at the moment the value was dropped.
    struct DropOrderValue {
        // Set to true by `TestSharedBacking::drop`.
        backing_dropped: Arc<AtomicBool>,
        // Set here: true iff the backing had NOT yet been dropped.
        value_dropped_before_backing: Arc<AtomicBool>,
    }

    impl Drop for DropOrderValue {
        fn drop(&mut self) {
            // Observe the backing's state at value-drop time; the drop
            // order contract requires it to still be alive.
            let backing_is_dropped = self.backing_dropped.load(Ordering::Acquire);
            self.value_dropped_before_backing
                .store(!backing_is_dropped, Ordering::Release);
        }
    }

    /// `try_new` hands the builder the backing bytes and stores the
    /// resulting borrowing value.
    #[test]
    fn try_new_builds_borrowing_value_from_backing() {
        let backing = Backing::Boxed(Box::from([1_u8, 2, 3, 4]));
        let sref = SelfRef::try_new(backing, |bytes| Ok::<_, ()>(&bytes[1..3]))
            .expect("try_new should succeed");
        assert_eq!(&**sref, &[2_u8, 3]);
    }

    /// A builder error comes back unchanged from `try_new`.
    #[test]
    fn try_new_propagates_builder_error() {
        let backing = Backing::Boxed(Box::from([9_u8, 8, 7]));
        let err = match SelfRef::<u32>::try_new(backing, |_| Err::<u32, _>("boom")) {
            Ok(_) => panic!("try_new should return builder error"),
            Err(err) => err,
        };
        assert_eq!(err, "boom");
    }

    /// `try_map` transforms the value; `try_repack` additionally gets a
    /// fresh borrow of the (still live) backing bytes.
    #[test]
    fn try_map_and_try_repack_preserve_backing_and_transform_value() {
        let backing = Backing::Boxed(Box::from(*b"hello"));
        let sref = SelfRef::new(backing, |bytes| bytes);
        let len_ref = sref
            .try_map(|bytes| Ok::<_, ()>(bytes.len()))
            .expect("try_map should succeed");
        assert_eq!(*len_ref, 5);

        let backing = Backing::Boxed(Box::from(*b"abcdef"));
        let sref = SelfRef::new(backing, |_| 10_u32);
        let repacked = sref
            .try_repack(|value, bytes| Ok::<_, ()>((value + 1, bytes[0], bytes[5])))
            .expect("try_repack should succeed");
        assert_eq!(*repacked, (11_u32, b'a', b'f'));
    }

    /// Errors from the closures surface from `try_map` / `try_repack`.
    #[test]
    fn try_map_and_try_repack_propagate_errors() {
        let backing = Backing::Boxed(Box::from([1_u8, 2, 3]));
        let sref = SelfRef::new(backing, |_| 7_u8);
        let err = match sref.try_map::<u8, _>(|_| Err::<u8, _>("nope")) {
            Ok(_) => panic!("try_map error should propagate"),
            Err(err) => err,
        };
        assert_eq!(err, "nope");

        let backing = Backing::Boxed(Box::from([4_u8, 5, 6]));
        let sref = SelfRef::new(backing, |_| 9_u8);
        let err = match sref.try_repack::<u8, _>(|_, _| Err::<u8, _>("bad")) {
            Ok(_) => panic!("try_repack error should propagate"),
            Err(err) => err,
        };
        assert_eq!(err, "bad");
    }

    /// Dropping a `SelfRef` must drop the value strictly before the
    /// backing (the value may borrow from the backing's bytes).
    #[test]
    fn drop_order_drops_value_before_backing() {
        let backing_dropped = Arc::new(AtomicBool::new(false));
        let value_dropped_before_backing = Arc::new(AtomicBool::new(false));

        // Sole strong reference: dropping the Backing drops the storage.
        let shared = Arc::new(TestSharedBacking {
            bytes: vec![1, 2, 3],
            dropped: Arc::clone(&backing_dropped),
        });

        let value = DropOrderValue {
            backing_dropped: Arc::clone(&backing_dropped),
            value_dropped_before_backing: Arc::clone(&value_dropped_before_backing),
        };

        let sref = SelfRef::owning(Backing::shared(shared), value);
        drop(sref);

        assert!(
            value_dropped_before_backing.load(Ordering::Acquire),
            "value should drop before backing"
        );
        assert!(
            backing_dropped.load(Ordering::Acquire),
            "backing should eventually be dropped"
        );
    }
}