1use std::cell::UnsafeCell;
2use std::mem::forget;
3use std::ops::{Deref, DerefMut};
4use std::sync::atomic::{AtomicUsize, Ordering};
5
/// Panics with a uniform "already borrowed" message.
///
/// This is a `macro_rules!` macro (not a function) so that the `T` in
/// `type_name::<T>()` resolves at the *expansion* site — i.e. to the `T`
/// of the enclosing `impl<T> Cell<T>` block where it is invoked.
///
/// `$s` is a suffix spliced into the message, e.g. `" mutably"` when a
/// read borrow fails because a writer is active, or `""` for the write
/// path. The exact wording is asserted by the `#[should_panic]` tests
/// below, so it must not change.
macro_rules! borrow_panic {
    ($s:expr) => {{
        panic!(
            "Tried to fetch data of type {:?}, but it was already borrowed{}.",
            ::std::any::type_name::<T>(),
            $s,
        )
    }};
}
15
/// A thread-safe, panic-on-conflict cell: like `RefCell`, but the borrow
/// state lives in an atomic so the cell can be shared across threads.
///
/// Encoding of `flag`:
/// * `0` — not borrowed,
/// * `usize::MAX` — exclusively (mutably) borrowed,
/// * any other value — the number of active immutable borrows.
#[derive(Debug)]
pub struct Cell<T> {
    // Atomic borrow counter; see the type-level docs for its encoding.
    flag: AtomicUsize,
    // The wrapped value. `UnsafeCell` permits mutation through `&self`;
    // soundness is enforced by the `flag` protocol in the impl below.
    inner: UnsafeCell<T>,
}
22
23impl<T> Cell<T> {
24 pub fn new(inner: T) -> Self {
26 Cell {
27 flag: AtomicUsize::new(0),
28 inner: UnsafeCell::new(inner),
29 }
30 }
31
32 pub fn into_inner(self) -> T {
34 self.inner.into_inner()
35 }
36
37 pub fn borrow(&self) -> Ref<T> {
46 if !self.check_flag_read() {
47 borrow_panic!(" mutably");
48 }
49
50 Ref {
51 flag: &self.flag,
52 value: unsafe { &*self.inner.get() },
53 }
54 }
55
56 pub fn try_borrow(&self) -> Option<Ref<T>> {
61 if self.check_flag_read() {
62 Some(Ref {
63 flag: &self.flag,
64 value: unsafe { &*self.inner.get() },
65 })
66 } else {
67 None
68 }
69 }
70
71 pub fn borrow_mut(&self) -> RefMut<T> {
80 if !self.check_flag_write() {
81 borrow_panic!("");
82 }
83
84 RefMut {
85 flag: &self.flag,
86 value: unsafe { &mut *self.inner.get() },
87 }
88 }
89
90 pub fn try_borrow_mut(&self) -> Option<RefMut<T>> {
95 if self.check_flag_write() {
96 Some(RefMut {
97 flag: &self.flag,
98 value: unsafe { &mut *self.inner.get() },
99 })
100 } else {
101 None
102 }
103 }
104
105 pub fn get_mut(&mut self) -> &mut T {
109 unsafe { &mut *self.inner.get() }
110 }
111
112 fn check_flag_read(&self) -> bool {
115 loop {
116 let val = self.flag.load(Ordering::Acquire);
117
118 if val == usize::MAX {
119 return false;
120 }
121
122 if self.flag.compare_and_swap(val, val + 1, Ordering::AcqRel) == val {
123 return true;
124 }
125 }
126 }
127
128 fn check_flag_write(&self) -> bool {
131 self.flag.compare_and_swap(0, usize::MAX, Ordering::AcqRel) == 0
132 }
133}
134
// SAFETY: concurrent access to `inner` is serialized by the atomic `flag`
// protocol above (readers count up; a single writer sets `usize::MAX`), so
// sharing a `&Cell<T>` across threads cannot yield aliasing `&mut T`.
// NOTE(review): `borrow_mut` hands out `&mut T` on whichever thread holds
// the shared reference, which normally also requires `T: Send` (compare
// `impl Sync for RwLock<T> where T: Send + Sync`) — confirm whether the
// bound here should be `T: Send + Sync`.
unsafe impl<T> Sync for Cell<T> where T: Sync {}
136
/// An immutable borrow of data in a [`Cell`]; access the value through
/// `Deref` (i.e. `*r`). Dropping it decrements the cell's reader count.
#[derive(Debug)]
pub struct Ref<'a, T>
where
    T: ?Sized + 'a,
{
    // Borrow flag of the owning `Cell`; decremented in `Drop`.
    flag: &'a AtomicUsize,
    value: &'a T,
}
148
149impl<'a, T> Ref<'a, T>
150where
151 T: ?Sized,
152{
153 pub fn map<U, F>(self, f: F) -> Ref<'a, U>
196 where
197 F: FnOnce(&T) -> &U,
198 U: ?Sized,
199 {
200 let flag = unsafe { &*(self.flag as *const _) };
201 let value = unsafe { &*(self.value as *const _) };
202
203 forget(self);
204
205 Ref {
206 flag,
207 value: f(value),
208 }
209 }
210}
211
212impl<'a, T> Deref for Ref<'a, T>
213where
214 T: ?Sized,
215{
216 type Target = T;
217
218 fn deref(&self) -> &T {
219 self.value
220 }
221}
222
impl<'a, T> Drop for Ref<'a, T>
where
    T: ?Sized,
{
    fn drop(&mut self) {
        // Release this read borrow: decrement the cell's reader count.
        self.flag.fetch_sub(1, Ordering::Release);
    }
}
231
232impl<'a, T> Clone for Ref<'a, T>
233where
234 T: ?Sized,
235{
236 fn clone(&self) -> Self {
237 self.flag.fetch_add(1, Ordering::Release);
238
239 Ref {
240 flag: self.flag,
241 value: self.value,
242 }
243 }
244}
245
/// An exclusive (mutable) borrow of data in a [`Cell`]; access the value
/// through `Deref`/`DerefMut`. Dropping it releases the write lock.
#[derive(Debug)]
pub struct RefMut<'a, T>
where
    T: ?Sized + 'a,
{
    // Borrow flag of the owning `Cell`; reset to 0 in `Drop`.
    flag: &'a AtomicUsize,
    value: &'a mut T,
}
257
impl<'a, T> RefMut<'a, T>
where
    T: ?Sized,
{
    /// Projects this exclusive borrow to a component of the borrowed data,
    /// e.g. `RefMut<'_, Box<T>>` to `RefMut<'_, T>`. The write lock is
    /// transferred to the returned `RefMut`, so the `Cell` stays mutably
    /// borrowed until that guard is dropped.
    pub fn map<U, F>(self, f: F) -> RefMut<'a, U>
    where
        F: FnOnce(&mut T) -> &mut U,
        U: ?Sized,
    {
        // SAFETY: round-tripping through raw pointers detaches these
        // borrows from `self`, restoring the full `'a` lifetime so they
        // can outlive the `forget(self)` below. `self` is never used
        // again, so no aliasing `&mut` is ever dereferenced.
        let flag = unsafe { &*(self.flag as *const _) };
        let value = unsafe { &mut *(self.value as *mut _) };

        // Suppress `Drop` so the flag is NOT reset to 0 here; the write
        // lock moves to the returned guard instead of being released.
        forget(self);

        // NOTE(review): if `f` panics, `self` has already been forgotten,
        // so the flag stays at `usize::MAX` and the cell remains
        // write-locked forever — confirm this leak-on-panic is acceptable.
        RefMut {
            flag,
            value: f(value),
        }
    }
}
322
323impl<'a, T> Deref for RefMut<'a, T>
324where
325 T: ?Sized,
326{
327 type Target = T;
328
329 fn deref(&self) -> &T {
330 self.value
331 }
332}
333
334impl<'a, T> DerefMut for RefMut<'a, T>
335where
336 T: ?Sized,
337{
338 fn deref_mut(&mut self) -> &mut T {
339 self.value
340 }
341}
342
impl<'a, T> Drop for RefMut<'a, T>
where
    T: ?Sized,
{
    fn drop(&mut self) {
        // Release the exclusive borrow: reset the flag from `usize::MAX`
        // back to 0 so new borrows may proceed.
        self.flag.store(0, Ordering::Release)
    }
}
351
#[cfg(test)]
mod tests {
    // NOTE: `std::usize::MAX` (deprecated module path) replaced with
    // `usize::MAX` throughout, matching the rest of this file.
    use super::*;

    #[test]
    fn allow_multiple_reads() {
        let cell = Cell::new(5);

        let a = cell.borrow();
        let b = cell.borrow();

        assert_eq!(10, *a + *b);
    }

    #[test]
    fn allow_clone_reads() {
        let cell = Cell::new(5);

        let a = cell.borrow();
        let b = a.clone();

        assert_eq!(10, *a + *b);
    }

    #[test]
    fn allow_single_write() {
        let cell = Cell::new(5);

        {
            let mut a = cell.borrow_mut();
            *a += 2;
            *a += 3;
        }

        assert_eq!(10, *cell.borrow());
    }

    #[test]
    #[should_panic(expected = "but it was already borrowed mutably")]
    fn panic_write_and_read() {
        let cell = Cell::new(5);

        let mut a = cell.borrow_mut();
        *a = 7;

        assert_eq!(7, *cell.borrow());
    }

    #[test]
    #[should_panic(expected = "but it was already borrowed")]
    fn panic_write_and_write() {
        let cell = Cell::new(5);

        let mut a = cell.borrow_mut();
        *a = 7;

        assert_eq!(7, *cell.borrow_mut());
    }

    #[test]
    #[should_panic(expected = "Tried to fetch data of type \"i32\", but it was already borrowed.")]
    fn panic_read_and_write() {
        let cell = Cell::new(5);

        let _a = cell.borrow();

        assert_eq!(7, *cell.borrow_mut());
    }

    #[test]
    fn try_write_and_read() {
        let cell = Cell::new(5);

        let mut a = cell.try_borrow_mut().unwrap();
        *a = 7;

        assert!(cell.try_borrow().is_none());

        *a = 8;
    }

    #[test]
    fn try_write_and_write() {
        let cell = Cell::new(5);

        let mut a = cell.try_borrow_mut().unwrap();
        *a = 7;

        assert!(cell.try_borrow_mut().is_none());

        *a = 8;
    }

    #[test]
    fn try_read_and_write() {
        let cell = Cell::new(5);

        let _a = cell.try_borrow().unwrap();

        assert!(cell.try_borrow_mut().is_none());
    }

    #[test]
    fn cloned_borrow_does_not_allow_write() {
        let cell = Cell::new(5);

        let a = cell.borrow();
        let b = a.clone();

        drop(a);

        // `b` still holds a reader count, so writing must fail.
        assert!(cell.try_borrow_mut().is_none());
        assert_eq!(5, *b);
    }

    #[test]
    fn ref_with_non_sized() {
        let r: Ref<'_, [i32]> = Ref {
            flag: &AtomicUsize::new(1),
            value: &[2, 3, 4, 5][..],
        };

        assert_eq!(&*r, &[2, 3, 4, 5][..]);
    }

    #[test]
    fn ref_with_non_sized_clone() {
        let r: Ref<'_, [i32]> = Ref {
            flag: &AtomicUsize::new(1),
            value: &[2, 3, 4, 5][..],
        };
        let rr = r.clone();

        assert_eq!(&*r, &[2, 3, 4, 5][..]);
        assert_eq!(r.flag.load(Ordering::SeqCst), 2);

        assert_eq!(&*rr, &[2, 3, 4, 5][..]);
        assert_eq!(rr.flag.load(Ordering::SeqCst), 2);
    }

    #[test]
    fn ref_with_trait_obj() {
        let ra: Ref<'_, dyn std::any::Any> = Ref {
            flag: &AtomicUsize::new(1),
            value: &2i32,
        };

        assert_eq!(ra.downcast_ref::<i32>().unwrap(), &2i32);
    }

    #[test]
    fn ref_mut_with_non_sized() {
        let mut r: RefMut<'_, [i32]> = RefMut {
            flag: &AtomicUsize::new(1),
            value: &mut [2, 3, 4, 5][..],
        };

        assert_eq!(&mut *r, &mut [2, 3, 4, 5][..]);
    }

    #[test]
    fn ref_mut_with_trait_obj() {
        let mut ra: RefMut<'_, dyn std::any::Any> = RefMut {
            flag: &AtomicUsize::new(1),
            value: &mut 2i32,
        };

        assert_eq!(ra.downcast_mut::<i32>().unwrap(), &mut 2i32);
    }

    #[test]
    fn ref_map_box() {
        let cell = Cell::new(Box::new(10));

        let r: Ref<'_, Box<usize>> = cell.borrow();
        assert_eq!(&**r, &10);

        let rr: Ref<'_, usize> = cell.borrow().map(Box::as_ref);
        assert_eq!(&*rr, &10);
    }

    #[test]
    fn ref_map_preserves_flag() {
        let cell = Cell::new(Box::new(10));

        let r: Ref<'_, Box<usize>> = cell.borrow();
        assert_eq!(cell.flag.load(Ordering::SeqCst), 1);
        let _nr: Ref<'_, usize> = r.map(Box::as_ref);
        assert_eq!(cell.flag.load(Ordering::SeqCst), 1);
    }

    #[test]
    fn ref_map_retains_borrow() {
        let cell = Cell::new(Box::new(10));

        let _r: Ref<'_, usize> = cell.borrow().map(Box::as_ref);
        assert_eq!(cell.flag.load(Ordering::SeqCst), 1);

        let _rr: Ref<'_, usize> = cell.borrow().map(Box::as_ref);
        assert_eq!(cell.flag.load(Ordering::SeqCst), 2);
    }

    #[test]
    fn ref_map_drops_borrow() {
        let cell = Cell::new(Box::new(10));

        let r: Ref<'_, usize> = cell.borrow().map(Box::as_ref);

        assert_eq!(cell.flag.load(Ordering::SeqCst), 1);
        drop(r);
        assert_eq!(cell.flag.load(Ordering::SeqCst), 0);
    }

    #[test]
    fn ref_mut_map_box() {
        let cell = Cell::new(Box::new(10));

        {
            let mut r: RefMut<'_, Box<usize>> = cell.borrow_mut();
            assert_eq!(&mut **r, &mut 10);
        }
        {
            let mut rr: RefMut<'_, usize> = cell.borrow_mut().map(Box::as_mut);
            assert_eq!(&mut *rr, &mut 10);
        }
    }

    #[test]
    fn ref_mut_map_preserves_flag() {
        let cell = Cell::new(Box::new(10));

        let r: RefMut<'_, Box<usize>> = cell.borrow_mut();
        assert_eq!(cell.flag.load(Ordering::SeqCst), usize::MAX);
        let _nr: RefMut<'_, usize> = r.map(Box::as_mut);
        assert_eq!(cell.flag.load(Ordering::SeqCst), usize::MAX);
    }

    #[test]
    #[should_panic(
        expected = "Tried to fetch data of type \"alloc::boxed::Box<usize>\", but it was already borrowed."
    )]
    fn ref_mut_map_retains_mut_borrow() {
        let cell = Cell::new(Box::new(10));

        let _rr: RefMut<'_, usize> = cell.borrow_mut().map(Box::as_mut);

        let _ = cell.borrow_mut();
    }

    #[test]
    fn ref_mut_map_drops_borrow() {
        let cell = Cell::new(Box::new(10));

        let r: RefMut<'_, usize> = cell.borrow_mut().map(Box::as_mut);

        assert_eq!(cell.flag.load(Ordering::SeqCst), usize::MAX);
        drop(r);
        assert_eq!(cell.flag.load(Ordering::SeqCst), 0);
    }
}