1use std::cell::Cell;
30use std::sync::atomic::{AtomicU64, Ordering};
31use std::sync::{Arc, Weak};
32
33use parking_lot::RwLock;
34
/// Monotonically increasing source of deep-clone generation ids.
/// Starts at 1 so that 0 can be reserved to mean "no deep clone active".
static NEXT_GENERATION: AtomicU64 = AtomicU64::new(1);

thread_local! {
    // Generation id of the deep clone currently running on this thread;
    // 0 means clones are ordinary (shallow) clones.
    static CLONE_GENERATION: Cell<u64> = const { Cell::new(0) };
    // Nesting depth of live `begin_deep_clone` guards on this thread.
    static CLONE_DEPTH: Cell<u32> = const { Cell::new(0) };
}
41
/// A shared, thread-safe arena of values addressed by index.
///
/// `GraphRef` handles point into this arena. Deep cloning is coordinated
/// through `clone_cache`: all refs cloned within one deep-clone generation
/// are re-pointed into the same fresh copy of the graph.
pub struct RefGraph<T: Send + Sync> {
    // Backing storage; indices stay valid because values are only appended.
    data: RwLock<Vec<T>>,
    // (generation id, cached clone) pairs; `Weak` so cached clones can be
    // dropped as soon as no GraphRef needs them.
    clone_cache: RwLock<Vec<(u64, Weak<RefGraph<T>>)>>,
}
48
49impl<T: Send + Sync> RefGraph<T> {
50 pub fn new() -> Arc<Self> {
52 Arc::new(RefGraph {
53 data: RwLock::new(Vec::new()),
54 clone_cache: RwLock::new(Vec::new()),
55 })
56 }
57
58 pub fn create(self: &Arc<Self>, value: T) -> GraphRef<T> {
60 let mut data = self.data.write();
61 let index = data.len();
62 data.push(value);
63 GraphRef {
64 graph: Arc::clone(self),
65 index,
66 }
67 }
68
69 pub fn len(&self) -> usize {
71 self.data.read().len()
72 }
73
74 pub fn is_empty(&self) -> bool {
76 self.data.read().is_empty()
77 }
78
79 pub fn clear_cache(&self) {
81 self.clone_cache.write().clear();
82 }
83}
84
impl<T: Clone + Send + Sync> RefGraph<T> {
    /// Builds a brand-new graph containing a clone of every value.
    /// The copy starts with an empty clone cache of its own.
    fn deep_clone_graph(&self) -> Arc<RefGraph<T>> {
        let data = self.data.read();
        Arc::new(RefGraph {
            data: RwLock::new(data.clone()),
            clone_cache: RwLock::new(Vec::new()),
        })
    }

    /// Returns the clone of this graph associated with `current_gen`,
    /// creating and caching it when no live one exists yet.
    ///
    /// Double-checked pattern: an optimistic lookup under the read lock,
    /// then a re-check under the write lock before inserting, so that
    /// concurrent callers in the same generation agree on a single clone.
    #[inline]
    fn get_or_create_clone(self: &Arc<Self>, current_gen: u64) -> Arc<RefGraph<T>> {
        {
            // Fast path: another ref of this generation already cloned us.
            let cache = self.clone_cache.read();
            for (generation, weak) in cache.iter() {
                if *generation == current_gen {
                    if let Some(arc) = weak.upgrade() {
                        return arc;
                    }
                    // Entry matches but the clone was dropped; rebuild below.
                    break;
                }
            }
        }

        // Clone before taking the write lock to keep the critical section short.
        let new_graph = self.deep_clone_graph();
        {
            let mut cache = self.clone_cache.write();
            // Re-check: another thread may have inserted while we were cloning;
            // if so, discard our copy and use theirs.
            for (generation, weak) in cache.iter() {
                if *generation == current_gen {
                    if let Some(arc) = weak.upgrade() {
                        return arc;
                    }
                    break;
                }
            }
            // Evict dead entries (including a stale one for current_gen)
            // before publishing the fresh clone.
            cache.retain(|(_, w)| w.strong_count() > 0);
            cache.push((current_gen, Arc::downgrade(&new_graph)));
        }
        new_graph
    }
}
133
/// Handle to a single value inside a [`RefGraph`].
pub struct GraphRef<T: Send + Sync> {
    // Owning graph; keeps the arena alive while the handle exists.
    graph: Arc<RefGraph<T>>,
    // Stable position of the value inside `graph.data`.
    index: usize,
}
142
143impl<T: Send + Sync> GraphRef<T> {
144 pub fn get(&self) -> T
146 where
147 T: Clone,
148 {
149 self.graph.data.read()[self.index].clone()
150 }
151
152 pub fn set(&self, value: T) {
154 self.graph.data.write()[self.index] = value;
155 }
156
157 pub fn update<F>(&self, f: F)
159 where
160 F: FnOnce(&mut T),
161 {
162 f(&mut self.graph.data.write()[self.index]);
163 }
164
165 pub fn index(&self) -> usize {
167 self.index
168 }
169
170 pub fn ptr_eq(&self, other: &GraphRef<T>) -> bool {
172 Arc::ptr_eq(&self.graph, &other.graph) && self.index == other.index
173 }
174
175 pub fn same_graph(&self, other: &GraphRef<T>) -> bool {
177 Arc::ptr_eq(&self.graph, &other.graph)
178 }
179}
180
impl<T: Clone + Send + Sync> Clone for GraphRef<T> {
    /// Shallow clone normally; deep clone while a `deep_clone` /
    /// `begin_deep_clone` scope is active on the current thread.
    #[inline]
    fn clone(&self) -> Self {
        // 0 means "no deep-clone scope"; anything else is the scope's id.
        let current_gen = CLONE_GENERATION.with(|g| g.get());

        if current_gen > 0 {
            // Deep path: all refs cloned within one generation are
            // re-pointed into the same fresh copy of their graph.
            let new_graph = self.graph.get_or_create_clone(current_gen);
            GraphRef {
                graph: new_graph,
                index: self.index,
            }
        } else {
            // Shallow path: alias the same graph and slot.
            GraphRef {
                graph: Arc::clone(&self.graph),
                index: self.index,
            }
        }
    }
}
202
203impl<T: std::fmt::Debug + Clone + Send + Sync> std::fmt::Debug for GraphRef<T> {
204 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
205 f.debug_struct("GraphRef")
206 .field("value", &self.get())
207 .field("index", &self.index)
208 .finish()
209 }
210}
211
/// Clones `value` deeply: every `GraphRef` reached during the clone is
/// re-pointed into a fresh copy of its graph, while aliasing among refs
/// of the same graph is preserved.
pub fn deep_clone<T: Clone>(value: &T) -> T {
    // The guard sets a thread-local generation that `GraphRef::clone`
    // observes; the generation is reset when the guard drops (even on panic).
    let _guard = begin_deep_clone();
    value.clone()
}
238
/// Guard returned by [`begin_deep_clone`]. While any guard is alive on a
/// thread, `GraphRef::clone` on that thread clones deeply; guards nest,
/// and the generation is reset when the outermost guard drops.
pub struct DeepCloneGuard {
    // Prevents construction outside `begin_deep_clone`.
    _private: (),
}
243
244impl Drop for DeepCloneGuard {
245 fn drop(&mut self) {
246 let depth = CLONE_DEPTH.with(|d| {
247 let new = d.get().saturating_sub(1);
248 d.set(new);
249 new
250 });
251 if depth == 0 {
252 CLONE_GENERATION.with(|g| g.set(0));
253 }
254 }
255}
256
/// Enters deep-clone mode on the current thread and returns a guard that
/// leaves it when dropped.
///
/// Nested calls share the generation chosen by the outermost call, so a
/// `Clone` impl that itself triggers deep cloning stays in the same
/// generation (and therefore the same per-graph clone).
pub fn begin_deep_clone() -> DeepCloneGuard {
    // Record the previous nesting depth while incrementing it.
    let depth = CLONE_DEPTH.with(|d| {
        let c = d.get();
        d.set(c + 1);
        c
    });
    if depth == 0 {
        // Outermost guard: allocate a fresh generation id.
        let generation = NEXT_GENERATION.fetch_add(1, Ordering::Relaxed);
        // 0 is reserved for "not deep cloning"; skip it in the (practically
        // unreachable) event the u64 counter wraps around.
        let generation = if generation == 0 {
            NEXT_GENERATION.fetch_add(1, Ordering::Relaxed)
        } else {
            generation
        };
        CLONE_GENERATION.with(|g| g.set(generation));
    }
    DeepCloneGuard { _private: () }
}
279
pub mod hashmap_based {
    //! Single-threaded variant that deduplicates graph clones through an
    //! explicit `HashMap`-backed context instead of thread-local state.

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    /// Arena of independently-borrowable cells, shared via `Rc`.
    pub struct HashMapRefGraph<T> {
        data: RefCell<Vec<RefCell<T>>>,
    }

    impl<T> HashMapRefGraph<T> {
        /// Creates an empty graph behind an `Rc`.
        pub fn new() -> Rc<Self> {
            Rc::new(HashMapRefGraph {
                data: RefCell::new(Vec::new()),
            })
        }

        /// Appends `value` and returns a handle to the new slot.
        pub fn create(self: &Rc<Self>, value: T) -> HashMapGraphRef<T> {
            let mut slots = self.data.borrow_mut();
            let index = slots.len();
            slots.push(RefCell::new(value));
            HashMapGraphRef {
                graph: Rc::clone(self),
                index,
            }
        }
    }

    impl<T: Clone> HashMapRefGraph<T> {
        /// Produces a brand-new graph holding a clone of every value.
        fn deep_clone_graph(self: &Rc<Self>) -> Rc<HashMapRefGraph<T>> {
            let slots = self.data.borrow();
            let copied: Vec<RefCell<T>> = slots
                .iter()
                .map(|cell| RefCell::new(cell.borrow().clone()))
                .collect();

            Rc::new(HashMapRefGraph {
                data: RefCell::new(copied),
            })
        }
    }

    /// Handle to one slot of a [`HashMapRefGraph`].
    pub struct HashMapGraphRef<T> {
        graph: Rc<HashMapRefGraph<T>>,
        index: usize,
    }

    impl<T: Clone> HashMapGraphRef<T> {
        /// Returns a clone of the referenced value.
        pub fn get(&self) -> T {
            self.graph.data.borrow()[self.index].borrow().clone()
        }

        /// Overwrites the referenced value.
        pub fn set(&self, value: T) {
            *self.graph.data.borrow()[self.index].borrow_mut() = value;
        }
    }

    impl<T> HashMapGraphRef<T> {
        /// True when both handles name the same slot of the same graph.
        pub fn ptr_eq(&self, other: &HashMapGraphRef<T>) -> bool {
            self.index == other.index && Rc::ptr_eq(&self.graph, &other.graph)
        }
    }

    /// Deep-clone context: maps each source graph (keyed by address) to
    /// the single clone produced for it during this context's lifetime.
    pub struct HashMapCloneContext<T> {
        map: RefCell<HashMap<*const HashMapRefGraph<T>, Rc<HashMapRefGraph<T>>>>,
    }

    impl<T: Clone> HashMapCloneContext<T> {
        /// Creates an empty context.
        pub fn new() -> Self {
            HashMapCloneContext {
                map: RefCell::new(HashMap::new()),
            }
        }

        /// Clones `r` into this context's copy of its graph, creating the
        /// copy on first encounter so aliasing is preserved.
        pub fn clone_ref(&self, r: &HashMapGraphRef<T>) -> HashMapGraphRef<T> {
            let key = Rc::as_ptr(&r.graph);
            let graph = self
                .map
                .borrow_mut()
                .entry(key)
                .or_insert_with(|| r.graph.deep_clone_graph())
                .clone();

            HashMapGraphRef {
                graph,
                index: r.index,
            }
        }
    }

    impl<T: Clone> Default for HashMapCloneContext<T> {
        fn default() -> Self {
            Self::new()
        }
    }
}
385
pub mod index_based {
    //! Two-phase deep cloning: a prepare pass records which references
    //! alias each other (by cell address), then a perform pass replays
    //! that aliasing plan over freshly cloned cells.

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    /// Shared, interiorly-mutable cell handle.
    pub struct IndexRef<T> {
        inner: Rc<RefCell<T>>,
    }

    impl<T> IndexRef<T> {
        /// Wraps `value` in a new shared cell.
        pub fn new(value: T) -> Self {
            IndexRef {
                inner: Rc::new(RefCell::new(value)),
            }
        }

        /// Address of the underlying cell; used as an identity key.
        pub fn ptr(&self) -> *const RefCell<T> {
            Rc::as_ptr(&self.inner)
        }
    }

    impl<T: Clone> IndexRef<T> {
        /// Returns a clone of the contained value.
        pub fn get(&self) -> T {
            self.inner.borrow().clone()
        }

        /// Overwrites the contained value.
        pub fn set(&self, value: T) {
            *self.inner.borrow_mut() = value;
        }
    }

    impl<T> Clone for IndexRef<T> {
        fn clone(&self) -> Self {
            IndexRef {
                inner: Rc::clone(&self.inner),
            }
        }
    }

    impl<T> IndexRef<T> {
        /// True when both handles share the same underlying cell.
        pub fn ptr_eq(&self, other: &IndexRef<T>) -> bool {
            Rc::ptr_eq(&self.inner, &other.inner)
        }
    }

    /// Prepare pass: records, for each visited reference in visit order,
    /// whether it aliases an earlier one and which clone slot it uses.
    pub struct PrepareIndexCloningNode<T> {
        /// `(seen_before, slot)` per visited reference, in visit order.
        pub pos_array: Vec<(bool, usize)>,
        // Maps cell address -> slot assigned on first sighting.
        lookup_map: HashMap<*const RefCell<T>, usize>,
        // Next slot to hand out; equals the number of unique cells seen.
        ref_pos_counter: usize,
    }

    impl<T> PrepareIndexCloningNode<T> {
        /// Creates an empty prepare node.
        pub fn new() -> Self {
            Self {
                pos_array: Vec::new(),
                lookup_map: HashMap::new(),
                ref_pos_counter: 0,
            }
        }

        /// Creates an empty prepare node sized for `capacity` references.
        pub fn with_capacity(capacity: usize) -> Self {
            Self {
                pos_array: Vec::with_capacity(capacity),
                lookup_map: HashMap::with_capacity(capacity),
                ref_pos_counter: 0,
            }
        }

        /// Records one reference. The perform pass must later replay
        /// references in exactly this order.
        pub fn handle_reference(&mut self, r: &IndexRef<T>) {
            let ptr = r.ptr();
            if let Some(&existing_pos) = self.lookup_map.get(&ptr) {
                // Alias of an earlier reference: reuse its slot.
                self.pos_array.push((true, existing_pos));
            } else {
                // First sighting: assign the next free slot.
                let pos = self.ref_pos_counter;
                self.ref_pos_counter += 1;
                self.lookup_map.insert(ptr, pos);
                self.pos_array.push((false, pos));
            }
        }

        /// Number of distinct cells recorded so far.
        pub fn unique_count(&self) -> usize {
            self.ref_pos_counter
        }
    }

    impl<T> Default for PrepareIndexCloningNode<T> {
        fn default() -> Self {
            Self::new()
        }
    }

    /// Perform pass: replays a prepare node's aliasing plan, cloning each
    /// unique cell once and reusing that clone for later aliases.
    pub struct PerformIndexCloningNode<T> {
        // Index of the next pos_array entry to replay.
        counter: usize,
        // Cloned cell per slot; `None` until the slot's first visit.
        // (Previously this held `mem::zeroed()` placeholder cells, which is
        // undefined behavior for any T containing non-nullable pointers,
        // e.g. String or Box — the Option makes the "not yet cloned" state
        // explicit and removes the unsafe code entirely.)
        ref_array: Vec<Option<Rc<RefCell<T>>>>,
        pos_array: Vec<(bool, usize)>,
    }

    impl<T: Clone> PerformIndexCloningNode<T> {
        /// Builds a perform node from a finished prepare pass.
        pub fn from_prepare_node(node: &PrepareIndexCloningNode<T>) -> Self {
            Self {
                counter: 0,
                // Safe empty placeholders; filled on each slot's first visit.
                ref_array: vec![None; node.ref_pos_counter],
                pos_array: node.pos_array.clone(),
            }
        }

        /// Clones `orig` (or reuses the clone made for an alias of it),
        /// consuming the next entry of the recorded plan.
        ///
        /// # Panics
        /// Panics if called more times than the prepare pass recorded, or
        /// out of the prepare pass's order (plan/slot mismatch).
        pub fn handle_reference(&mut self, orig: &IndexRef<T>) -> IndexRef<T> {
            let pos = self.counter;
            self.counter += 1;

            let (seen_before, slot) = self.pos_array[pos];

            if !seen_before {
                // First visit of this slot: clone the value and remember it.
                let new_ref = Rc::new(RefCell::new(orig.inner.borrow().clone()));
                self.ref_array[slot] = Some(Rc::clone(&new_ref));
                IndexRef { inner: new_ref }
            } else {
                // Alias: hand out the clone created on the slot's first visit.
                let shared = self.ref_array[slot]
                    .as_ref()
                    .expect("perform pass visited an alias before its original");
                IndexRef {
                    inner: Rc::clone(shared),
                }
            }
        }
    }

    /// Runs the prepare pass over `refs`.
    pub fn prepare_refs<T>(refs: &[IndexRef<T>]) -> PrepareIndexCloningNode<T> {
        let mut node = PrepareIndexCloningNode::with_capacity(refs.len());
        for r in refs {
            node.handle_reference(r);
        }
        node
    }

    /// Runs the perform pass over `refs` using `prepare_node`'s plan,
    /// returning deep clones that preserve aliasing among the inputs.
    pub fn clone_refs<T: Clone>(
        refs: &[IndexRef<T>],
        prepare_node: &PrepareIndexCloningNode<T>,
    ) -> Vec<IndexRef<T>> {
        let mut perform_node = PerformIndexCloningNode::from_prepare_node(prepare_node);
        refs.iter()
            .map(|r| perform_node.handle_reference(r))
            .collect()
    }
}
573
#[cfg(test)]
mod tests {
    use super::*;

    // Without an active deep-clone scope, `clone` is shallow: both
    // handles alias the same slot of the same graph.
    #[test]
    fn test_shallow_clone_same_data() {
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();

        assert!(a.ptr_eq(&b));
        assert_eq!(a.get(), 42);
        assert_eq!(b.get(), 42);

        a.set(100);
        assert_eq!(b.get(), 100);
    }

    // Deep clone keeps aliasing between refs but detaches them from the
    // original graph.
    #[test]
    fn test_deep_clone_preserves_structure() {
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();

        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));

        assert!(a2.ptr_eq(&b2));

        assert!(!a.ptr_eq(&a2));
        assert!(!b.ptr_eq(&b2));

        a2.set(999);
        // Writes through the clone reach its alias but not the original.
        assert_eq!(b2.get(), 999);
        assert_eq!(a.get(), 42);
    }

    // Each distinct source graph gets its own clone within a single
    // deep-clone generation.
    #[test]
    fn test_deep_clone_multiple_graphs() {
        let graph1 = RefGraph::new();
        let graph2 = RefGraph::new();

        let a1 = graph1.create(1);
        let b1 = a1.clone();
        let a2 = graph2.create(2);
        let b2 = a2.clone();

        let cloned = deep_clone(&(a1.clone(), b1.clone(), a2.clone(), b2.clone()));

        assert!(cloned.0.ptr_eq(&cloned.1));
        assert!(cloned.2.ptr_eq(&cloned.3));

        assert!(!cloned.0.ptr_eq(&cloned.2));
    }

    // Refs to different slots of one graph stay distinct but land in the
    // same cloned graph, with their values intact.
    #[test]
    fn test_deep_clone_different_indices() {
        let graph = RefGraph::new();
        let a = graph.create(1);
        let b = graph.create(2);
        let c = a.clone();
        let (a2, b2, c2) = deep_clone(&(a.clone(), b.clone(), c.clone()));

        assert!(a2.ptr_eq(&c2));
        assert!(!a2.ptr_eq(&b2));
        assert!(a2.same_graph(&b2));

        assert_eq!(a2.get(), 1);
        assert_eq!(b2.get(), 2);
    }

    // Derived `Clone` on nested structs participates in the same
    // deep-clone generation, so aliasing survives across fields.
    #[test]
    fn test_nested_struct() {
        #[derive(Clone)]
        struct Inner {
            x: GraphRef<i32>,
            y: GraphRef<i32>,
        }

        #[derive(Clone)]
        struct Outer {
            inner: Inner,
            z: GraphRef<i32>,
        }

        let graph = RefGraph::new();
        let r = graph.create(42);

        let original = Outer {
            inner: Inner {
                x: r.clone(),
                y: r.clone(),
            },
            z: r.clone(),
        };

        let cloned = deep_clone(&original);

        assert!(cloned.inner.x.ptr_eq(&cloned.inner.y));
        assert!(cloned.inner.x.ptr_eq(&cloned.z));

        cloned.inner.x.set(999);
        assert_eq!(cloned.z.get(), 999);
        assert_eq!(original.z.get(), 42);
    }

    // Compile-time check that the graph types are Send + Sync.
    #[test]
    fn test_send_sync() {
        fn assert_send_sync<T: Send + Sync>() {}
        assert_send_sync::<RefGraph<i32>>();
        assert_send_sync::<GraphRef<i32>>();
        assert_send_sync::<RefGraph<String>>();
        assert_send_sync::<GraphRef<String>>();
    }

    // Deep clones made concurrently on different threads are independent
    // of each other and of the shared originals.
    #[test]
    fn test_concurrent_deep_clone() {
        use std::thread;

        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();
        let c = graph.create(100);

        let data = Arc::new((a.clone(), b.clone(), c.clone()));

        let handles: Vec<_> = (0..8)
            .map(|i| {
                let data = Arc::clone(&data);
                thread::spawn(move || {
                    let cloned = deep_clone(data.as_ref());

                    assert!(cloned.0.ptr_eq(&cloned.1));
                    assert!(cloned.0.same_graph(&cloned.2));
                    assert!(!cloned.0.ptr_eq(&cloned.2));

                    cloned.0.set(i * 1000);
                    assert_eq!(cloned.1.get(), i * 1000);
                    assert_eq!(cloned.2.get(), 100);

                    // The shared original is never touched by any clone.
                    assert_eq!(data.0.get(), 42);
                })
            })
            .collect();

        for h in handles {
            h.join().unwrap();
        }
    }

    // One writer plus several readers on the same ref; readers only ever
    // observe values the writer actually stored.
    #[test]
    fn test_concurrent_read_write() {
        use std::sync::Barrier;
        use std::thread;

        let graph = RefGraph::new();
        let r = graph.create(0i64);
        let barrier = Arc::new(Barrier::new(5));

        let writer_ref = r.clone();
        let writer_barrier = Arc::clone(&barrier);
        let writer = thread::spawn(move || {
            writer_barrier.wait();
            for i in 0..1000 {
                writer_ref.set(i);
            }
        });

        let readers: Vec<_> = (0..4)
            .map(|_| {
                let reader_ref = r.clone();
                let reader_barrier = Arc::clone(&barrier);
                thread::spawn(move || {
                    reader_barrier.wait();
                    let mut last = -1i64;
                    for _ in 0..1000 {
                        let val = reader_ref.get();
                        assert!(val >= 0 && val < 1000);
                        let _ = last;
                        last = val;
                    }
                })
            })
            .collect();

        writer.join().unwrap();
        for r in readers {
            r.join().unwrap();
        }
    }

    // Hammers deep_clone from many threads to exercise the per-graph
    // clone cache under contention.
    #[test]
    fn test_deep_clone_stress() {
        use std::sync::Barrier;
        use std::thread;

        let graph = RefGraph::new();
        let refs: Vec<_> = (0..10).map(|i| graph.create(i)).collect();
        let data = Arc::new(refs);
        let barrier = Arc::new(Barrier::new(50));

        let handles: Vec<_> = (0..50)
            .map(|_| {
                let data = Arc::clone(&data);
                let barrier = Arc::clone(&barrier);
                thread::spawn(move || {
                    barrier.wait();
                    for _ in 0..100 {
                        let cloned = deep_clone(data.as_ref());
                        for j in 1..cloned.len() {
                            assert!(cloned[0].same_graph(&cloned[j]));
                        }
                        for (j, r) in cloned.iter().enumerate() {
                            assert_eq!(r.get(), j as i32);
                        }
                        cloned[0].set(999);
                        assert_eq!(data[0].get(), 0);
                    }
                })
            })
            .collect();

        for h in handles {
            h.join().unwrap();
        }
    }

    // A panic inside deep_clone must not leave the thread stuck in
    // deep-clone mode: the guard's Drop resets the generation.
    #[test]
    fn test_panic_resets_generation() {
        use std::panic;

        struct PanicOnClone;

        impl Clone for PanicOnClone {
            fn clone(&self) -> Self {
                panic!("clone panic!");
            }
        }

        let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
            deep_clone(&PanicOnClone);
        }));
        assert!(result.is_err());

        // Deep clone still works normally after the panic.
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();
        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
        assert!(a2.ptr_eq(&b2));
        assert_eq!(a2.get(), 42);
    }

    // Panicking inside nested guards still unwinds the depth counter
    // correctly; later clones behave normally.
    #[test]
    fn test_panic_nested_recovery() {
        use std::panic;

        let graph = RefGraph::new();
        let a = graph.create(42);

        let _result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
            let _outer_guard = begin_deep_clone();
            let _inner_result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
                let _inner_guard = begin_deep_clone();
                panic!("inner panic");
            }));
        }));
        let b = a.clone();
        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
        assert!(a2.ptr_eq(&b2));
        assert_eq!(a2.get(), 42);
    }

    // A container whose hand-written Clone impl clones refs indirectly
    // still joins the ambient deep-clone generation.
    #[test]
    fn test_nested_deep_clone() {
        #[derive(Debug)]
        struct Container {
            inner: Vec<GraphRef<i32>>,
        }

        impl Clone for Container {
            fn clone(&self) -> Self {
                Container {
                    inner: self.inner.clone(),
                }
            }
        }

        let graph = RefGraph::new();
        let a = graph.create(10);
        let b = a.clone();

        let container = Container {
            inner: vec![a.clone(), b.clone()],
        };

        let cloned = deep_clone(&container);
        assert!(cloned.inner[0].ptr_eq(&cloned.inner[1]));
        cloned.inner[0].set(999);
        assert_eq!(cloned.inner[1].get(), 999);
        assert_eq!(container.inner[0].get(), 10);
    }

    // Nested guards share one generation; once every guard has dropped,
    // clones are shallow again.
    #[test]
    fn test_begin_deep_clone_nested_guards() {
        let graph = RefGraph::new();
        let a = graph.create(1);
        let b = a.clone();

        {
            let _outer = begin_deep_clone();
            let cloned_a = a.clone();
            {
                let _inner = begin_deep_clone();
                let cloned_b = b.clone();
                assert!(cloned_a.same_graph(&cloned_b));
            }
            let cloned_b2 = b.clone();
            assert!(cloned_a.ptr_eq(&cloned_b2));
        }
        let shallow = a.clone();
        assert!(a.ptr_eq(&shallow));
    }

    // Deep clone works across graphs of different element types within
    // one generation.
    #[test]
    fn test_multiple_graph_types() {
        #[derive(Clone)]
        struct Mixed {
            nums: Vec<GraphRef<i32>>,
            strs: Vec<GraphRef<String>>,
        }

        let g1 = RefGraph::new();
        let g2 = RefGraph::new();

        let n = g1.create(42);
        let s = g2.create("hello".to_string());

        let data = Mixed {
            nums: vec![n.clone(), n.clone()],
            strs: vec![s.clone(), s.clone()],
        };

        let cloned = deep_clone(&data);
        assert!(cloned.nums[0].ptr_eq(&cloned.nums[1]));
        assert!(cloned.strs[0].ptr_eq(&cloned.strs[1]));

        cloned.nums[0].set(999);
        assert_eq!(cloned.nums[1].get(), 999);
        assert_eq!(data.nums[0].get(), 42);

        cloned.strs[0].set("world".to_string());
        assert_eq!(cloned.strs[1].get(), "world");
        assert_eq!(data.strs[0].get(), "hello");
    }

    // Sanity check on a large graph: aliasing and values survive, and
    // the clone stays detached from the original.
    #[test]
    fn test_large_graph() {
        let graph = RefGraph::new();
        let refs: Vec<_> = (0..10_000).map(|i| graph.create(i)).collect();

        let mut all_refs = refs.clone();
        for i in 0..5_000 {
            all_refs.push(refs[i].clone());
        }

        let cloned = deep_clone(&all_refs);

        assert_eq!(cloned.len(), 15_000);

        for i in 0..5_000 {
            assert!(cloned[i].ptr_eq(&cloned[10_000 + i]));
        }

        for i in 0..10_000 {
            assert_eq!(cloned[i].get(), i as i32);
        }

        cloned[0].set(-1);
        assert_eq!(all_refs[0].get(), 0);
    }
}