1use std::cell::Cell;
30use std::sync::atomic::{AtomicU64, Ordering};
31use std::sync::{Arc, Weak};
32
33use parking_lot::RwLock;
34
/// Monotonic id source for `RefGraph` instances (starts at 1).
static NEXT_GRAPH_ID: AtomicU64 = AtomicU64::new(1);
/// Monotonic source of deep-clone generation tokens. Generation 0 is the
/// "no deep clone active" sentinel, so this counter starts at 1.
static NEXT_GENERATION: AtomicU64 = AtomicU64::new(1);
37
thread_local! {
    // Generation of the deep clone currently in progress on this thread;
    // 0 means no deep clone is active, so `GraphRef::clone` stays shallow.
    static CLONE_GENERATION: Cell<u64> = const { Cell::new(0) };
    // Nesting depth of live `DeepCloneGuard`s on this thread; the generation
    // is cleared only when the outermost guard drops.
    static CLONE_DEPTH: Cell<u32> = const { Cell::new(0) };
}
42
/// Arena-like graph of values addressed by slot index.
///
/// `GraphRef` handles share the graph through `Arc`; cloning a handle inside
/// a deep-clone scope deep-clones the whole graph at most once per
/// generation, memoized in `clone_cache`.
pub struct RefGraph<T: Send + Sync> {
    /// Unique graph id; currently only for identification/debugging.
    #[allow(dead_code)]
    id: u64,
    /// Slot storage. The outer lock guards the `Vec` layout (push during
    /// `create`); the inner locks guard individual values.
    data: RwLock<Vec<RwLock<T>>>,
    /// Memoized deep clones, one `(generation, weak handle)` entry per
    /// deep-clone generation that touched this graph.
    clone_cache: RwLock<Vec<(u64, Weak<RefGraph<T>>)>>,
}
51
// SAFETY: every field (`u64`, `parking_lot::RwLock<_>`, `Weak<_>`) is itself
// `Send`/`Sync` given `T: Send + Sync`, so these impls grant nothing beyond
// what the auto traits already would.
// NOTE(review): both impls appear redundant — the compiler should auto-derive
// them. Consider deleting them so the auto impls stay in force (and start
// failing loudly) if a non-thread-safe field is ever added.
unsafe impl<T: Send + Sync> Send for RefGraph<T> {}
unsafe impl<T: Send + Sync> Sync for RefGraph<T> {}
56
57impl<T: Send + Sync> RefGraph<T> {
58 pub fn new() -> Arc<Self> {
60 Arc::new(RefGraph {
61 id: NEXT_GRAPH_ID.fetch_add(1, Ordering::Relaxed),
62 data: RwLock::new(Vec::new()),
63 clone_cache: RwLock::new(Vec::new()),
64 })
65 }
66
67 pub fn create(self: &Arc<Self>, value: T) -> GraphRef<T> {
69 let mut data = self.data.write();
70 let index = data.len();
71 data.push(RwLock::new(value));
72 GraphRef {
73 graph: Arc::clone(self),
74 index,
75 }
76 }
77
78 pub fn len(&self) -> usize {
80 self.data.read().len()
81 }
82
83 pub fn is_empty(&self) -> bool {
85 self.data.read().is_empty()
86 }
87
88 pub fn clear_cache(&self) {
90 self.clone_cache.write().clear();
91 }
92}
93
impl<T: Clone + Send + Sync> RefGraph<T> {
    /// Clones every slot into a brand-new graph with a fresh id and an empty
    /// clone cache. Holds the outer `data` read lock for the duration and
    /// takes each inner slot read lock in turn.
    fn deep_clone_graph(&self) -> Arc<RefGraph<T>> {
        let data = self.data.read();
        let cloned_data: Vec<RwLock<T>> = data
            .iter()
            .map(|cell| RwLock::new(cell.read().clone()))
            .collect();

        Arc::new(RefGraph {
            id: NEXT_GRAPH_ID.fetch_add(1, Ordering::Relaxed),
            data: RwLock::new(cloned_data),
            clone_cache: RwLock::new(Vec::new()),
        })
    }

    /// Returns the deep clone of this graph for `current_gen`, creating it at
    /// most once per generation (double-checked under the cache lock).
    #[inline]
    fn get_or_create_clone(self: &Arc<Self>, current_gen: u64) -> Arc<RefGraph<T>> {
        // Fast path under the read lock: reuse a live clone for this generation.
        {
            let cache = self.clone_cache.read();
            for (gen, weak) in cache.iter() {
                if *gen == current_gen {
                    if let Some(arc) = weak.upgrade() {
                        return arc;
                    }
                    // Entry exists but its clone was dropped; rebuild below.
                    break;
                }
            }
        }

        // Deep-clone OUTSIDE the cache write lock to keep the critical
        // section short; a racing thread may build one too — the loser's
        // clone is simply discarded.
        let new_graph = self.deep_clone_graph();
        {
            let mut cache = self.clone_cache.write();
            // Re-check under the write lock: a racer may have published first.
            for (gen, weak) in cache.iter() {
                if *gen == current_gen {
                    if let Some(arc) = weak.upgrade() {
                        return arc;
                    }
                    break;
                }
            }
            // Garbage-collect dead entries, then publish ours (weakly, so the
            // cache never keeps a clone alive by itself).
            cache.retain(|(_, w)| w.strong_count() > 0);
            cache.push((current_gen, Arc::downgrade(&new_graph)));
        }
        new_graph
    }
}
148
/// Handle to one slot of a [`RefGraph`]: the owning graph plus a slot index.
pub struct GraphRef<T: Send + Sync> {
    graph: Arc<RefGraph<T>>,
    index: usize,
}
157
158impl<T: Send + Sync> GraphRef<T> {
159 pub fn get(&self) -> T
161 where
162 T: Clone,
163 {
164 let data = self.graph.data.read();
165 let val = data[self.index].read().clone();
166 val
167 }
168
169 pub fn set(&self, value: T) {
171 let data = self.graph.data.read();
172 *data[self.index].write() = value;
173 }
174
175 pub fn update<F>(&self, f: F)
177 where
178 F: FnOnce(&mut T),
179 {
180 let data = self.graph.data.read();
181 f(&mut *data[self.index].write());
182 }
183
184 pub fn index(&self) -> usize {
186 self.index
187 }
188
189 pub fn ptr_eq(&self, other: &GraphRef<T>) -> bool {
191 Arc::ptr_eq(&self.graph, &other.graph) && self.index == other.index
192 }
193
194 pub fn same_graph(&self, other: &GraphRef<T>) -> bool {
196 Arc::ptr_eq(&self.graph, &other.graph)
197 }
198}
199
200impl<T: Clone + Send + Sync> Clone for GraphRef<T> {
201 #[inline]
202 fn clone(&self) -> Self {
203 let current_gen = CLONE_GENERATION.with(|g| g.get());
204
205 if current_gen > 0 {
206 let new_graph = self.graph.get_or_create_clone(current_gen);
208 GraphRef {
209 graph: new_graph,
210 index: self.index,
211 }
212 } else {
213 GraphRef {
215 graph: Arc::clone(&self.graph),
216 index: self.index,
217 }
218 }
219 }
220}
221
222impl<T: std::fmt::Debug + Clone + Send + Sync> std::fmt::Debug for GraphRef<T> {
223 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
224 f.debug_struct("GraphRef")
225 .field("value", &self.get())
226 .field("index", &self.index)
227 .finish()
228 }
229}
230
/// Deep-clones `value`: every `GraphRef` cloned while the internal guard is
/// alive is re-pointed at a fresh copy of its graph, and refs that aliased
/// each other before the call still alias each other afterwards.
pub fn deep_clone<T: Clone>(value: &T) -> T {
    let _guard = begin_deep_clone();
    value.clone()
}
257
/// RAII guard for a deep-clone scope; created by [`begin_deep_clone`].
/// Dropping the outermost guard resets the thread's clone generation.
pub struct DeepCloneGuard {
    // Prevents construction outside this module.
    _private: (),
}
262
263impl Drop for DeepCloneGuard {
264 fn drop(&mut self) {
265 let depth = CLONE_DEPTH.with(|d| {
266 let new = d.get().saturating_sub(1);
267 d.set(new);
268 new
269 });
270 if depth == 0 {
271 CLONE_GENERATION.with(|g| g.set(0));
272 }
273 }
274}
275
276pub fn begin_deep_clone() -> DeepCloneGuard {
281 let depth = CLONE_DEPTH.with(|d| {
282 let c = d.get();
283 d.set(c + 1);
284 c
285 });
286 if depth == 0 {
287 let gen = NEXT_GENERATION.fetch_add(1, Ordering::Relaxed);
288 let gen = if gen == 0 {
290 NEXT_GENERATION.fetch_add(1, Ordering::Relaxed)
291 } else {
292 gen
293 };
294 CLONE_GENERATION.with(|g| g.set(gen));
295 }
296 DeepCloneGuard { _private: () }
297}
298
pub mod hashmap_based {
    //! Single-threaded variant: deep-clone identity is tracked explicitly
    //! through a `HashMapCloneContext` instead of thread-local generations.

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    /// Arena of values shared by `HashMapGraphRef` handles via `Rc`.
    pub struct HashMapRefGraph<T> {
        data: RefCell<Vec<RefCell<T>>>,
    }

    impl<T> HashMapRefGraph<T> {
        /// Creates an empty graph behind an `Rc`.
        pub fn new() -> Rc<Self> {
            Rc::new(Self {
                data: RefCell::new(Vec::new()),
            })
        }

        /// Appends `value` as a new slot and returns a handle to it.
        pub fn create(self: &Rc<Self>, value: T) -> HashMapGraphRef<T> {
            let slot = {
                let mut slots = self.data.borrow_mut();
                slots.push(RefCell::new(value));
                slots.len() - 1
            };
            HashMapGraphRef {
                graph: Rc::clone(self),
                index: slot,
            }
        }
    }

    impl<T: Clone> HashMapRefGraph<T> {
        /// Copies every slot into a brand-new, unshared graph.
        fn deep_clone_graph(self: &Rc<Self>) -> Rc<HashMapRefGraph<T>> {
            let copied: Vec<RefCell<T>> = self
                .data
                .borrow()
                .iter()
                .map(|slot| RefCell::new(slot.borrow().clone()))
                .collect();
            Rc::new(HashMapRefGraph {
                data: RefCell::new(copied),
            })
        }
    }

    /// Handle to one slot of a `HashMapRefGraph`.
    pub struct HashMapGraphRef<T> {
        graph: Rc<HashMapRefGraph<T>>,
        index: usize,
    }

    impl<T: Clone> HashMapGraphRef<T> {
        /// Returns a clone of the referenced value.
        pub fn get(&self) -> T {
            self.graph.data.borrow()[self.index].borrow().clone()
        }

        /// Replaces the referenced value.
        pub fn set(&self, value: T) {
            let slots = self.graph.data.borrow();
            *slots[self.index].borrow_mut() = value;
        }
    }

    impl<T> HashMapGraphRef<T> {
        /// True when both handles name the same slot of the same graph.
        pub fn ptr_eq(&self, other: &HashMapGraphRef<T>) -> bool {
            self.index == other.index && Rc::ptr_eq(&self.graph, &other.graph)
        }
    }

    /// Maps source graphs (by address) to their deep clones so refs that
    /// shared a graph keep sharing after cloning.
    ///
    /// NOTE(review): keys are raw addresses and only the CLONES are kept
    /// alive by the map — keep the source graphs alive for the lifetime of
    /// the context, or a reallocated graph could collide on address.
    pub struct HashMapCloneContext<T> {
        map: RefCell<HashMap<*const HashMapRefGraph<T>, Rc<HashMapRefGraph<T>>>>,
    }

    impl<T: Clone> HashMapCloneContext<T> {
        /// Creates an empty clone context.
        pub fn new() -> Self {
            Self {
                map: RefCell::new(HashMap::new()),
            }
        }

        /// Clones `r` into this context's copy of its graph, deep-cloning
        /// the graph the first time that graph is seen.
        pub fn clone_ref(&self, r: &HashMapGraphRef<T>) -> HashMapGraphRef<T> {
            let key = Rc::as_ptr(&r.graph);
            let graph = Rc::clone(
                self.map
                    .borrow_mut()
                    .entry(key)
                    .or_insert_with(|| r.graph.deep_clone_graph()),
            );
            HashMapGraphRef { graph, index: r.index }
        }
    }

    impl<T: Clone> Default for HashMapCloneContext<T> {
        fn default() -> Self {
            Self::new()
        }
    }
}
404
pub mod index_based {
    //! Two-pass deep cloning: a prepare pass records, for each reference in
    //! visit order, whether its target cell was already seen (and at which
    //! slot), and a perform pass replays that plan while cloning, preserving
    //! aliasing among references.

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    /// Shared mutable cell handle (newtype over `Rc<RefCell<T>>`).
    pub struct IndexRef<T> {
        inner: Rc<RefCell<T>>,
    }

    impl<T> IndexRef<T> {
        /// Wraps `value` in a fresh shared cell.
        pub fn new(value: T) -> Self {
            IndexRef {
                inner: Rc::new(RefCell::new(value)),
            }
        }

        /// Address of the underlying cell; used as an identity key.
        pub fn ptr(&self) -> *const RefCell<T> {
            Rc::as_ptr(&self.inner)
        }
    }

    impl<T: Clone> IndexRef<T> {
        /// Returns a clone of the stored value.
        pub fn get(&self) -> T {
            self.inner.borrow().clone()
        }

        /// Replaces the stored value.
        pub fn set(&self, value: T) {
            *self.inner.borrow_mut() = value;
        }
    }

    impl<T> Clone for IndexRef<T> {
        /// Shallow clone: both handles share the same cell.
        fn clone(&self) -> Self {
            IndexRef {
                inner: Rc::clone(&self.inner),
            }
        }
    }

    impl<T> IndexRef<T> {
        /// True when both handles share one underlying cell.
        pub fn ptr_eq(&self, other: &IndexRef<T>) -> bool {
            Rc::ptr_eq(&self.inner, &other.inner)
        }
    }

    /// First pass: records, per visited reference, `(already_seen, slot)`.
    pub struct PrepareIndexCloningNode<T> {
        /// One entry per `handle_reference` call, in call order.
        pub pos_array: Vec<(bool, usize)>,
        /// Cell address -> slot assigned at its first occurrence.
        lookup_map: HashMap<*const RefCell<T>, usize>,
        /// Number of distinct cells seen so far.
        ref_pos_counter: usize,
    }

    impl<T> PrepareIndexCloningNode<T> {
        /// Creates an empty prepare pass.
        pub fn new() -> Self {
            Self {
                pos_array: Vec::new(),
                lookup_map: HashMap::new(),
                ref_pos_counter: 0,
            }
        }

        /// Creates an empty prepare pass sized for `capacity` references.
        pub fn with_capacity(capacity: usize) -> Self {
            Self {
                pos_array: Vec::with_capacity(capacity),
                lookup_map: HashMap::with_capacity(capacity),
                ref_pos_counter: 0,
            }
        }

        /// Visits one reference, recording whether its cell was seen before
        /// and the slot it occupies in the eventual clone array.
        pub fn handle_reference(&mut self, r: &IndexRef<T>) {
            let ptr = r.ptr();
            if let Some(&existing_pos) = self.lookup_map.get(&ptr) {
                self.pos_array.push((true, existing_pos));
            } else {
                let pos = self.ref_pos_counter;
                self.ref_pos_counter += 1;
                self.lookup_map.insert(ptr, pos);
                self.pos_array.push((false, pos));
            }
        }

        /// Number of distinct cells recorded so far.
        pub fn unique_count(&self) -> usize {
            self.ref_pos_counter
        }
    }

    impl<T> Default for PrepareIndexCloningNode<T> {
        fn default() -> Self {
            Self::new()
        }
    }

    /// Second pass: replays the prepare plan, cloning each distinct cell
    /// exactly once.
    pub struct PerformIndexCloningNode<T> {
        /// Index of the next `pos_array` entry to replay.
        counter: usize,
        /// Clones of distinct cells, filled in at their first occurrence.
        ///
        /// `None` until the "first occurrence" entry for that slot has been
        /// replayed. Previously this held `Rc<RefCell<T>>` placeholders
        /// built with `std::mem::zeroed()`, which is undefined behavior for
        /// any `T` with validity invariants (e.g. `String`, `Box`); using
        /// `Option` removes the unsafe entirely.
        ref_array: Vec<Option<Rc<RefCell<T>>>>,
        /// Copy of the prepare pass's plan.
        pos_array: Vec<(bool, usize)>,
    }

    impl<T: Clone> PerformIndexCloningNode<T> {
        /// Builds a perform pass from a completed prepare pass.
        pub fn from_prepare_node(node: &PrepareIndexCloningNode<T>) -> Self {
            let mut ref_array = Vec::with_capacity(node.ref_pos_counter);
            ref_array.resize_with(node.ref_pos_counter, || None);

            Self {
                counter: 0,
                ref_array,
                pos_array: node.pos_array.clone(),
            }
        }

        /// Clones `orig` per the plan: a first occurrence clones the value
        /// into a new cell; a repeat occurrence reuses the earlier clone so
        /// aliasing is preserved. Must be called in the same order as the
        /// prepare pass's `handle_reference` calls.
        pub fn handle_reference(&mut self, orig: &IndexRef<T>) -> IndexRef<T> {
            let pos = self.counter;
            self.counter += 1;

            let (seen_before, array_pos) = self.pos_array[pos];

            if seen_before {
                let shared = self.ref_array[array_pos]
                    .as_ref()
                    .expect("prepare/perform call order mismatch");
                IndexRef {
                    inner: Rc::clone(shared),
                }
            } else {
                let cloned = Rc::new(RefCell::new(orig.inner.borrow().clone()));
                self.ref_array[array_pos] = Some(Rc::clone(&cloned));
                IndexRef { inner: cloned }
            }
        }
    }

    /// Runs the prepare pass over `refs` in order.
    pub fn prepare_refs<T>(refs: &[IndexRef<T>]) -> PrepareIndexCloningNode<T> {
        let mut node = PrepareIndexCloningNode::with_capacity(refs.len());
        for r in refs {
            node.handle_reference(r);
        }
        node
    }

    /// Runs the perform pass over `refs` using `prepare_node`'s plan,
    /// returning aliasing-preserving deep clones.
    pub fn clone_refs<T: Clone>(
        refs: &[IndexRef<T>],
        prepare_node: &PrepareIndexCloningNode<T>,
    ) -> Vec<IndexRef<T>> {
        let mut perform_node = PerformIndexCloningNode::from_prepare_node(prepare_node);
        refs.iter()
            .map(|r| perform_node.handle_reference(r))
            .collect()
    }
}
592
#[cfg(test)]
mod tests {
    use super::*;

    // Cloning outside a deep-clone scope is shallow: both handles alias the
    // same slot of the same graph.
    #[test]
    fn test_shallow_clone_same_data() {
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();

        assert!(a.ptr_eq(&b));
        assert_eq!(a.get(), 42);
        assert_eq!(b.get(), 42);

        a.set(100);
        assert_eq!(b.get(), 100);
    }

    // deep_clone preserves aliasing among refs while detaching them from
    // the original graph.
    #[test]
    fn test_deep_clone_preserves_structure() {
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();

        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));

        // Clones still alias each other ...
        assert!(a2.ptr_eq(&b2));
        // ... but not the originals.
        assert!(!a.ptr_eq(&a2));
        assert!(!b.ptr_eq(&b2));

        a2.set(999);
        assert_eq!(b2.get(), 999);
        assert_eq!(a.get(), 42);
    }

    // Each distinct source graph gets its own clone within one deep_clone.
    #[test]
    fn test_deep_clone_multiple_graphs() {
        let graph1 = RefGraph::new();
        let graph2 = RefGraph::new();

        let a1 = graph1.create(1);
        let b1 = a1.clone();
        let a2 = graph2.create(2);
        let b2 = a2.clone();

        let cloned = deep_clone(&(a1.clone(), b1.clone(), a2.clone(), b2.clone()));

        assert!(cloned.0.ptr_eq(&cloned.1));
        assert!(cloned.2.ptr_eq(&cloned.3));

        assert!(!cloned.0.ptr_eq(&cloned.2));
    }

    // Refs to different slots of one graph stay distinct but land in the
    // same cloned graph.
    #[test]
    fn test_deep_clone_different_indices() {
        let graph = RefGraph::new();
        let a = graph.create(1);
        let b = graph.create(2);
        let c = a.clone();
        let (a2, b2, c2) = deep_clone(&(a.clone(), b.clone(), c.clone()));

        assert!(a2.ptr_eq(&c2));
        assert!(!a2.ptr_eq(&b2));
        assert!(a2.same_graph(&b2));

        assert_eq!(a2.get(), 1);
        assert_eq!(b2.get(), 2);
    }

    // Refs buried inside derived-Clone structs participate in deep cloning.
    #[test]
    fn test_nested_struct() {
        #[derive(Clone)]
        struct Inner {
            x: GraphRef<i32>,
            y: GraphRef<i32>,
        }

        #[derive(Clone)]
        struct Outer {
            inner: Inner,
            z: GraphRef<i32>,
        }

        let graph = RefGraph::new();
        let r = graph.create(42);

        let original = Outer {
            inner: Inner {
                x: r.clone(),
                y: r.clone(),
            },
            z: r.clone(),
        };

        let cloned = deep_clone(&original);

        assert!(cloned.inner.x.ptr_eq(&cloned.inner.y));
        assert!(cloned.inner.x.ptr_eq(&cloned.z));

        cloned.inner.x.set(999);
        assert_eq!(cloned.z.get(), 999);
        assert_eq!(original.z.get(), 42);
    }

    // Compile-time check that graph and ref types are Send + Sync.
    #[test]
    fn test_send_sync() {
        fn assert_send_sync<T: Send + Sync>() {}
        assert_send_sync::<RefGraph<i32>>();
        assert_send_sync::<GraphRef<i32>>();
        assert_send_sync::<RefGraph<String>>();
        assert_send_sync::<GraphRef<String>>();
    }

    // Deep clones made on different threads are independent (per-thread
    // generations) and never disturb the original graph.
    #[test]
    fn test_concurrent_deep_clone() {
        use std::thread;

        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();
        let c = graph.create(100);

        let data = Arc::new((a.clone(), b.clone(), c.clone()));

        let handles: Vec<_> = (0..8)
            .map(|i| {
                let data = Arc::clone(&data);
                thread::spawn(move || {
                    let cloned = deep_clone(data.as_ref());

                    assert!(cloned.0.ptr_eq(&cloned.1));
                    assert!(cloned.0.same_graph(&cloned.2));
                    assert!(!cloned.0.ptr_eq(&cloned.2));

                    cloned.0.set(i * 1000);
                    assert_eq!(cloned.1.get(), i * 1000);
                    assert_eq!(cloned.2.get(), 100);

                    assert_eq!(data.0.get(), 42);
                })
            })
            .collect();

        for h in handles {
            h.join().unwrap();
        }
    }

    // One writer and several readers hammer the same slot; readers must
    // only ever observe values the writer actually stored.
    #[test]
    fn test_concurrent_read_write() {
        use std::sync::Barrier;
        use std::thread;

        let graph = RefGraph::new();
        let r = graph.create(0i64);
        let barrier = Arc::new(Barrier::new(5));

        let writer_ref = r.clone();
        let writer_barrier = Arc::clone(&barrier);
        let writer = thread::spawn(move || {
            writer_barrier.wait();
            for i in 0..1000 {
                writer_ref.set(i);
            }
        });

        let readers: Vec<_> = (0..4)
            .map(|_| {
                let reader_ref = r.clone();
                let reader_barrier = Arc::clone(&barrier);
                thread::spawn(move || {
                    reader_barrier.wait();
                    let mut last = -1i64;
                    for _ in 0..1000 {
                        let val = reader_ref.get();
                        assert!(val >= 0 && val < 1000);
                        let _ = last;
                        last = val;
                    }
                })
            })
            .collect();

        writer.join().unwrap();
        for r in readers {
            r.join().unwrap();
        }
    }

    // Many threads deep-cloning the same refs repeatedly; every clone must
    // be internally consistent and isolated from the source.
    #[test]
    fn test_deep_clone_stress() {
        use std::sync::Barrier;
        use std::thread;

        let graph = RefGraph::new();
        let refs: Vec<_> = (0..10).map(|i| graph.create(i)).collect();
        let data = Arc::new(refs);
        let barrier = Arc::new(Barrier::new(50));

        let handles: Vec<_> = (0..50)
            .map(|_| {
                let data = Arc::clone(&data);
                let barrier = Arc::clone(&barrier);
                thread::spawn(move || {
                    barrier.wait();
                    for _ in 0..100 {
                        let cloned = deep_clone(data.as_ref());
                        for j in 1..cloned.len() {
                            assert!(cloned[0].same_graph(&cloned[j]));
                        }
                        for (j, r) in cloned.iter().enumerate() {
                            assert_eq!(r.get(), j as i32);
                        }
                        cloned[0].set(999);
                        assert_eq!(data[0].get(), 0);
                    }
                })
            })
            .collect();

        for h in handles {
            h.join().unwrap();
        }
    }

    // A panic during deep_clone must not leave the thread-local generation
    // set, or later "shallow" clones would silently become deep.
    #[test]
    fn test_panic_resets_generation() {
        use std::panic;

        struct PanicOnClone;

        impl Clone for PanicOnClone {
            fn clone(&self) -> Self {
                panic!("clone panic!");
            }
        }

        let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
            deep_clone(&PanicOnClone);
        }));
        assert!(result.is_err());

        // Deep cloning still works after the panic unwound the guard.
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();
        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
        assert!(a2.ptr_eq(&b2));
        assert_eq!(a2.get(), 42);
    }

    // Nested guards unwound by panics must also restore a clean state.
    #[test]
    fn test_panic_nested_recovery() {
        use std::panic;

        let graph = RefGraph::new();
        let a = graph.create(42);

        let _result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
            let _outer_guard = begin_deep_clone();
            let _inner_result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
                let _inner_guard = begin_deep_clone();
                panic!("inner panic");
            }));
        }));
        let b = a.clone();
        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
        assert!(a2.ptr_eq(&b2));
        assert_eq!(a2.get(), 42);
    }

    // deep_clone through a hand-written Clone impl that recursively clones
    // contained GraphRefs.
    #[test]
    fn test_nested_deep_clone() {
        #[derive(Debug)]
        struct Container {
            inner: Vec<GraphRef<i32>>,
        }

        impl Clone for Container {
            fn clone(&self) -> Self {
                Container {
                    inner: self.inner.clone(),
                }
            }
        }

        let graph = RefGraph::new();
        let a = graph.create(10);
        let b = a.clone();

        let container = Container {
            inner: vec![a.clone(), b.clone()],
        };

        let cloned = deep_clone(&container);
        assert!(cloned.inner[0].ptr_eq(&cloned.inner[1]));
        cloned.inner[0].set(999);
        assert_eq!(cloned.inner[1].get(), 999);
        assert_eq!(container.inner[0].get(), 10);
    }

    // Nested guards share the outermost generation: clones made anywhere in
    // the scope (even after an inner guard drops) land in the same cloned
    // graph, and cloning is shallow again once the scope fully closes.
    #[test]
    fn test_begin_deep_clone_nested_guards() {
        let graph = RefGraph::new();
        let a = graph.create(1);
        let b = a.clone();

        {
            let _outer = begin_deep_clone();
            let cloned_a = a.clone();
            {
                let _inner = begin_deep_clone();
                let cloned_b = b.clone();
                assert!(cloned_a.same_graph(&cloned_b));
            }
            // Inner guard dropped, outer scope (and generation) still active.
            let cloned_b2 = b.clone();
            assert!(cloned_a.ptr_eq(&cloned_b2));
        }
        // Scope fully closed: back to shallow cloning.
        let shallow = a.clone();
        assert!(a.ptr_eq(&shallow));
    }

    // Graphs of different element types are each cloned independently
    // within one deep-clone scope.
    #[test]
    fn test_multiple_graph_types() {
        #[derive(Clone)]
        struct Mixed {
            nums: Vec<GraphRef<i32>>,
            strs: Vec<GraphRef<String>>,
        }

        let g1 = RefGraph::new();
        let g2 = RefGraph::new();

        let n = g1.create(42);
        let s = g2.create("hello".to_string());

        let data = Mixed {
            nums: vec![n.clone(), n.clone()],
            strs: vec![s.clone(), s.clone()],
        };

        let cloned = deep_clone(&data);
        assert!(cloned.nums[0].ptr_eq(&cloned.nums[1]));
        assert!(cloned.strs[0].ptr_eq(&cloned.strs[1]));

        cloned.nums[0].set(999);
        assert_eq!(cloned.nums[1].get(), 999);
        assert_eq!(data.nums[0].get(), 42);

        cloned.strs[0].set("world".to_string());
        assert_eq!(cloned.strs[1].get(), "world");
        assert_eq!(data.strs[0].get(), "hello");
    }

    // Scale check: 15k refs (5k of them aliases) deep-clone correctly.
    #[test]
    fn test_large_graph() {
        let graph = RefGraph::new();
        let refs: Vec<_> = (0..10_000).map(|i| graph.create(i)).collect();

        // Shallow clones (no scope active): aliases of the first 5k slots.
        let mut all_refs = refs.clone();
        for i in 0..5_000 {
            all_refs.push(refs[i].clone());
        }

        let cloned = deep_clone(&all_refs);

        assert_eq!(cloned.len(), 15_000);

        for i in 0..5_000 {
            assert!(cloned[i].ptr_eq(&cloned[10_000 + i]));
        }

        for i in 0..10_000 {
            assert_eq!(cloned[i].get(), i as i32);
        }

        cloned[0].set(-1);
        assert_eq!(all_refs[0].get(), 0);
    }
}