// graph_clonable_ref/src/lib.rs
//! A fast cloneable reference that preserves reference structure during deep cloning.
//!
//! Thread-safe (`Send + Sync`) and panic-safe. Uses `Arc`, `parking_lot::RwLock`,
//! and generation-based tracking for O(1) reference resolution during cloning.
//!
//! # Example
//! ```
//! use graph_clonable_ref::{RefGraph, GraphRef, deep_clone};
//!
//! // Create a graph and some references
//! let graph = RefGraph::new();
//! let a = graph.create(42);
//! let b = a.clone(); // b points to same data as a
//!
//! // Modify through one ref, visible through both
//! a.set(100);
//! assert_eq!(b.get(), 100);
//!
//! // Create a struct holding both refs
//! let data = (a, b);
//!
//! // Deep clone preserves structure: a' and b' point to SAME new data
//! let cloned = deep_clone(&data);
//! cloned.0.set(999);
//! assert_eq!(cloned.1.get(), 999); // Both see the change
//! assert_eq!(data.0.get(), 100);   // Original unchanged
//! ```

use std::cell::Cell;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::{Arc, Weak};

use parking_lot::RwLock;

// Process-wide counters. Both start at 1 so that 0 can act as a sentinel
// (generation 0 = "not inside a deep clone").
static NEXT_GRAPH_ID: AtomicU64 = AtomicU64::new(1);
static NEXT_GENERATION: AtomicU64 = AtomicU64::new(1);

thread_local! {
    // Deep-clone generation currently active on this thread; 0 means
    // ordinary (shallow) cloning. Set/cleared by `begin_deep_clone` guards.
    static CLONE_GENERATION: Cell<u64> = const { Cell::new(0) };
    // Nesting depth of live `DeepCloneGuard`s on this thread; the generation
    // is cleared only when this returns to 0.
    static CLONE_DEPTH: Cell<u32> = const { Cell::new(0) };
}
42
/// Internal storage for a group of related references.
/// All `GraphRef`s created from the same `RefGraph` share the underlying data.
pub struct RefGraph<T: Send + Sync> {
    // Unique id assigned from NEXT_GRAPH_ID; currently unread (kept for
    // debugging/identification), hence the allow.
    #[allow(dead_code)]
    id: u64,
    // Slot-per-value storage: the outer lock guards the Vec's shape (grows in
    // `create`), the inner per-slot locks let different slots be read/written
    // without contending with each other.
    data: RwLock<Vec<RwLock<T>>>,
    // (generation, cloned graph) pairs used during deep cloning. `Weak` so
    // the cache never keeps a cloned graph alive on its own.
    clone_cache: RwLock<Vec<(u64, Weak<RefGraph<T>>)>>,
}
51
52// Safety: RefGraph uses parking_lot::RwLock (Send+Sync) and Arc/Weak (Send+Sync).
53// T: Send + Sync is required by the type parameter bound.
54unsafe impl<T: Send + Sync> Send for RefGraph<T> {}
55unsafe impl<T: Send + Sync> Sync for RefGraph<T> {}
56
57impl<T: Send + Sync> RefGraph<T> {
58    /// Create a new empty RefGraph.
59    pub fn new() -> Arc<Self> {
60        Arc::new(RefGraph {
61            id: NEXT_GRAPH_ID.fetch_add(1, Ordering::Relaxed),
62            data: RwLock::new(Vec::new()),
63            clone_cache: RwLock::new(Vec::new()),
64        })
65    }
66
67    /// Create a new reference with the given initial value.
68    pub fn create(self: &Arc<Self>, value: T) -> GraphRef<T> {
69        let mut data = self.data.write();
70        let index = data.len();
71        data.push(RwLock::new(value));
72        GraphRef {
73            graph: Arc::clone(self),
74            index,
75        }
76    }
77
78    /// Get the number of values stored in this graph.
79    pub fn len(&self) -> usize {
80        self.data.read().len()
81    }
82
83    /// Check if this graph is empty.
84    pub fn is_empty(&self) -> bool {
85        self.data.read().is_empty()
86    }
87
88    /// Clear the clone cache to free memory.
89    pub fn clear_cache(&self) {
90        self.clone_cache.write().clear();
91    }
92}
93
impl<T: Clone + Send + Sync> RefGraph<T> {
    /// Deep clone this graph, creating independent copies of all data.
    ///
    /// Holds the outer `data` read lock for the whole copy; each value is
    /// cloned under its own per-slot read lock. The clone gets a fresh id and
    /// an empty clone cache.
    fn deep_clone_graph(&self) -> Arc<RefGraph<T>> {
        let data = self.data.read();
        let cloned_data: Vec<RwLock<T>> = data
            .iter()
            .map(|cell| RwLock::new(cell.read().clone()))
            .collect();

        Arc::new(RefGraph {
            id: NEXT_GRAPH_ID.fetch_add(1, Ordering::Relaxed),
            data: RwLock::new(cloned_data),
            clone_cache: RwLock::new(Vec::new()),
        })
    }

    /// Get or create a cached clone for the current generation.
    /// Fast path is a single read-lock scan.
    ///
    /// Double-checked locking scheme: (1) optimistic scan under the read
    /// lock; (2) on a miss, deep-clone *before* taking the write lock so
    /// other threads aren't blocked during the copy; (3) re-check under the
    /// write lock in case a racing thread inserted first — if so, our fresh
    /// clone is simply dropped and theirs wins. Cache entries are `Weak`, so
    /// dead entries are pruned on the slow path.
    #[inline]
    fn get_or_create_clone(self: &Arc<Self>, current_gen: u64) -> Arc<RefGraph<T>> {
        // Fast path: read lock (concurrent, uncontended ~2ns)
        {
            let cache = self.clone_cache.read();
            for (gen, weak) in cache.iter() {
                if *gen == current_gen {
                    if let Some(arc) = weak.upgrade() {
                        return arc;
                    }
                    // Entry for this generation exists but its graph was
                    // dropped; fall through to rebuild it.
                    break;
                }
            }
        }

        // Slow path: clone graph, write lock, insert
        let new_graph = self.deep_clone_graph();
        {
            let mut cache = self.clone_cache.write();
            // Double-check: another thread may have inserted while we were cloning
            for (gen, weak) in cache.iter() {
                if *gen == current_gen {
                    if let Some(arc) = weak.upgrade() {
                        // Another thread beat us; drop our clone, use theirs
                        return arc;
                    }
                    break;
                }
            }
            // Cleanup dead entries
            cache.retain(|(_, w)| w.strong_count() > 0);
            cache.push((current_gen, Arc::downgrade(&new_graph)));
        }
        new_graph
    }
}
148
/// A cloneable reference that preserves structure during deep cloning.
///
/// - Regular `clone()` creates a shallow copy (same underlying data)
/// - When cloned during `deep_clone()`, creates structure-preserving deep copy
pub struct GraphRef<T: Send + Sync> {
    // Shared storage; every shallow alias of this ref holds the same Arc.
    graph: Arc<RefGraph<T>>,
    // Slot position inside `graph.data`; never changes for a given ref.
    index: usize,
}
157
158impl<T: Send + Sync> GraphRef<T> {
159    /// Get a copy of the referenced value.
160    pub fn get(&self) -> T
161    where
162        T: Clone,
163    {
164        let data = self.graph.data.read();
165        let val = data[self.index].read().clone();
166        val
167    }
168
169    /// Set the referenced value.
170    pub fn set(&self, value: T) {
171        let data = self.graph.data.read();
172        *data[self.index].write() = value;
173    }
174
175    /// Apply a function to the referenced value.
176    pub fn update<F>(&self, f: F)
177    where
178        F: FnOnce(&mut T),
179    {
180        let data = self.graph.data.read();
181        f(&mut *data[self.index].write());
182    }
183
184    /// Get the index of this reference within its graph.
185    pub fn index(&self) -> usize {
186        self.index
187    }
188
189    /// Check if two refs point to the same data (same graph and index).
190    pub fn ptr_eq(&self, other: &GraphRef<T>) -> bool {
191        Arc::ptr_eq(&self.graph, &other.graph) && self.index == other.index
192    }
193
194    /// Check if two refs are in the same graph (may have different indices).
195    pub fn same_graph(&self, other: &GraphRef<T>) -> bool {
196        Arc::ptr_eq(&self.graph, &other.graph)
197    }
198}
199
impl<T: Clone + Send + Sync> Clone for GraphRef<T> {
    /// Shallow by default; structure-preserving deep copy inside a deep clone.
    ///
    /// Reads the thread-local `CLONE_GENERATION`: 0 means an ordinary clone
    /// (share the same graph and slot). A non-zero generation means this
    /// clone is happening inside `deep_clone()`/`begin_deep_clone()`, so the
    /// ref resolves to the per-generation cloned graph — all refs that shared
    /// a slot before still share one afterwards.
    #[inline]
    fn clone(&self) -> Self {
        let current_gen = CLONE_GENERATION.with(|g| g.get());

        if current_gen > 0 {
            // We're in a deep clone operation - use generation-based lookup
            let new_graph = self.graph.get_or_create_clone(current_gen);
            GraphRef {
                graph: new_graph,
                index: self.index,
            }
        } else {
            // Shallow clone - same graph, same index
            GraphRef {
                graph: Arc::clone(&self.graph),
                index: self.index,
            }
        }
    }
}
221
222impl<T: std::fmt::Debug + Clone + Send + Sync> std::fmt::Debug for GraphRef<T> {
223    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
224        f.debug_struct("GraphRef")
225            .field("value", &self.get())
226            .field("index", &self.index)
227            .finish()
228    }
229}
230
231/// Perform a deep clone that preserves reference structure.
232///
233/// All `GraphRef`s pointing to the same data before cloning will point
234/// to the same (new) data after cloning. Thread-safe and panic-safe.
235///
236/// # Example
237/// ```
238/// use graph_clonable_ref::{RefGraph, deep_clone};
239///
240/// let graph = RefGraph::new();
241/// let a = graph.create(1);
242/// let b = a.clone();
243///
244/// let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
245///
246/// // a2 and b2 point to same NEW data
247/// a2.set(42);
248/// assert_eq!(b2.get(), 42);
249///
250/// // Original unchanged
251/// assert_eq!(a.get(), 1);
252/// ```
253pub fn deep_clone<T: Clone>(value: &T) -> T {
254    let _guard = begin_deep_clone();
255    value.clone()
256}
257
/// Guard that ensures clone generation and depth are reset even on panic.
pub struct DeepCloneGuard {
    // Private field prevents construction outside `begin_deep_clone`.
    _private: (),
}
262
impl Drop for DeepCloneGuard {
    /// Decrement the nesting depth; when the outermost guard drops (depth
    /// returns to 0), clear the generation so later clones are shallow again.
    /// `saturating_sub` keeps this safe even if drops were ever unbalanced.
    /// This runs during unwinding too, which is what makes `deep_clone`
    /// panic-safe.
    fn drop(&mut self) {
        let depth = CLONE_DEPTH.with(|d| {
            let new = d.get().saturating_sub(1);
            d.set(new);
            new
        });
        if depth == 0 {
            CLONE_GENERATION.with(|g| g.set(0));
        }
    }
}
275
276/// Begin a deep clone operation manually. Returns a guard that resets state on drop.
277///
278/// Supports nesting: only the outermost call sets a new generation.
279/// Prefer `deep_clone()` for simple cases.
280pub fn begin_deep_clone() -> DeepCloneGuard {
281    let depth = CLONE_DEPTH.with(|d| {
282        let c = d.get();
283        d.set(c + 1);
284        c
285    });
286    if depth == 0 {
287        let gen = NEXT_GENERATION.fetch_add(1, Ordering::Relaxed);
288        // Skip 0 (0 means shallow clone)
289        let gen = if gen == 0 {
290            NEXT_GENERATION.fetch_add(1, Ordering::Relaxed)
291        } else {
292            gen
293        };
294        CLONE_GENERATION.with(|g| g.set(gen));
295    }
296    DeepCloneGuard { _private: () }
297}
298
// ============================================================================
// HashMap-based approach for comparison (slower, single-threaded)
// ============================================================================

pub mod hashmap_based {
    //! HashMap-based deep cloning for comparison.
    //! This is the traditional approach - slower due to hash lookups.

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    /// Internal storage using HashMap for clone tracking.
    pub struct HashMapRefGraph<T> {
        // Slot-per-value storage mirroring `RefGraph`, but single-threaded.
        data: RefCell<Vec<RefCell<T>>>,
    }

    impl<T> HashMapRefGraph<T> {
        /// Create a new, empty graph.
        pub fn new() -> Rc<Self> {
            Rc::new(HashMapRefGraph {
                data: RefCell::new(Vec::new()),
            })
        }

        /// Store `value` as a new slot and return a reference to it.
        pub fn create(self: &Rc<Self>, value: T) -> HashMapGraphRef<T> {
            let mut data = self.data.borrow_mut();
            let index = data.len();
            data.push(RefCell::new(value));
            HashMapGraphRef {
                graph: Rc::clone(self),
                index,
            }
        }
    }

    impl<T: Clone> HashMapRefGraph<T> {
        /// Copy every stored value into a brand-new, independent graph.
        fn deep_clone_graph(&self) -> Rc<HashMapRefGraph<T>> {
            let data = self.data.borrow();
            let cloned_data: Vec<RefCell<T>> = data
                .iter()
                .map(|cell| RefCell::new(cell.borrow().clone()))
                .collect();

            Rc::new(HashMapRefGraph {
                data: RefCell::new(cloned_data),
            })
        }
    }

    /// Reference into a `HashMapRefGraph`. `clone()` is shallow (aliases the
    /// same slot), matching `GraphRef`'s regular-clone semantics.
    pub struct HashMapGraphRef<T> {
        graph: Rc<HashMapRefGraph<T>>,
        index: usize,
    }

    // FIX: previously there was no `Clone` impl at all, so it was impossible
    // to create two refs aliasing the same slot — which is exactly the
    // structure the clone-context comparison is meant to exercise, and which
    // both `GraphRef` and `IndexRef` support. Shallow clone, consistent with
    // the other two ref types.
    impl<T> Clone for HashMapGraphRef<T> {
        fn clone(&self) -> Self {
            HashMapGraphRef {
                graph: Rc::clone(&self.graph),
                index: self.index,
            }
        }
    }

    impl<T: Clone> HashMapGraphRef<T> {
        /// Clone of the referenced value.
        pub fn get(&self) -> T {
            self.graph.data.borrow()[self.index].borrow().clone()
        }

        /// Overwrite the referenced value.
        pub fn set(&self, value: T) {
            *self.graph.data.borrow()[self.index].borrow_mut() = value;
        }
    }

    impl<T> HashMapGraphRef<T> {
        /// True when both refs name the same slot of the same graph.
        pub fn ptr_eq(&self, other: &HashMapGraphRef<T>) -> bool {
            Rc::ptr_eq(&self.graph, &other.graph) && self.index == other.index
        }
    }

    /// Clone context using HashMap for lookups.
    pub struct HashMapCloneContext<T> {
        // original graph pointer -> that graph's clone within this context
        map: RefCell<HashMap<*const HashMapRefGraph<T>, Rc<HashMapRefGraph<T>>>>,
    }

    impl<T: Clone> HashMapCloneContext<T> {
        /// Create an empty context (no graphs cloned yet).
        pub fn new() -> Self {
            HashMapCloneContext {
                map: RefCell::new(HashMap::new()),
            }
        }

        /// Clone a single ref using HashMap lookup.
        ///
        /// Refs that shared a graph before cloning share the same new graph
        /// afterwards, so aliasing structure is preserved per context.
        pub fn clone_ref(&self, r: &HashMapGraphRef<T>) -> HashMapGraphRef<T> {
            let ptr = Rc::as_ptr(&r.graph);
            let mut map = self.map.borrow_mut();

            let new_graph = map
                .entry(ptr)
                .or_insert_with(|| r.graph.deep_clone_graph())
                .clone();

            HashMapGraphRef {
                graph: new_graph,
                index: r.index,
            }
        }
    }

    impl<T: Clone> Default for HashMapCloneContext<T> {
        fn default() -> Self {
            Self::new()
        }
    }
}
404
// ============================================================================
// Two-phase index-based approach (like user's implementation)
// ============================================================================

pub mod index_based {
    //! Two-phase index-based deep cloning (matching user's approach).
    //!
    //! Phase 1 (Prepare): Walk through all refs, build HashMap lookup and pos_array
    //! Phase 2 (Perform): Walk through again, use pos_array for O(1) lookups
    //!
    //! This avoids HashMap lookups during the actual clone, but has overhead:
    //! - Two passes over all data
    //! - Cloning pos_array for each clone operation
    //! - Pre-allocating ref_array

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    /// A reference that supports two-phase cloning.
    pub struct IndexRef<T> {
        inner: Rc<RefCell<T>>,
    }

    impl<T> IndexRef<T> {
        /// Wrap `value` in a new shared cell.
        pub fn new(value: T) -> Self {
            IndexRef {
                inner: Rc::new(RefCell::new(value)),
            }
        }

        /// Stable address of the shared cell; used as an identity key.
        pub fn ptr(&self) -> *const RefCell<T> {
            Rc::as_ptr(&self.inner)
        }
    }

    impl<T: Clone> IndexRef<T> {
        /// Clone of the referenced value.
        pub fn get(&self) -> T {
            self.inner.borrow().clone()
        }

        /// Overwrite the referenced value.
        pub fn set(&self, value: T) {
            *self.inner.borrow_mut() = value;
        }
    }

    impl<T> Clone for IndexRef<T> {
        /// Shallow clone: the new ref aliases the same cell.
        fn clone(&self) -> Self {
            IndexRef {
                inner: Rc::clone(&self.inner),
            }
        }
    }

    impl<T> IndexRef<T> {
        /// True when both refs alias the same cell.
        pub fn ptr_eq(&self, other: &IndexRef<T>) -> bool {
            Rc::ptr_eq(&self.inner, &other.inner)
        }
    }

    /// Preparation node - built once, stores the reference structure.
    /// Uses HashMap to detect shared references.
    pub struct PrepareIndexCloningNode<T> {
        /// (is_existing, position) for each reference in traversal order
        pub pos_array: Vec<(bool, usize)>,
        /// Maps raw pointer -> position in ref_array
        lookup_map: HashMap<*const RefCell<T>, usize>,
        /// Counter for unique references
        ref_pos_counter: usize,
    }

    impl<T> PrepareIndexCloningNode<T> {
        /// Empty node; use `handle_reference` to record each ref in order.
        pub fn new() -> Self {
            Self {
                pos_array: Vec::new(),
                lookup_map: HashMap::new(),
                ref_pos_counter: 0,
            }
        }

        /// Empty node with pre-sized internal storage.
        pub fn with_capacity(capacity: usize) -> Self {
            Self {
                pos_array: Vec::with_capacity(capacity),
                lookup_map: HashMap::with_capacity(capacity),
                ref_pos_counter: 0,
            }
        }

        /// Handle a reference during preparation phase.
        /// Records whether it's new or existing, and its position.
        pub fn handle_reference(&mut self, r: &IndexRef<T>) {
            let ptr = r.ptr();
            if let Some(&existing_pos) = self.lookup_map.get(&ptr) {
                // Already seen this reference
                self.pos_array.push((true, existing_pos));
            } else {
                // New reference
                let pos = self.ref_pos_counter;
                self.ref_pos_counter += 1;
                self.lookup_map.insert(ptr, pos);
                self.pos_array.push((false, pos));
            }
        }

        /// Get the number of unique references.
        pub fn unique_count(&self) -> usize {
            self.ref_pos_counter
        }
    }

    impl<T> Default for PrepareIndexCloningNode<T> {
        fn default() -> Self {
            Self::new()
        }
    }

    /// Perform node - created from PrepareNode for each clone operation.
    pub struct PerformIndexCloningNode<T> {
        /// Current position in pos_array
        counter: usize,
        /// Clone targets, filled lazily as each unique ref is first visited.
        /// `None` = not cloned yet.
        ///
        /// FIX: this used to be `Vec<Rc<RefCell<T>>>` pre-filled with
        /// `unsafe { std::mem::zeroed() }` placeholders. Producing a zeroed
        /// `T` is undefined behavior for any type with validity invariants
        /// (`String`, `Box`, references, `NonZero*`, …) the moment the value
        /// exists — regardless of whether it is read. `Option` placeholders
        /// are sound for every `T` and turn a prepare/perform order mismatch
        /// into a loud panic instead of an invalid read.
        ref_array: Vec<Option<Rc<RefCell<T>>>>,
        /// Cloned from PrepareNode
        pos_array: Vec<(bool, usize)>,
    }

    impl<T: Clone> PerformIndexCloningNode<T> {
        /// Create from a prepare node. This clones the pos_array.
        pub fn from_prepare_node(node: &PrepareIndexCloningNode<T>) -> Self {
            Self {
                counter: 0,
                // `None` placeholders: no dummy values are ever constructed.
                ref_array: vec![None; node.ref_pos_counter],
                pos_array: node.pos_array.clone(), // This clone is expensive!
            }
        }

        /// Handle a reference during clone phase.
        /// Must be called in the same order as during preparation.
        pub fn handle_reference(&mut self, orig: &IndexRef<T>) -> IndexRef<T> {
            let pos = self.counter;
            self.counter += 1;

            let (seen_before, array_pos) = self.pos_array[pos];

            if seen_before {
                // Already cloned in this pass; alias the cached cell.
                let cached = self.ref_array[array_pos]
                    .as_ref()
                    .expect("perform order must match prepare order");
                IndexRef {
                    inner: Rc::clone(cached),
                }
            } else {
                // First visit: deep-copy the value and remember the new cell.
                let new_ref = Rc::new(RefCell::new(orig.inner.borrow().clone()));
                self.ref_array[array_pos] = Some(Rc::clone(&new_ref));
                IndexRef { inner: new_ref }
            }
        }
    }

    /// Convenience function to prepare a slice of refs.
    pub fn prepare_refs<T>(refs: &[IndexRef<T>]) -> PrepareIndexCloningNode<T> {
        let mut node = PrepareIndexCloningNode::with_capacity(refs.len());
        for r in refs {
            node.handle_reference(r);
        }
        node
    }

    /// Convenience function to clone refs using a prepare node.
    pub fn clone_refs<T: Clone>(
        refs: &[IndexRef<T>],
        prepare_node: &PrepareIndexCloningNode<T>,
    ) -> Vec<IndexRef<T>> {
        let mut perform_node = PerformIndexCloningNode::from_prepare_node(prepare_node);
        refs.iter()
            .map(|r| perform_node.handle_reference(r))
            .collect()
    }
}
592
#[cfg(test)]
mod tests {
    use super::*;

    // Regular clone() aliases the same slot: mutations are visible via both.
    #[test]
    fn test_shallow_clone_same_data() {
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();

        assert!(a.ptr_eq(&b));
        assert_eq!(a.get(), 42);
        assert_eq!(b.get(), 42);

        a.set(100);
        assert_eq!(b.get(), 100);
    }

    // deep_clone keeps aliasing intact while making the copy independent of
    // the original.
    #[test]
    fn test_deep_clone_preserves_structure() {
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();

        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));

        // a2 and b2 should point to same new data
        assert!(a2.ptr_eq(&b2));

        // But different from original
        assert!(!a.ptr_eq(&a2));
        assert!(!b.ptr_eq(&b2));

        // Verify independence
        a2.set(999);
        assert_eq!(b2.get(), 999); // Same data
        assert_eq!(a.get(), 42); // Original unchanged
    }

    // Each source graph maps to its own cloned graph within a single
    // deep_clone; refs from different graphs never merge.
    #[test]
    fn test_deep_clone_multiple_graphs() {
        let graph1 = RefGraph::new();
        let graph2 = RefGraph::new();

        let a1 = graph1.create(1);
        let b1 = a1.clone();
        let a2 = graph2.create(2);
        let b2 = a2.clone();

        let cloned = deep_clone(&(a1.clone(), b1.clone(), a2.clone(), b2.clone()));

        // Same structure preserved within each graph
        assert!(cloned.0.ptr_eq(&cloned.1));
        assert!(cloned.2.ptr_eq(&cloned.3));

        // Different graphs remain different
        assert!(!cloned.0.ptr_eq(&cloned.2));
    }

    // Distinct slots within one graph stay distinct after a deep clone,
    // while aliases of the same slot stay aliased.
    #[test]
    fn test_deep_clone_different_indices() {
        let graph = RefGraph::new();
        let a = graph.create(1);
        let b = graph.create(2);
        let c = a.clone(); // Same index as a

        let (a2, b2, c2) = deep_clone(&(a.clone(), b.clone(), c.clone()));

        // a2 and c2 same, b2 different index
        assert!(a2.ptr_eq(&c2));
        assert!(!a2.ptr_eq(&b2));
        assert!(a2.same_graph(&b2));

        // Values preserved
        assert_eq!(a2.get(), 1);
        assert_eq!(b2.get(), 2);
    }

    // #[derive(Clone)] on nested structs participates in deep cloning, since
    // the derived impl invokes GraphRef::clone for every field.
    #[test]
    fn test_nested_struct() {
        #[derive(Clone)]
        struct Inner {
            x: GraphRef<i32>,
            y: GraphRef<i32>,
        }

        #[derive(Clone)]
        struct Outer {
            inner: Inner,
            z: GraphRef<i32>,
        }

        let graph = RefGraph::new();
        let r = graph.create(42);

        let original = Outer {
            inner: Inner {
                x: r.clone(),
                y: r.clone(),
            },
            z: r.clone(),
        };

        let cloned = deep_clone(&original);

        // All three refs in cloned should point to same new data
        assert!(cloned.inner.x.ptr_eq(&cloned.inner.y));
        assert!(cloned.inner.x.ptr_eq(&cloned.z));

        // Independent from original
        cloned.inner.x.set(999);
        assert_eq!(cloned.z.get(), 999);
        assert_eq!(original.z.get(), 42);
    }
707
    // ========================================================================
    // Thread safety tests
    // ========================================================================

    // Compile-time check: the public types satisfy Send + Sync.
    #[test]
    fn test_send_sync() {
        fn assert_send_sync<T: Send + Sync>() {}
        assert_send_sync::<RefGraph<i32>>();
        assert_send_sync::<GraphRef<i32>>();
        assert_send_sync::<RefGraph<String>>();
        assert_send_sync::<GraphRef<String>>();
    }

    // 8 threads deep-clone the same shared tuple concurrently. Generations
    // are thread-local and allocated from a global counter, so each thread
    // gets its own independent clone.
    #[test]
    fn test_concurrent_deep_clone() {
        use std::thread;

        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();
        let c = graph.create(100);

        let data = Arc::new((a.clone(), b.clone(), c.clone()));

        let handles: Vec<_> = (0..8)
            .map(|i| {
                let data = Arc::clone(&data);
                thread::spawn(move || {
                    let cloned = deep_clone(data.as_ref());

                    // Structure preserved: a' and b' point to same new data
                    assert!(cloned.0.ptr_eq(&cloned.1));
                    // c' is in same graph but different index
                    assert!(cloned.0.same_graph(&cloned.2));
                    assert!(!cloned.0.ptr_eq(&cloned.2));

                    // Independence: mutation doesn't affect original
                    cloned.0.set(i * 1000);
                    assert_eq!(cloned.1.get(), i * 1000);
                    assert_eq!(cloned.2.get(), 100);

                    // Original unchanged
                    assert_eq!(data.0.get(), 42);
                })
            })
            .collect();

        for h in handles {
            h.join().unwrap();
        }
    }
759
760    #[test]
761    fn test_concurrent_read_write() {
762        use std::sync::Barrier;
763        use std::thread;
764
765        let graph = RefGraph::new();
766        let r = graph.create(0i64);
767        let barrier = Arc::new(Barrier::new(5));
768
769        // 1 writer thread
770        let writer_ref = r.clone();
771        let writer_barrier = Arc::clone(&barrier);
772        let writer = thread::spawn(move || {
773            writer_barrier.wait();
774            for i in 0..1000 {
775                writer_ref.set(i);
776            }
777        });
778
779        // 4 reader threads
780        let readers: Vec<_> = (0..4)
781            .map(|_| {
782                let reader_ref = r.clone();
783                let reader_barrier = Arc::clone(&barrier);
784                thread::spawn(move || {
785                    reader_barrier.wait();
786                    let mut last = -1i64;
787                    for _ in 0..1000 {
788                        let val = reader_ref.get();
789                        // Values should be valid i64 (no torn reads)
790                        assert!(val >= 0 && val < 1000);
791                        let _ = last;
792                        last = val;
793                    }
794                })
795            })
796            .collect();
797
798        writer.join().unwrap();
799        for r in readers {
800            r.join().unwrap();
801        }
802    }
803
    // Heavy contention on one graph's clone cache: 50 threads x 100 deep
    // clones each, exercising both the read-lock fast path and the
    // clone+write-lock slow path of `get_or_create_clone`.
    #[test]
    fn test_deep_clone_stress() {
        use std::sync::Barrier;
        use std::thread;

        let graph = RefGraph::new();
        let refs: Vec<_> = (0..10).map(|i| graph.create(i)).collect();
        let data = Arc::new(refs);
        let barrier = Arc::new(Barrier::new(50));

        let handles: Vec<_> = (0..50)
            .map(|_| {
                let data = Arc::clone(&data);
                let barrier = Arc::clone(&barrier);
                thread::spawn(move || {
                    barrier.wait();
                    for _ in 0..100 {
                        let cloned = deep_clone(data.as_ref());
                        // All cloned refs should be in the same graph
                        for j in 1..cloned.len() {
                            assert!(cloned[0].same_graph(&cloned[j]));
                        }
                        // Values preserved
                        for (j, r) in cloned.iter().enumerate() {
                            assert_eq!(r.get(), j as i32);
                        }
                        // Independence
                        cloned[0].set(999);
                        assert_eq!(data[0].get(), 0);
                    }
                })
            })
            .collect();

        for h in handles {
            h.join().unwrap();
        }
    }
842
    // ========================================================================
    // Panic safety tests
    // ========================================================================

    // A panic inside deep_clone must not leave the thread-local generation
    // set; otherwise every later clone() on this thread would deep-clone.
    #[test]
    fn test_panic_resets_generation() {
        use std::panic;

        struct PanicOnClone;

        impl Clone for PanicOnClone {
            fn clone(&self) -> Self {
                panic!("clone panic!");
            }
        }

        // This will panic during deep_clone
        let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
            deep_clone(&PanicOnClone);
        }));
        assert!(result.is_err());

        // Generation should be reset — subsequent deep_clone should work
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();
        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
        assert!(a2.ptr_eq(&b2));
        assert_eq!(a2.get(), 42);
    }

    // A panic in a nested deep clone unwinds only one depth level; the outer
    // guard still resets the generation when it drops.
    #[test]
    fn test_panic_nested_recovery() {
        use std::panic;

        // Outer deep_clone that will survive; inner one panics
        let graph = RefGraph::new();
        let a = graph.create(42);

        // Simulate: begin outer deep clone, then panic in inner
        let _result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
            let _outer_guard = begin_deep_clone();
            // Inner deep_clone panics
            let _inner_result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
                let _inner_guard = begin_deep_clone();
                panic!("inner panic");
            }));
            // After inner panic, outer should still work at depth 0
            // (inner guard was dropped by unwinding, decrementing depth)
        }));
        // outer_guard drops here, resetting generation

        // Should be back to clean state
        let b = a.clone();
        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
        assert!(a2.ptr_eq(&b2));
        assert_eq!(a2.get(), 42);
    }
901
    // ========================================================================
    // Nesting tests
    // ========================================================================

    // A Clone impl that itself clones refs while a deep clone is already in
    // progress must reuse the outer generation, not start a new one.
    #[test]
    fn test_nested_deep_clone() {
        // A type whose Clone impl calls deep_clone internally
        #[derive(Debug)]
        struct Container {
            inner: Vec<GraphRef<i32>>,
        }

        impl Clone for Container {
            fn clone(&self) -> Self {
                // This should work with nesting — inner deep_clone
                // reuses the outer generation
                Container {
                    inner: self.inner.clone(),
                }
            }
        }

        let graph = RefGraph::new();
        let a = graph.create(10);
        let b = a.clone();

        let container = Container {
            inner: vec![a.clone(), b.clone()],
        };

        let cloned = deep_clone(&container);
        assert!(cloned.inner[0].ptr_eq(&cloned.inner[1]));
        cloned.inner[0].set(999);
        assert_eq!(cloned.inner[1].get(), 999);
        assert_eq!(container.inner[0].get(), 10);
    }

    // Explicit nested guards share one generation; it is only cleared when
    // the outermost guard drops.
    #[test]
    fn test_begin_deep_clone_nested_guards() {
        let graph = RefGraph::new();
        let a = graph.create(1);
        let b = a.clone();

        {
            let _outer = begin_deep_clone();
            let cloned_a = a.clone(); // deep clone
            {
                let _inner = begin_deep_clone();
                let cloned_b = b.clone(); // same generation as outer
                // Both should point to same new graph
                assert!(cloned_a.same_graph(&cloned_b));
            }
            // Inner guard dropped, but depth > 0, so generation stays
            let cloned_b2 = b.clone();
            assert!(cloned_a.ptr_eq(&cloned_b2));
        }
        // Outer guard dropped, depth == 0, generation reset

        // Now a regular clone should be shallow
        let shallow = a.clone();
        assert!(a.ptr_eq(&shallow));
    }
964
    // ========================================================================
    // Edge case tests
    // ========================================================================

    // Deep-cloning a struct whose refs span multiple graphs with different
    // element types; each graph resolves through its own clone cache.
    #[test]
    fn test_multiple_graph_types() {
        #[derive(Clone)]
        struct Mixed {
            nums: Vec<GraphRef<i32>>,
            strs: Vec<GraphRef<String>>,
        }

        let g1 = RefGraph::new();
        let g2 = RefGraph::new();

        let n = g1.create(42);
        let s = g2.create("hello".to_string());

        let data = Mixed {
            nums: vec![n.clone(), n.clone()],
            strs: vec![s.clone(), s.clone()],
        };

        let cloned = deep_clone(&data);
        assert!(cloned.nums[0].ptr_eq(&cloned.nums[1]));
        assert!(cloned.strs[0].ptr_eq(&cloned.strs[1]));

        cloned.nums[0].set(999);
        assert_eq!(cloned.nums[1].get(), 999);
        assert_eq!(data.nums[0].get(), 42);

        cloned.strs[0].set("world".to_string());
        assert_eq!(cloned.strs[1].get(), "world");
        assert_eq!(data.strs[0].get(), "hello");
    }

    // 15k refs (10k unique slots + 5k aliases) through one deep clone:
    // sharing, values, and independence must all survive.
    #[test]
    fn test_large_graph() {
        let graph = RefGraph::new();
        let refs: Vec<_> = (0..10_000).map(|i| graph.create(i)).collect();

        // Add some shared refs
        let mut all_refs = refs.clone();
        for i in 0..5_000 {
            all_refs.push(refs[i].clone());
        }

        let cloned = deep_clone(&all_refs);

        assert_eq!(cloned.len(), 15_000);

        // Check sharing is preserved
        for i in 0..5_000 {
            assert!(cloned[i].ptr_eq(&cloned[10_000 + i]));
        }

        // Check values
        for i in 0..10_000 {
            assert_eq!(cloned[i].get(), i as i32);
        }

        // Independence
        cloned[0].set(-1);
        assert_eq!(all_refs[0].get(), 0);
    }
}