// graph_clonable_ref/lib.rs
1//! A fast cloneable reference that preserves reference structure during deep cloning.
2//!
3//! Thread-safe (`Send + Sync`) and panic-safe. Uses `Arc`, `parking_lot::RwLock`,
4//! and generation-based tracking for O(1) reference resolution during cloning.
5//!
6//! # Example
7//! ```
8//! use graph_clonable_ref::{RefGraph, GraphRef, deep_clone};
9//!
10//! // Create a graph and some references
11//! let graph = RefGraph::new();
12//! let a = graph.create(42);
13//! let b = a.clone(); // b points to same data as a
14//!
15//! // Modify through one ref, visible through both
16//! a.set(100);
17//! assert_eq!(b.get(), 100);
18//!
19//! // Create a struct holding both refs
20//! let data = (a, b);
21//!
22//! // Deep clone preserves structure: a' and b' point to SAME new data
23//! let cloned = deep_clone(&data);
24//! cloned.0.set(999);
25//! assert_eq!(cloned.1.get(), 999); // Both see the change
26//! assert_eq!(data.0.get(), 100);   // Original unchanged
27//! ```
28
29use std::cell::Cell;
30use std::sync::atomic::{AtomicU64, Ordering};
31use std::sync::{Arc, Weak};
32
33use parking_lot::RwLock;
34
/// Monotonic source of deep-clone generation ids. Starts at 1 because 0 is
/// reserved to mean "no deep clone in progress".
static NEXT_GENERATION: AtomicU64 = AtomicU64::new(1);

thread_local! {
    // Generation of the deep clone currently running on this thread
    // (0 = none, so a plain `GraphRef::clone` stays shallow).
    static CLONE_GENERATION: Cell<u64> = const { Cell::new(0) };
    // Nesting depth of live `DeepCloneGuard`s on this thread; the generation
    // is cleared only when the outermost guard drops.
    static CLONE_DEPTH: Cell<u32> = const { Cell::new(0) };
}
41
/// Internal storage for a group of related references.
/// All `GraphRef`s created from the same `RefGraph` share the underlying data.
pub struct RefGraph<T: Send + Sync> {
    // Slot-per-value storage; `GraphRef.index` points into this Vec.
    data: RwLock<Vec<T>>,
    // Per-deep-clone cache: (generation id, weak handle to that generation's
    // cloned graph). Weak so a finished clone doesn't keep the copy alive.
    clone_cache: RwLock<Vec<(u64, Weak<RefGraph<T>>)>>,
}
48
49impl<T: Send + Sync> RefGraph<T> {
50    /// Create a new empty RefGraph.
51    pub fn new() -> Arc<Self> {
52        Arc::new(RefGraph {
53            data: RwLock::new(Vec::new()),
54            clone_cache: RwLock::new(Vec::new()),
55        })
56    }
57
58    /// Create a new reference with the given initial value.
59    pub fn create(self: &Arc<Self>, value: T) -> GraphRef<T> {
60        let mut data = self.data.write();
61        let index = data.len();
62        data.push(value);
63        GraphRef {
64            graph: Arc::clone(self),
65            index,
66        }
67    }
68
69    /// Get the number of values stored in this graph.
70    pub fn len(&self) -> usize {
71        self.data.read().len()
72    }
73
74    /// Check if this graph is empty.
75    pub fn is_empty(&self) -> bool {
76        self.data.read().is_empty()
77    }
78
79    /// Clear the clone cache to free memory.
80    pub fn clear_cache(&self) {
81        self.clone_cache.write().clear();
82    }
83}
84
impl<T: Clone + Send + Sync> RefGraph<T> {
    /// Deep clone this graph, creating independent copies of all data.
    ///
    /// The copy starts with an empty clone cache of its own.
    fn deep_clone_graph(&self) -> Arc<RefGraph<T>> {
        let data = self.data.read();
        Arc::new(RefGraph {
            data: RwLock::new(data.clone()),
            clone_cache: RwLock::new(Vec::new()),
        })
    }

    /// Get or create a cached clone for the current generation.
    /// Fast path is a single read-lock scan.
    ///
    /// Called from `GraphRef::clone` while a deep clone is in progress, so
    /// every ref into this graph resolves to the SAME copied graph for a
    /// given generation. The cache holds `Weak` handles, so the cloned graph
    /// is freed when its last `GraphRef` drops; dead entries are swept on
    /// the slow path.
    #[inline]
    fn get_or_create_clone(self: &Arc<Self>, current_gen: u64) -> Arc<RefGraph<T>> {
        // Fast path: read lock (concurrent, uncontended ~2ns)
        {
            let cache = self.clone_cache.read();
            for (generation, weak) in cache.iter() {
                if *generation == current_gen {
                    if let Some(arc) = weak.upgrade() {
                        return arc;
                    }
                    // Entry found but its graph was already dropped; fall
                    // through to the slow path to rebuild it.
                    break;
                }
            }
        }

        // Slow path: clone graph, write lock, insert
        let new_graph = self.deep_clone_graph();
        {
            let mut cache = self.clone_cache.write();
            // Double-check: another thread may have inserted while we were cloning
            for (generation, weak) in cache.iter() {
                if *generation == current_gen {
                    if let Some(arc) = weak.upgrade() {
                        // Another thread beat us; drop our clone, use theirs
                        return arc;
                    }
                    break;
                }
            }
            // Cleanup dead entries
            cache.retain(|(_, w)| w.strong_count() > 0);
            cache.push((current_gen, Arc::downgrade(&new_graph)));
        }
        new_graph
    }
}
133
/// A cloneable reference that preserves structure during deep cloning.
///
/// - Regular `clone()` creates a shallow copy (same underlying data)
/// - When cloned during `deep_clone()`, creates structure-preserving deep copy
pub struct GraphRef<T: Send + Sync> {
    // Shared storage this reference points into.
    graph: Arc<RefGraph<T>>,
    // Slot of this reference's value within `graph.data`.
    index: usize,
}
142
143impl<T: Send + Sync> GraphRef<T> {
144    /// Get a copy of the referenced value.
145    pub fn get(&self) -> T
146    where
147        T: Clone,
148    {
149        self.graph.data.read()[self.index].clone()
150    }
151
152    /// Set the referenced value.
153    pub fn set(&self, value: T) {
154        self.graph.data.write()[self.index] = value;
155    }
156
157    /// Apply a function to the referenced value.
158    pub fn update<F>(&self, f: F)
159    where
160        F: FnOnce(&mut T),
161    {
162        f(&mut self.graph.data.write()[self.index]);
163    }
164
165    /// Get the index of this reference within its graph.
166    pub fn index(&self) -> usize {
167        self.index
168    }
169
170    /// Check if two refs point to the same data (same graph and index).
171    pub fn ptr_eq(&self, other: &GraphRef<T>) -> bool {
172        Arc::ptr_eq(&self.graph, &other.graph) && self.index == other.index
173    }
174
175    /// Check if two refs are in the same graph (may have different indices).
176    pub fn same_graph(&self, other: &GraphRef<T>) -> bool {
177        Arc::ptr_eq(&self.graph, &other.graph)
178    }
179}
180
181impl<T: Clone + Send + Sync> Clone for GraphRef<T> {
182    #[inline]
183    fn clone(&self) -> Self {
184        let current_gen = CLONE_GENERATION.with(|g| g.get());
185
186        if current_gen > 0 {
187            // We're in a deep clone operation - use generation-based lookup
188            let new_graph = self.graph.get_or_create_clone(current_gen);
189            GraphRef {
190                graph: new_graph,
191                index: self.index,
192            }
193        } else {
194            // Shallow clone - same graph, same index
195            GraphRef {
196                graph: Arc::clone(&self.graph),
197                index: self.index,
198            }
199        }
200    }
201}
202
203impl<T: std::fmt::Debug + Clone + Send + Sync> std::fmt::Debug for GraphRef<T> {
204    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
205        f.debug_struct("GraphRef")
206            .field("value", &self.get())
207            .field("index", &self.index)
208            .finish()
209    }
210}
211
212/// Perform a deep clone that preserves reference structure.
213///
214/// All `GraphRef`s pointing to the same data before cloning will point
215/// to the same (new) data after cloning. Thread-safe and panic-safe.
216///
217/// # Example
218/// ```
219/// use graph_clonable_ref::{RefGraph, deep_clone};
220///
221/// let graph = RefGraph::new();
222/// let a = graph.create(1);
223/// let b = a.clone();
224///
225/// let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
226///
227/// // a2 and b2 point to same NEW data
228/// a2.set(42);
229/// assert_eq!(b2.get(), 42);
230///
231/// // Original unchanged
232/// assert_eq!(a.get(), 1);
233/// ```
234pub fn deep_clone<T: Clone>(value: &T) -> T {
235    let _guard = begin_deep_clone();
236    value.clone()
237}
238
/// Guard that ensures clone generation and depth are reset even on panic.
pub struct DeepCloneGuard {
    // Private zero-sized field: prevents construction outside
    // `begin_deep_clone`, so depth/generation bookkeeping stays balanced.
    _private: (),
}
243
244impl Drop for DeepCloneGuard {
245    fn drop(&mut self) {
246        let depth = CLONE_DEPTH.with(|d| {
247            let new = d.get().saturating_sub(1);
248            d.set(new);
249            new
250        });
251        if depth == 0 {
252            CLONE_GENERATION.with(|g| g.set(0));
253        }
254    }
255}
256
257/// Begin a deep clone operation manually. Returns a guard that resets state on drop.
258///
259/// Supports nesting: only the outermost call sets a new generation.
260/// Prefer `deep_clone()` for simple cases.
261pub fn begin_deep_clone() -> DeepCloneGuard {
262    let depth = CLONE_DEPTH.with(|d| {
263        let c = d.get();
264        d.set(c + 1);
265        c
266    });
267    if depth == 0 {
268        let generation = NEXT_GENERATION.fetch_add(1, Ordering::Relaxed);
269        // Skip 0 (0 means shallow clone)
270        let generation = if generation == 0 {
271            NEXT_GENERATION.fetch_add(1, Ordering::Relaxed)
272        } else {
273            generation
274        };
275        CLONE_GENERATION.with(|g| g.set(generation));
276    }
277    DeepCloneGuard { _private: () }
278}
279
280// ============================================================================
281// HashMap-based approach for comparison (slower, single-threaded)
282// ============================================================================
283
pub mod hashmap_based {
    //! HashMap-based deep cloning for comparison.
    //! This is the traditional approach - slower due to hash lookups.

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    /// Internal storage using HashMap for clone tracking.
    pub struct HashMapRefGraph<T> {
        data: RefCell<Vec<RefCell<T>>>,
    }

    impl<T> HashMapRefGraph<T> {
        /// Create a new, empty graph.
        pub fn new() -> Rc<Self> {
            let empty = HashMapRefGraph {
                data: RefCell::new(Vec::new()),
            };
            Rc::new(empty)
        }

        /// Append a value and hand back a reference to its slot.
        pub fn create(self: &Rc<Self>, value: T) -> HashMapGraphRef<T> {
            let mut slots = self.data.borrow_mut();
            slots.push(RefCell::new(value));
            HashMapGraphRef {
                graph: Rc::clone(self),
                index: slots.len() - 1,
            }
        }
    }

    impl<T: Clone> HashMapRefGraph<T> {
        /// Produce an independent copy of every stored value.
        fn deep_clone_graph(self: &Rc<Self>) -> Rc<HashMapRefGraph<T>> {
            let copied: Vec<RefCell<T>> = self
                .data
                .borrow()
                .iter()
                .map(|slot| RefCell::new(slot.borrow().clone()))
                .collect();

            Rc::new(HashMapRefGraph {
                data: RefCell::new(copied),
            })
        }
    }

    /// A handle into one slot of a `HashMapRefGraph`.
    pub struct HashMapGraphRef<T> {
        graph: Rc<HashMapRefGraph<T>>,
        index: usize,
    }

    impl<T: Clone> HashMapGraphRef<T> {
        /// Copy out the referenced value.
        pub fn get(&self) -> T {
            let slots = self.graph.data.borrow();
            let value = slots[self.index].borrow();
            value.clone()
        }

        /// Overwrite the referenced value.
        pub fn set(&self, value: T) {
            let slots = self.graph.data.borrow();
            *slots[self.index].borrow_mut() = value;
        }
    }

    impl<T> HashMapGraphRef<T> {
        /// True when both handles name the same slot of the same graph.
        pub fn ptr_eq(&self, other: &HashMapGraphRef<T>) -> bool {
            self.index == other.index && Rc::ptr_eq(&self.graph, &other.graph)
        }
    }

    /// Clone context using HashMap for lookups.
    pub struct HashMapCloneContext<T> {
        map: RefCell<HashMap<*const HashMapRefGraph<T>, Rc<HashMapRefGraph<T>>>>,
    }

    impl<T: Clone> HashMapCloneContext<T> {
        pub fn new() -> Self {
            Self {
                map: RefCell::new(HashMap::new()),
            }
        }

        /// Clone a single ref using HashMap lookup.
        ///
        /// A graph seen earlier in this context resolves to its cached copy,
        /// which is what preserves aliasing between refs.
        pub fn clone_ref(&self, r: &HashMapGraphRef<T>) -> HashMapGraphRef<T> {
            let key = Rc::as_ptr(&r.graph);
            let replacement = self
                .map
                .borrow_mut()
                .entry(key)
                .or_insert_with(|| r.graph.deep_clone_graph())
                .clone();

            HashMapGraphRef {
                graph: replacement,
                index: r.index,
            }
        }
    }

    impl<T: Clone> Default for HashMapCloneContext<T> {
        fn default() -> Self {
            Self::new()
        }
    }
}
385
386// ============================================================================
387// Two-phase index-based approach (like user's implementation)
388// ============================================================================
389
pub mod index_based {
    //! Two-phase index-based deep cloning (matching user's approach).
    //!
    //! Phase 1 (Prepare): Walk through all refs, build HashMap lookup and pos_array
    //! Phase 2 (Perform): Walk through again, use pos_array for O(1) lookups
    //!
    //! This avoids HashMap lookups during the actual clone, but has overhead:
    //! - Two passes over all data
    //! - Cloning pos_array for each clone operation
    //! - Pre-allocating ref_array

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    /// A reference that supports two-phase cloning.
    pub struct IndexRef<T> {
        inner: Rc<RefCell<T>>,
    }

    impl<T> IndexRef<T> {
        /// Wrap a value in a new shared cell.
        pub fn new(value: T) -> Self {
            IndexRef {
                inner: Rc::new(RefCell::new(value)),
            }
        }

        /// Raw pointer identity of the underlying cell (used as a map key
        /// during the prepare phase).
        pub fn ptr(&self) -> *const RefCell<T> {
            Rc::as_ptr(&self.inner)
        }
    }

    impl<T: Clone> IndexRef<T> {
        /// Copy out the current value.
        pub fn get(&self) -> T {
            self.inner.borrow().clone()
        }

        /// Replace the current value.
        pub fn set(&self, value: T) {
            *self.inner.borrow_mut() = value;
        }
    }

    impl<T> Clone for IndexRef<T> {
        /// Shallow clone: both handles share the same cell.
        fn clone(&self) -> Self {
            IndexRef {
                inner: Rc::clone(&self.inner),
            }
        }
    }

    impl<T> IndexRef<T> {
        /// True when both handles share the same underlying cell.
        pub fn ptr_eq(&self, other: &IndexRef<T>) -> bool {
            Rc::ptr_eq(&self.inner, &other.inner)
        }
    }

    /// Preparation node - built once, stores the reference structure.
    /// Uses HashMap to detect shared references.
    pub struct PrepareIndexCloningNode<T> {
        /// (is_existing, position) for each reference in traversal order
        pub pos_array: Vec<(bool, usize)>,
        /// Maps raw pointer -> position in ref_array
        lookup_map: HashMap<*const RefCell<T>, usize>,
        /// Counter for unique references
        ref_pos_counter: usize,
    }

    impl<T> PrepareIndexCloningNode<T> {
        pub fn new() -> Self {
            Self {
                pos_array: Vec::new(),
                lookup_map: HashMap::new(),
                ref_pos_counter: 0,
            }
        }

        pub fn with_capacity(capacity: usize) -> Self {
            Self {
                pos_array: Vec::with_capacity(capacity),
                lookup_map: HashMap::with_capacity(capacity),
                ref_pos_counter: 0,
            }
        }

        /// Handle a reference during preparation phase.
        /// Records whether it's new or existing, and its position.
        ///
        /// Positions for new references are handed out sequentially
        /// (0, 1, 2, ...) in first-seen order — the perform phase relies
        /// on this to fill its `ref_array` by pushing.
        pub fn handle_reference(&mut self, r: &IndexRef<T>) {
            let ptr = r.ptr();
            if let Some(&existing_pos) = self.lookup_map.get(&ptr) {
                // Already seen this reference
                self.pos_array.push((true, existing_pos));
            } else {
                // New reference
                let pos = self.ref_pos_counter;
                self.ref_pos_counter += 1;
                self.lookup_map.insert(ptr, pos);
                self.pos_array.push((false, pos));
            }
        }

        /// Get the number of unique references.
        pub fn unique_count(&self) -> usize {
            self.ref_pos_counter
        }
    }

    impl<T> Default for PrepareIndexCloningNode<T> {
        fn default() -> Self {
            Self::new()
        }
    }

    /// Perform node - created from PrepareNode for each clone operation.
    pub struct PerformIndexCloningNode<T> {
        /// Current position in pos_array
        counter: usize,
        /// New references, filled lazily in first-seen order; entry `i` is
        /// valid once the `i`-th unique reference has been cloned.
        ref_array: Vec<Rc<RefCell<T>>>,
        /// Cloned from PrepareNode
        pos_array: Vec<(bool, usize)>,
    }

    impl<T: Clone> PerformIndexCloningNode<T> {
        /// Create from a prepare node. This clones the pos_array.
        ///
        /// `ref_array` starts empty with capacity pre-reserved and is filled
        /// as unique references are first encountered. (A previous version
        /// pre-filled it with `std::mem::zeroed()` dummies, which is
        /// undefined behavior for any `T` that is not valid as all-zero
        /// bytes — e.g. `String`, `Box`, references. Because prepare assigns
        /// positions sequentially in first-seen order, pushing in that same
        /// order keeps indices aligned with no unsafe code.)
        pub fn from_prepare_node(node: &PrepareIndexCloningNode<T>) -> Self {
            Self {
                counter: 0,
                ref_array: Vec::with_capacity(node.ref_pos_counter),
                pos_array: node.pos_array.clone(), // This clone is expensive!
            }
        }

        /// Handle a reference during clone phase.
        /// Must be called in the same order as during preparation.
        pub fn handle_reference(&mut self, orig: &IndexRef<T>) -> IndexRef<T> {
            let pos = self.counter;
            self.counter += 1;

            let (exists, array_pos) = self.pos_array[pos];

            if !exists {
                // First time seeing this reference in this clone. Prepare
                // handed out positions sequentially, so the next unique
                // reference always lands at ref_array.len().
                debug_assert_eq!(array_pos, self.ref_array.len());
                let new_ref = Rc::new(RefCell::new(orig.inner.borrow().clone()));
                self.ref_array.push(Rc::clone(&new_ref));
                IndexRef { inner: new_ref }
            } else {
                // Already cloned, return the cached one
                IndexRef {
                    inner: Rc::clone(&self.ref_array[array_pos]),
                }
            }
        }
    }

    /// Convenience function to prepare a slice of refs.
    pub fn prepare_refs<T>(refs: &[IndexRef<T>]) -> PrepareIndexCloningNode<T> {
        let mut node = PrepareIndexCloningNode::with_capacity(refs.len());
        for r in refs {
            node.handle_reference(r);
        }
        node
    }

    /// Convenience function to clone refs using a prepare node.
    pub fn clone_refs<T: Clone>(
        refs: &[IndexRef<T>],
        prepare_node: &PrepareIndexCloningNode<T>,
    ) -> Vec<IndexRef<T>> {
        let mut perform_node = PerformIndexCloningNode::from_prepare_node(prepare_node);
        refs.iter()
            .map(|r| perform_node.handle_reference(r))
            .collect()
    }
}
573
#[cfg(test)]
mod tests {
    use super::*;

    // Plain `clone()` outside a deep-clone scope must alias the same slot.
    #[test]
    fn test_shallow_clone_same_data() {
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();

        assert!(a.ptr_eq(&b));
        assert_eq!(a.get(), 42);
        assert_eq!(b.get(), 42);

        a.set(100);
        assert_eq!(b.get(), 100);
    }

    // Refs that aliased before deep_clone must alias (new data) after it.
    #[test]
    fn test_deep_clone_preserves_structure() {
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();

        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));

        // a2 and b2 should point to same new data
        assert!(a2.ptr_eq(&b2));

        // But different from original
        assert!(!a.ptr_eq(&a2));
        assert!(!b.ptr_eq(&b2));

        // Verify independence
        a2.set(999);
        assert_eq!(b2.get(), 999); // Same data
        assert_eq!(a.get(), 42); // Original unchanged
    }

    // Each source graph must map to its own distinct cloned graph.
    #[test]
    fn test_deep_clone_multiple_graphs() {
        let graph1 = RefGraph::new();
        let graph2 = RefGraph::new();

        let a1 = graph1.create(1);
        let b1 = a1.clone();
        let a2 = graph2.create(2);
        let b2 = a2.clone();

        let cloned = deep_clone(&(a1.clone(), b1.clone(), a2.clone(), b2.clone()));

        // Same structure preserved within each graph
        assert!(cloned.0.ptr_eq(&cloned.1));
        assert!(cloned.2.ptr_eq(&cloned.3));

        // Different graphs remain different
        assert!(!cloned.0.ptr_eq(&cloned.2));
    }

    // Distinct indices within one graph stay distinct after cloning.
    #[test]
    fn test_deep_clone_different_indices() {
        let graph = RefGraph::new();
        let a = graph.create(1);
        let b = graph.create(2);
        let c = a.clone(); // Same index as a

        let (a2, b2, c2) = deep_clone(&(a.clone(), b.clone(), c.clone()));

        // a2 and c2 same, b2 different index
        assert!(a2.ptr_eq(&c2));
        assert!(!a2.ptr_eq(&b2));
        assert!(a2.same_graph(&b2));

        // Values preserved
        assert_eq!(a2.get(), 1);
        assert_eq!(b2.get(), 2);
    }

    // Deep clone must reach refs nested inside derived Clone impls.
    #[test]
    fn test_nested_struct() {
        #[derive(Clone)]
        struct Inner {
            x: GraphRef<i32>,
            y: GraphRef<i32>,
        }

        #[derive(Clone)]
        struct Outer {
            inner: Inner,
            z: GraphRef<i32>,
        }

        let graph = RefGraph::new();
        let r = graph.create(42);

        let original = Outer {
            inner: Inner {
                x: r.clone(),
                y: r.clone(),
            },
            z: r.clone(),
        };

        let cloned = deep_clone(&original);

        // All three refs in cloned should point to same new data
        assert!(cloned.inner.x.ptr_eq(&cloned.inner.y));
        assert!(cloned.inner.x.ptr_eq(&cloned.z));

        // Independent from original
        cloned.inner.x.set(999);
        assert_eq!(cloned.z.get(), 999);
        assert_eq!(original.z.get(), 42);
    }

    // ========================================================================
    // Thread safety tests
    // ========================================================================

    // Compile-time check that the public types are Send + Sync.
    #[test]
    fn test_send_sync() {
        fn assert_send_sync<T: Send + Sync>() {}
        assert_send_sync::<RefGraph<i32>>();
        assert_send_sync::<GraphRef<i32>>();
        assert_send_sync::<RefGraph<String>>();
        assert_send_sync::<GraphRef<String>>();
    }

    // Eight threads deep-cloning the same data concurrently must each get
    // an independent, structure-preserving copy.
    #[test]
    fn test_concurrent_deep_clone() {
        use std::thread;

        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();
        let c = graph.create(100);

        let data = Arc::new((a.clone(), b.clone(), c.clone()));

        let handles: Vec<_> = (0..8)
            .map(|i| {
                let data = Arc::clone(&data);
                thread::spawn(move || {
                    let cloned = deep_clone(data.as_ref());

                    // Structure preserved: a' and b' point to same new data
                    assert!(cloned.0.ptr_eq(&cloned.1));
                    // c' is in same graph but different index
                    assert!(cloned.0.same_graph(&cloned.2));
                    assert!(!cloned.0.ptr_eq(&cloned.2));

                    // Independence: mutation doesn't affect original
                    cloned.0.set(i * 1000);
                    assert_eq!(cloned.1.get(), i * 1000);
                    assert_eq!(cloned.2.get(), 100);

                    // Original unchanged
                    assert_eq!(data.0.get(), 42);
                })
            })
            .collect();

        for h in handles {
            h.join().unwrap();
        }
    }

    // One writer and four readers hammering the same slot; every read must
    // observe a value the writer actually stored.
    #[test]
    fn test_concurrent_read_write() {
        use std::sync::Barrier;
        use std::thread;

        let graph = RefGraph::new();
        let r = graph.create(0i64);
        let barrier = Arc::new(Barrier::new(5));

        // 1 writer thread
        let writer_ref = r.clone();
        let writer_barrier = Arc::clone(&barrier);
        let writer = thread::spawn(move || {
            writer_barrier.wait();
            for i in 0..1000 {
                writer_ref.set(i);
            }
        });

        // 4 reader threads
        let readers: Vec<_> = (0..4)
            .map(|_| {
                let reader_ref = r.clone();
                let reader_barrier = Arc::clone(&barrier);
                thread::spawn(move || {
                    reader_barrier.wait();
                    let mut last = -1i64;
                    for _ in 0..1000 {
                        let val = reader_ref.get();
                        // Values should be valid i64 (no torn reads)
                        assert!(val >= 0 && val < 1000);
                        let _ = last;
                        last = val;
                    }
                })
            })
            .collect();

        writer.join().unwrap();
        for r in readers {
            r.join().unwrap();
        }
    }

    // 50 threads x 100 deep clones over shared refs: structure, values,
    // and the originals must all survive.
    #[test]
    fn test_deep_clone_stress() {
        use std::sync::Barrier;
        use std::thread;

        let graph = RefGraph::new();
        let refs: Vec<_> = (0..10).map(|i| graph.create(i)).collect();
        let data = Arc::new(refs);
        let barrier = Arc::new(Barrier::new(50));

        let handles: Vec<_> = (0..50)
            .map(|_| {
                let data = Arc::clone(&data);
                let barrier = Arc::clone(&barrier);
                thread::spawn(move || {
                    barrier.wait();
                    for _ in 0..100 {
                        let cloned = deep_clone(data.as_ref());
                        // All cloned refs should be in the same graph
                        for j in 1..cloned.len() {
                            assert!(cloned[0].same_graph(&cloned[j]));
                        }
                        // Values preserved
                        for (j, r) in cloned.iter().enumerate() {
                            assert_eq!(r.get(), j as i32);
                        }
                        // Independence
                        cloned[0].set(999);
                        assert_eq!(data[0].get(), 0);
                    }
                })
            })
            .collect();

        for h in handles {
            h.join().unwrap();
        }
    }

    // ========================================================================
    // Panic safety tests
    // ========================================================================

    // A panic inside deep_clone must not leave the thread stuck in
    // deep-clone mode (the guard's Drop runs during unwinding).
    #[test]
    fn test_panic_resets_generation() {
        use std::panic;

        struct PanicOnClone;

        impl Clone for PanicOnClone {
            fn clone(&self) -> Self {
                panic!("clone panic!");
            }
        }

        // This will panic during deep_clone
        let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
            deep_clone(&PanicOnClone);
        }));
        assert!(result.is_err());

        // Generation should be reset — subsequent deep_clone should work
        let graph = RefGraph::new();
        let a = graph.create(42);
        let b = a.clone();
        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
        assert!(a2.ptr_eq(&b2));
        assert_eq!(a2.get(), 42);
    }

    // A panic in a nested scope must only unwind one depth level; the
    // thread-local state must still end up clean afterwards.
    #[test]
    fn test_panic_nested_recovery() {
        use std::panic;

        // Outer deep_clone that will survive; inner one panics
        let graph = RefGraph::new();
        let a = graph.create(42);

        // Simulate: begin outer deep clone, then panic in inner
        let _result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
            let _outer_guard = begin_deep_clone();
            // Inner deep_clone panics
            let _inner_result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
                let _inner_guard = begin_deep_clone();
                panic!("inner panic");
            }));
            // After inner panic, outer should still work at depth 0
            // (inner guard was dropped by unwinding, decrementing depth)
        }));
        // outer_guard drops here, resetting generation

        // Should be back to clean state
        let b = a.clone();
        let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
        assert!(a2.ptr_eq(&b2));
        assert_eq!(a2.get(), 42);
    }

    // ========================================================================
    // Nesting tests
    // ========================================================================

    // A Clone impl that itself clones GraphRefs must reuse the outer
    // generation rather than starting a fresh one.
    #[test]
    fn test_nested_deep_clone() {
        // A type whose Clone impl calls deep_clone internally
        #[derive(Debug)]
        struct Container {
            inner: Vec<GraphRef<i32>>,
        }

        impl Clone for Container {
            fn clone(&self) -> Self {
                // This should work with nesting — inner deep_clone
                // reuses the outer generation
                Container {
                    inner: self.inner.clone(),
                }
            }
        }

        let graph = RefGraph::new();
        let a = graph.create(10);
        let b = a.clone();

        let container = Container {
            inner: vec![a.clone(), b.clone()],
        };

        let cloned = deep_clone(&container);
        assert!(cloned.inner[0].ptr_eq(&cloned.inner[1]));
        cloned.inner[0].set(999);
        assert_eq!(cloned.inner[1].get(), 999);
        assert_eq!(container.inner[0].get(), 10);
    }

    // Manual guards: nesting keeps one generation alive until the
    // outermost guard drops.
    #[test]
    fn test_begin_deep_clone_nested_guards() {
        let graph = RefGraph::new();
        let a = graph.create(1);
        let b = a.clone();

        {
            let _outer = begin_deep_clone();
            let cloned_a = a.clone(); // deep clone
            {
                let _inner = begin_deep_clone();
                let cloned_b = b.clone(); // same generation as outer
                // Both should point to same new graph
                assert!(cloned_a.same_graph(&cloned_b));
            }
            // Inner guard dropped, but depth > 0, so generation stays
            let cloned_b2 = b.clone();
            assert!(cloned_a.ptr_eq(&cloned_b2));
        }
        // Outer guard dropped, depth == 0, generation reset

        // Now a regular clone should be shallow
        let shallow = a.clone();
        assert!(a.ptr_eq(&shallow));
    }

    // ========================================================================
    // Edge case tests
    // ========================================================================

    // Graphs holding different value types must clone independently
    // within one deep_clone pass.
    #[test]
    fn test_multiple_graph_types() {
        #[derive(Clone)]
        struct Mixed {
            nums: Vec<GraphRef<i32>>,
            strs: Vec<GraphRef<String>>,
        }

        let g1 = RefGraph::new();
        let g2 = RefGraph::new();

        let n = g1.create(42);
        let s = g2.create("hello".to_string());

        let data = Mixed {
            nums: vec![n.clone(), n.clone()],
            strs: vec![s.clone(), s.clone()],
        };

        let cloned = deep_clone(&data);
        assert!(cloned.nums[0].ptr_eq(&cloned.nums[1]));
        assert!(cloned.strs[0].ptr_eq(&cloned.strs[1]));

        cloned.nums[0].set(999);
        assert_eq!(cloned.nums[1].get(), 999);
        assert_eq!(data.nums[0].get(), 42);

        cloned.strs[0].set("world".to_string());
        assert_eq!(cloned.strs[1].get(), "world");
        assert_eq!(data.strs[0].get(), "hello");
    }

    // 15k refs (10k unique + 5k aliases): sharing, values, and
    // independence must hold at scale.
    #[test]
    fn test_large_graph() {
        let graph = RefGraph::new();
        let refs: Vec<_> = (0..10_000).map(|i| graph.create(i)).collect();

        // Add some shared refs
        let mut all_refs = refs.clone();
        for i in 0..5_000 {
            all_refs.push(refs[i].clone());
        }

        let cloned = deep_clone(&all_refs);

        assert_eq!(cloned.len(), 15_000);

        // Check sharing is preserved
        for i in 0..5_000 {
            assert!(cloned[i].ptr_eq(&cloned[10_000 + i]));
        }

        // Check values
        for i in 0..10_000 {
            assert_eq!(cloned[i].get(), i as i32);
        }

        // Independence
        cloned[0].set(-1);
        assert_eq!(all_refs[0].get(), 0);
    }
}