use std::cell::Cell;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::{Arc, Weak};
use parking_lot::RwLock;
// Monotonically increasing id handed out once per top-level deep-clone
// session. 0 is reserved as the "no session" sentinel (see CLONE_GENERATION),
// so the counter starts at 1.
static NEXT_GENERATION: AtomicU64 = AtomicU64::new(1);
thread_local! {
// Generation of the deep-clone session currently active on this thread,
// or 0 when clones should be shallow.
static CLONE_GENERATION: Cell<u64> = const { Cell::new(0) };
// Nesting depth of begin_deep_clone() guards on this thread; the
// generation is cleared only when the outermost guard drops.
static CLONE_DEPTH: Cell<u32> = const { Cell::new(0) };
}
/// A shared arena of values addressed by index. `GraphRef` handles point into
/// it; deep-cloning a set of handles copies the arena once per session and
/// re-points all of them at the copy (see `GraphRef`'s `Clone` impl).
pub struct RefGraph<T: Send + Sync> {
// Backing storage; a GraphRef's `index` is a position in this Vec.
data: RwLock<Vec<T>>,
// Per-generation cache of deep clones (generation id -> weak clone) so
// that every handle cloned in one deep-clone session lands in the same
// copied graph. Weak entries let abandoned clones be evicted.
clone_cache: RwLock<Vec<(u64, Weak<RefGraph<T>>)>>,
}
impl<T: Send + Sync> RefGraph<T> {
    /// Builds an empty graph behind an `Arc` so handles can share ownership.
    pub fn new() -> Arc<Self> {
        let graph = RefGraph {
            data: RwLock::new(Vec::new()),
            clone_cache: RwLock::new(Vec::new()),
        };
        Arc::new(graph)
    }

    /// Appends `value` to the arena and returns a handle pointing at it.
    pub fn create(self: &Arc<Self>, value: T) -> GraphRef<T> {
        let mut storage = self.data.write();
        let slot = storage.len();
        storage.push(value);
        drop(storage);
        GraphRef {
            graph: Arc::clone(self),
            index: slot,
        }
    }

    /// Number of values stored in the graph.
    pub fn len(&self) -> usize {
        let storage = self.data.read();
        storage.len()
    }

    /// True when no values have been created yet.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Drops every cached deep-clone entry. Live clones themselves are
    /// unaffected; only the weak bookkeeping is discarded.
    pub fn clear_cache(&self) {
        let mut cache = self.clone_cache.write();
        cache.clear();
    }
}
impl<T: Clone + Send + Sync> RefGraph<T> {
// Copies the whole arena into a brand-new graph with an empty clone cache.
fn deep_clone_graph(&self) -> Arc<RefGraph<T>> {
let data = self.data.read();
Arc::new(RefGraph {
data: RwLock::new(data.clone()),
clone_cache: RwLock::new(Vec::new()),
})
}
// Returns the deep clone of this graph for the given clone generation,
// creating and caching it on first use. Double-checked: a read-lock fast
// path first, then a re-check under the write lock before inserting.
#[inline]
fn get_or_create_clone(self: &Arc<Self>, current_gen: u64) -> Arc<RefGraph<T>> {
{
// Fast path: a live cache entry for this generation already exists.
let cache = self.clone_cache.read();
for (generation, weak) in cache.iter() {
if *generation == current_gen {
if let Some(arc) = weak.upgrade() {
return arc;
}
// Entry found but its clone was dropped; rebuild below.
break;
}
}
}
// Clone outside the cache lock to keep the critical section short.
let new_graph = self.deep_clone_graph();
{
let mut cache = self.clone_cache.write();
// Re-check under the write lock before inserting. Generations are
// handed out per-thread, so a same-generation race is not expected;
// this second scan is defensive.
for (generation, weak) in cache.iter() {
if *generation == current_gen {
if let Some(arc) = weak.upgrade() {
return arc;
}
break;
}
}
// Evict entries whose clones have been dropped, then record ours.
cache.retain(|(_, w)| w.strong_count() > 0);
cache.push((current_gen, Arc::downgrade(&new_graph)));
}
new_graph
}
}
/// Handle to one slot of a `RefGraph`. Cloning is shallow normally, but
/// inside a `deep_clone`/`begin_deep_clone` session the clone is re-pointed
/// at a per-session deep copy of the graph (see the `Clone` impl below).
pub struct GraphRef<T: Send + Sync> {
graph: Arc<RefGraph<T>>,
// Position of the referenced value inside `graph.data`.
index: usize,
}
impl<T: Send + Sync> GraphRef<T> {
    /// Returns a copy of the referenced value.
    pub fn get(&self) -> T
    where
        T: Clone,
    {
        let storage = self.graph.data.read();
        storage[self.index].clone()
    }

    /// Overwrites the referenced value.
    pub fn set(&self, value: T) {
        let mut storage = self.graph.data.write();
        storage[self.index] = value;
    }

    /// Mutates the referenced value in place while holding the write lock.
    pub fn update<F>(&self, f: F)
    where
        F: FnOnce(&mut T),
    {
        let mut storage = self.graph.data.write();
        f(&mut storage[self.index]);
    }

    /// Position of this handle inside its graph's arena.
    pub fn index(&self) -> usize {
        self.index
    }

    /// True when both handles name the same slot of the same graph instance.
    pub fn ptr_eq(&self, other: &GraphRef<T>) -> bool {
        self.index == other.index && Arc::ptr_eq(&self.graph, &other.graph)
    }

    /// True when both handles belong to the same graph instance.
    pub fn same_graph(&self, other: &GraphRef<T>) -> bool {
        Arc::ptr_eq(&self.graph, &other.graph)
    }
}
impl<T: Clone + Send + Sync> Clone for GraphRef<T> {
#[inline]
fn clone(&self) -> Self {
let current_gen = CLONE_GENERATION.with(|g| g.get());
if current_gen > 0 {
let new_graph = self.graph.get_or_create_clone(current_gen);
GraphRef {
graph: new_graph,
index: self.index,
}
} else {
GraphRef {
graph: Arc::clone(&self.graph),
index: self.index,
}
}
}
}
impl<T: std::fmt::Debug + Clone + Send + Sync> std::fmt::Debug for GraphRef<T> {
    /// Shows the referenced value (copied out under the read lock) plus
    /// the slot index.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let value = self.get();
        f.debug_struct("GraphRef")
            .field("value", &value)
            .field("index", &self.index)
            .finish()
    }
}
/// Deep-clones `value`: every `GraphRef` reached while cloning is moved into
/// a fresh copy of its graph, with aliasing among the handles preserved.
/// The original handles and graphs are left untouched.
pub fn deep_clone<T: Clone>(value: &T) -> T {
    // Keep the session guard alive for the whole clone; it clears the
    // thread's generation again on drop (including during unwinding).
    let _scope = begin_deep_clone();
    T::clone(value)
}
/// RAII token for a deep-clone session. While at least one guard is alive on
/// a thread, `GraphRef::clone` performs deep clones; when the outermost one
/// drops, cloning reverts to shallow.
pub struct DeepCloneGuard {
    _private: (),
}

impl Drop for DeepCloneGuard {
    fn drop(&mut self) {
        // Leave one nesting level. Drop runs during unwinding too, so a
        // panicking clone cannot leak an active generation.
        let remaining = CLONE_DEPTH.with(|depth| {
            let next = depth.get().saturating_sub(1);
            depth.set(next);
            next
        });
        if remaining == 0 {
            // Outermost guard gone: back to shallow cloning.
            CLONE_GENERATION.with(|generation| generation.set(0));
        }
    }
}
/// Opens (or nests into) a deep-clone session on the current thread and
/// returns a guard that closes it on drop. Only the outermost call picks a
/// fresh generation; nested calls merely bump the depth counter so they
/// share the outer session's generation.
pub fn begin_deep_clone() -> DeepCloneGuard {
    let previous_depth = CLONE_DEPTH.with(|depth| {
        let current = depth.get();
        depth.set(current + 1);
        current
    });
    if previous_depth == 0 {
        // 0 is the "no session" sentinel, so skip it in the (practically
        // unreachable) case that the counter wraps all the way around.
        let mut generation = NEXT_GENERATION.fetch_add(1, Ordering::Relaxed);
        if generation == 0 {
            generation = NEXT_GENERATION.fetch_add(1, Ordering::Relaxed);
        }
        CLONE_GENERATION.with(|g| g.set(generation));
    }
    DeepCloneGuard { _private: () }
}
pub mod hashmap_based {
    //! Single-threaded variant: identity-preserving deep clones driven by an
    //! explicit context that remembers, per source graph, the clone it made.
    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    /// Arena of independently borrowable cells, shared through `Rc`.
    pub struct HashMapRefGraph<T> {
        data: RefCell<Vec<RefCell<T>>>,
    }

    impl<T> HashMapRefGraph<T> {
        /// Creates an empty graph behind an `Rc`.
        pub fn new() -> Rc<Self> {
            let graph = HashMapRefGraph {
                data: RefCell::new(Vec::new()),
            };
            Rc::new(graph)
        }

        /// Stores `value` and returns a handle to its slot.
        pub fn create(self: &Rc<Self>, value: T) -> HashMapGraphRef<T> {
            let slot = {
                let mut cells = self.data.borrow_mut();
                cells.push(RefCell::new(value));
                cells.len() - 1
            };
            HashMapGraphRef {
                graph: Rc::clone(self),
                index: slot,
            }
        }
    }

    impl<T: Clone> HashMapRefGraph<T> {
        /// Copies every cell into a brand-new graph.
        fn deep_clone_graph(self: &Rc<Self>) -> Rc<HashMapRefGraph<T>> {
            let source = self.data.borrow();
            let mut copies = Vec::with_capacity(source.len());
            for cell in source.iter() {
                copies.push(RefCell::new(cell.borrow().clone()));
            }
            Rc::new(HashMapRefGraph {
                data: RefCell::new(copies),
            })
        }
    }

    /// Handle to one slot of a `HashMapRefGraph`.
    pub struct HashMapGraphRef<T> {
        graph: Rc<HashMapRefGraph<T>>,
        index: usize,
    }

    impl<T: Clone> HashMapGraphRef<T> {
        /// Returns a copy of the referenced value.
        pub fn get(&self) -> T {
            let cells = self.graph.data.borrow();
            let value = cells[self.index].borrow();
            value.clone()
        }

        /// Overwrites the referenced value.
        pub fn set(&self, value: T) {
            let cells = self.graph.data.borrow();
            *cells[self.index].borrow_mut() = value;
        }
    }

    impl<T> HashMapGraphRef<T> {
        /// True when both handles name the same slot of the same graph.
        pub fn ptr_eq(&self, other: &HashMapGraphRef<T>) -> bool {
            self.index == other.index && Rc::ptr_eq(&self.graph, &other.graph)
        }
    }

    /// Maps each source graph (by address) to the clone produced during one
    /// deep-clone pass, so aliasing among handles is preserved.
    pub struct HashMapCloneContext<T> {
        map: RefCell<HashMap<*const HashMapRefGraph<T>, Rc<HashMapRefGraph<T>>>>,
    }

    impl<T: Clone> HashMapCloneContext<T> {
        /// Starts a fresh pass with an empty graph map.
        pub fn new() -> Self {
            HashMapCloneContext {
                map: RefCell::new(HashMap::new()),
            }
        }

        /// Clones `r`, lazily creating (or reusing) the clone of its graph.
        pub fn clone_ref(&self, r: &HashMapGraphRef<T>) -> HashMapGraphRef<T> {
            let key = Rc::as_ptr(&r.graph);
            let mut graphs = self.map.borrow_mut();
            let clone = graphs
                .entry(key)
                .or_insert_with(|| r.graph.deep_clone_graph())
                .clone();
            HashMapGraphRef {
                graph: clone,
                index: r.index,
            }
        }
    }

    impl<T: Clone> Default for HashMapCloneContext<T> {
        fn default() -> Self {
            Self::new()
        }
    }
}
pub mod index_based {
    //! Two-pass, single-threaded deep cloning: a prepare pass records which
    //! references alias each other, then a perform pass replays that plan
    //! while cloning, so aliasing is preserved in the copy.
    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    /// A shared, mutable cell (`Rc<RefCell<T>>`) with identity semantics.
    pub struct IndexRef<T> {
        inner: Rc<RefCell<T>>,
    }

    impl<T> IndexRef<T> {
        /// Wraps `value` in a fresh shared cell.
        pub fn new(value: T) -> Self {
            IndexRef {
                inner: Rc::new(RefCell::new(value)),
            }
        }

        /// Stable address of the cell; used as an identity key.
        pub fn ptr(&self) -> *const RefCell<T> {
            Rc::as_ptr(&self.inner)
        }
    }

    impl<T: Clone> IndexRef<T> {
        /// Returns a copy of the stored value.
        pub fn get(&self) -> T {
            self.inner.borrow().clone()
        }

        /// Overwrites the stored value, visible through every alias.
        pub fn set(&self, value: T) {
            *self.inner.borrow_mut() = value;
        }
    }

    impl<T> Clone for IndexRef<T> {
        /// Shallow clone: the copy aliases the same cell.
        fn clone(&self) -> Self {
            IndexRef {
                inner: Rc::clone(&self.inner),
            }
        }
    }

    impl<T> IndexRef<T> {
        /// True when both refs alias the same cell.
        pub fn ptr_eq(&self, other: &IndexRef<T>) -> bool {
            Rc::ptr_eq(&self.inner, &other.inner)
        }
    }

    /// First pass: assigns each distinct cell a dense position and records,
    /// per visited reference, whether its cell was already seen.
    pub struct PrepareIndexCloningNode<T> {
        /// `(already_seen, position)` per `handle_reference` call, in order.
        pub pos_array: Vec<(bool, usize)>,
        // cell address -> dense position of its first occurrence
        lookup_map: HashMap<*const RefCell<T>, usize>,
        // next dense position to hand out (== number of distinct cells)
        ref_pos_counter: usize,
    }

    impl<T> PrepareIndexCloningNode<T> {
        pub fn new() -> Self {
            Self {
                pos_array: Vec::new(),
                lookup_map: HashMap::new(),
                ref_pos_counter: 0,
            }
        }

        pub fn with_capacity(capacity: usize) -> Self {
            Self {
                pos_array: Vec::with_capacity(capacity),
                lookup_map: HashMap::with_capacity(capacity),
                ref_pos_counter: 0,
            }
        }

        /// Records one reference. A first occurrence is tagged `false` and
        /// gets a fresh position; a repeat is tagged `true` and carries the
        /// position of its first occurrence.
        pub fn handle_reference(&mut self, r: &IndexRef<T>) {
            let ptr = r.ptr();
            if let Some(&existing_pos) = self.lookup_map.get(&ptr) {
                self.pos_array.push((true, existing_pos));
            } else {
                let pos = self.ref_pos_counter;
                self.ref_pos_counter += 1;
                self.lookup_map.insert(ptr, pos);
                self.pos_array.push((false, pos));
            }
        }

        /// Number of distinct cells seen so far.
        pub fn unique_count(&self) -> usize {
            self.ref_pos_counter
        }
    }

    impl<T> Default for PrepareIndexCloningNode<T> {
        fn default() -> Self {
            Self::new()
        }
    }

    /// Second pass: replays a prepare plan, cloning each distinct cell once
    /// and re-linking repeats to the clone made for their first occurrence.
    pub struct PerformIndexCloningNode<T> {
        // index of the next pos_array entry to replay
        counter: usize,
        // clones created so far, indexed by dense position; grows as first
        // occurrences are replayed (positions were assigned in replay order)
        ref_array: Vec<Rc<RefCell<T>>>,
        pos_array: Vec<(bool, usize)>,
    }

    impl<T: Clone> PerformIndexCloningNode<T> {
        /// Builds a perform node from a finished prepare pass.
        ///
        /// Fix: this used to pre-fill `ref_array` with
        /// `Rc::new(RefCell::new(mem::zeroed()))` placeholders, which is
        /// undefined behavior for any `T` without a valid all-zero
        /// representation (`String`, `Box`, ...) — and the bogus values were
        /// later dropped when overwritten. The array now starts empty and is
        /// filled lazily in `handle_reference`; no `unsafe` is needed.
        pub fn from_prepare_node(node: &PrepareIndexCloningNode<T>) -> Self {
            Self {
                counter: 0,
                ref_array: Vec::with_capacity(node.ref_pos_counter),
                pos_array: node.pos_array.clone(),
            }
        }

        /// Clones (or re-links) the next reference according to the plan.
        /// Must be called with the same references, in the same order, as
        /// the prepare pass.
        pub fn handle_reference(&mut self, orig: &IndexRef<T>) -> IndexRef<T> {
            let pos = self.counter;
            self.counter += 1;
            let (seen_before, array_pos) = self.pos_array[pos];
            if seen_before {
                // Repeat: alias the clone made for the first occurrence.
                IndexRef {
                    inner: Rc::clone(&self.ref_array[array_pos]),
                }
            } else {
                // First occurrences are replayed in the order their positions
                // were assigned, so the new slot is always the next free one.
                debug_assert_eq!(array_pos, self.ref_array.len());
                let new_ref = Rc::new(RefCell::new(orig.inner.borrow().clone()));
                self.ref_array.push(Rc::clone(&new_ref));
                IndexRef { inner: new_ref }
            }
        }
    }

    /// Runs the prepare pass over `refs` in order.
    pub fn prepare_refs<T>(refs: &[IndexRef<T>]) -> PrepareIndexCloningNode<T> {
        let mut node = PrepareIndexCloningNode::with_capacity(refs.len());
        for r in refs {
            node.handle_reference(r);
        }
        node
    }

    /// Runs the perform pass over `refs`, which must be the same slice (same
    /// order) that was given to `prepare_refs`.
    pub fn clone_refs<T: Clone>(
        refs: &[IndexRef<T>],
        prepare_node: &PrepareIndexCloningNode<T>,
    ) -> Vec<IndexRef<T>> {
        let mut perform_node = PerformIndexCloningNode::from_prepare_node(prepare_node);
        refs.iter()
            .map(|r| perform_node.handle_reference(r))
            .collect()
    }
}
#[cfg(test)]
mod tests {
use super::*;
// Outside a deep-clone session, Clone must be shallow: same graph, same slot.
#[test]
fn test_shallow_clone_same_data() {
let graph = RefGraph::new();
let a = graph.create(42);
let b = a.clone();
assert!(a.ptr_eq(&b));
assert_eq!(a.get(), 42);
assert_eq!(b.get(), 42);
a.set(100);
assert_eq!(b.get(), 100);
}
// deep_clone must copy the graph once and keep aliasing between the refs,
// without touching the originals.
#[test]
fn test_deep_clone_preserves_structure() {
let graph = RefGraph::new();
let a = graph.create(42);
let b = a.clone();
let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
assert!(a2.ptr_eq(&b2));
assert!(!a.ptr_eq(&a2));
assert!(!b.ptr_eq(&b2));
a2.set(999);
assert_eq!(b2.get(), 999); assert_eq!(a.get(), 42); }
// Refs from different graphs must land in different per-graph clones.
#[test]
fn test_deep_clone_multiple_graphs() {
let graph1 = RefGraph::new();
let graph2 = RefGraph::new();
let a1 = graph1.create(1);
let b1 = a1.clone();
let a2 = graph2.create(2);
let b2 = a2.clone();
let cloned = deep_clone(&(a1.clone(), b1.clone(), a2.clone(), b2.clone()));
assert!(cloned.0.ptr_eq(&cloned.1));
assert!(cloned.2.ptr_eq(&cloned.3));
assert!(!cloned.0.ptr_eq(&cloned.2));
}
// Refs to different slots of one graph stay distinct but share one clone.
#[test]
fn test_deep_clone_different_indices() {
let graph = RefGraph::new();
let a = graph.create(1);
let b = graph.create(2);
let c = a.clone();
let (a2, b2, c2) = deep_clone(&(a.clone(), b.clone(), c.clone()));
assert!(a2.ptr_eq(&c2));
assert!(!a2.ptr_eq(&b2));
assert!(a2.same_graph(&b2));
assert_eq!(a2.get(), 1);
assert_eq!(b2.get(), 2);
}
// Aliasing must survive through derived Clone impls on nested structs.
#[test]
fn test_nested_struct() {
#[derive(Clone)]
struct Inner {
x: GraphRef<i32>,
y: GraphRef<i32>,
}
#[derive(Clone)]
struct Outer {
inner: Inner,
z: GraphRef<i32>,
}
let graph = RefGraph::new();
let r = graph.create(42);
let original = Outer {
inner: Inner {
x: r.clone(),
y: r.clone(),
},
z: r.clone(),
};
let cloned = deep_clone(&original);
assert!(cloned.inner.x.ptr_eq(&cloned.inner.y));
assert!(cloned.inner.x.ptr_eq(&cloned.z));
cloned.inner.x.set(999);
assert_eq!(cloned.z.get(), 999);
assert_eq!(original.z.get(), 42);
}
// Compile-time check that the main types are Send + Sync.
#[test]
fn test_send_sync() {
fn assert_send_sync<T: Send + Sync>() {}
assert_send_sync::<RefGraph<i32>>();
assert_send_sync::<GraphRef<i32>>();
assert_send_sync::<RefGraph<String>>();
assert_send_sync::<GraphRef<String>>();
}
// Each thread's deep clone must be private: writes to a clone never leak
// into the shared original or into other threads' clones.
#[test]
fn test_concurrent_deep_clone() {
use std::thread;
let graph = RefGraph::new();
let a = graph.create(42);
let b = a.clone();
let c = graph.create(100);
let data = Arc::new((a.clone(), b.clone(), c.clone()));
let handles: Vec<_> = (0..8)
.map(|i| {
let data = Arc::clone(&data);
thread::spawn(move || {
let cloned = deep_clone(data.as_ref());
assert!(cloned.0.ptr_eq(&cloned.1));
assert!(cloned.0.same_graph(&cloned.2));
assert!(!cloned.0.ptr_eq(&cloned.2));
cloned.0.set(i * 1000);
assert_eq!(cloned.1.get(), i * 1000);
assert_eq!(cloned.2.get(), 100);
assert_eq!(data.0.get(), 42);
})
})
.collect();
for h in handles {
h.join().unwrap();
}
}
// One writer and several readers on the same slot; readers only ever see
// values the writer actually stored.
#[test]
fn test_concurrent_read_write() {
use std::sync::Barrier;
use std::thread;
let graph = RefGraph::new();
let r = graph.create(0i64);
let barrier = Arc::new(Barrier::new(5));
let writer_ref = r.clone();
let writer_barrier = Arc::clone(&barrier);
let writer = thread::spawn(move || {
writer_barrier.wait();
for i in 0..1000 {
writer_ref.set(i);
}
});
let readers: Vec<_> = (0..4)
.map(|_| {
let reader_ref = r.clone();
let reader_barrier = Arc::clone(&barrier);
thread::spawn(move || {
reader_barrier.wait();
let mut last = -1i64;
for _ in 0..1000 {
let val = reader_ref.get();
assert!(val >= 0 && val < 1000);
let _ = last;
last = val;
}
})
})
.collect();
writer.join().unwrap();
for r in readers {
r.join().unwrap();
}
}
// Many threads deep-cloning the same ref set repeatedly; every clone must
// be consistent and isolated from the shared original.
#[test]
fn test_deep_clone_stress() {
use std::sync::Barrier;
use std::thread;
let graph = RefGraph::new();
let refs: Vec<_> = (0..10).map(|i| graph.create(i)).collect();
let data = Arc::new(refs);
let barrier = Arc::new(Barrier::new(50));
let handles: Vec<_> = (0..50)
.map(|_| {
let data = Arc::clone(&data);
let barrier = Arc::clone(&barrier);
thread::spawn(move || {
barrier.wait();
for _ in 0..100 {
let cloned = deep_clone(data.as_ref());
for j in 1..cloned.len() {
assert!(cloned[0].same_graph(&cloned[j]));
}
for (j, r) in cloned.iter().enumerate() {
assert_eq!(r.get(), j as i32);
}
cloned[0].set(999);
assert_eq!(data[0].get(), 0);
}
})
})
.collect();
for h in handles {
h.join().unwrap();
}
}
// A panic inside a deep clone must not leave the thread-local generation
// set; subsequent deep clones must still work.
#[test]
fn test_panic_resets_generation() {
use std::panic;
struct PanicOnClone;
impl Clone for PanicOnClone {
fn clone(&self) -> Self {
panic!("clone panic!");
}
}
let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
deep_clone(&PanicOnClone);
}));
assert!(result.is_err());
let graph = RefGraph::new();
let a = graph.create(42);
let b = a.clone();
let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
assert!(a2.ptr_eq(&b2));
assert_eq!(a2.get(), 42);
}
// A panic inside a *nested* guard must unwind the depth counter correctly
// and leave the machinery usable afterwards.
#[test]
fn test_panic_nested_recovery() {
use std::panic;
let graph = RefGraph::new();
let a = graph.create(42);
let _result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
let _outer_guard = begin_deep_clone();
let _inner_result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
let _inner_guard = begin_deep_clone();
panic!("inner panic");
}));
}));
let b = a.clone();
let (a2, b2) = deep_clone(&(a.clone(), b.clone()));
assert!(a2.ptr_eq(&b2));
assert_eq!(a2.get(), 42);
}
// Deep clone through a hand-written Clone impl that clones a Vec of refs.
#[test]
fn test_nested_deep_clone() {
#[derive(Debug)]
struct Container {
inner: Vec<GraphRef<i32>>,
}
impl Clone for Container {
fn clone(&self) -> Self {
Container {
inner: self.inner.clone(),
}
}
}
let graph = RefGraph::new();
let a = graph.create(10);
let b = a.clone();
let container = Container {
inner: vec![a.clone(), b.clone()],
};
let cloned = deep_clone(&container);
assert!(cloned.inner[0].ptr_eq(&cloned.inner[1]));
cloned.inner[0].set(999);
assert_eq!(cloned.inner[1].get(), 999);
assert_eq!(container.inner[0].get(), 10);
}
// Nested guards share the outer session's generation, so clones made at
// different nesting depths land in the same copied graph; after all guards
// drop, cloning is shallow again.
#[test]
fn test_begin_deep_clone_nested_guards() {
let graph = RefGraph::new();
let a = graph.create(1);
let b = a.clone();
{
let _outer = begin_deep_clone();
let cloned_a = a.clone(); {
let _inner = begin_deep_clone();
let cloned_b = b.clone(); assert!(cloned_a.same_graph(&cloned_b));
}
let cloned_b2 = b.clone();
assert!(cloned_a.ptr_eq(&cloned_b2));
}
let shallow = a.clone();
assert!(a.ptr_eq(&shallow));
}
// One deep-clone session spanning graphs of different element types.
#[test]
fn test_multiple_graph_types() {
#[derive(Clone)]
struct Mixed {
nums: Vec<GraphRef<i32>>,
strs: Vec<GraphRef<String>>,
}
let g1 = RefGraph::new();
let g2 = RefGraph::new();
let n = g1.create(42);
let s = g2.create("hello".to_string());
let data = Mixed {
nums: vec![n.clone(), n.clone()],
strs: vec![s.clone(), s.clone()],
};
let cloned = deep_clone(&data);
assert!(cloned.nums[0].ptr_eq(&cloned.nums[1]));
assert!(cloned.strs[0].ptr_eq(&cloned.strs[1]));
cloned.nums[0].set(999);
assert_eq!(cloned.nums[1].get(), 999);
assert_eq!(data.nums[0].get(), 42);
cloned.strs[0].set("world".to_string());
assert_eq!(cloned.strs[1].get(), "world");
assert_eq!(data.strs[0].get(), "hello");
}
// Scale check: a large arena with many aliased refs clones correctly.
#[test]
fn test_large_graph() {
let graph = RefGraph::new();
let refs: Vec<_> = (0..10_000).map(|i| graph.create(i)).collect();
let mut all_refs = refs.clone();
for i in 0..5_000 {
all_refs.push(refs[i].clone());
}
let cloned = deep_clone(&all_refs);
assert_eq!(cloned.len(), 15_000);
for i in 0..5_000 {
assert!(cloned[i].ptr_eq(&cloned[10_000 + i]));
}
for i in 0..10_000 {
assert_eq!(cloned[i].get(), i as i32);
}
cloned[0].set(-1);
assert_eq!(all_refs[0].get(), 0);
}
}