//! ferris_gc `gc/sync.rs` — thread-safe garbage-collected pointers (`Gc`,
//! `GcCell`) and the process-wide collector / collection-strategy machinery.
1use std::alloc::{alloc, dealloc, Layout};
2use std::mem::transmute;
3use std::cell::{Cell, RefCell};
4use std::collections::HashMap;
5use std::ops::{Deref, DerefMut};
6use std::sync::{Mutex, RwLock};
7use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
8use std::thread::JoinHandle;
9
10use crate::gc::{Finalize, Trace};
11use crate::basic_gc_strategy::{basic_gc_strategy_start, BASIC_STRATEGY_GLOBAL_GC};
12use std::hash::Hasher;
13
14pub type GcOpt<T> = Option<Gc<T>>;
15pub type GcCellOpt<T> = Option<Gc<T>>;
16
/// Per-allocation bookkeeping stored alongside every managed value: the
/// number of tracers that reached this cell during the current trace pass.
struct GcInfo {
    root_ref_count: AtomicUsize,
}
20
21impl GcInfo {
22    fn new() -> GcInfo {
23        GcInfo {
24            root_ref_count: AtomicUsize::new(0),
25        }
26    }
27}
28
/// The heap cell managed by the collector: the user value `t` together with
/// its trace bookkeeping.
pub struct GcPtr<T> where T: 'static + Sized + Trace {
    info: GcInfo,
    t: T,
}
33
34impl<T> GcPtr<T> where T: 'static + Sized + Trace {
35    fn new(t: T) -> GcPtr<T> {
36        GcPtr {
37            info: GcInfo::new(),
38            t: t,
39        }
40    }
41}
42
43impl<T> Deref for GcPtr<T> where T: 'static + Sized + Trace {
44    type Target = T;
45
46    fn deref(&self) -> &Self::Target {
47        &self.t
48    }
49}
50
51impl<T> DerefMut for GcPtr<T> where T: 'static + Sized + Trace {
52    fn deref_mut(&mut self) -> &mut Self::Target {
53        &mut self.t
54    }
55}
56
impl<T> Trace for GcPtr<T> where T: Sized + Trace {
    /// Root status lives on the `GcInternal`/`GcCellInternal` handles, never
    /// on the heap cell itself.
    fn is_root(&self) -> bool {
        unreachable!("is_root on GcPtr is unreachable !!");
    }

    /// Forwards root demotion into the wrapped value.
    fn reset_root(&self) {
        self.t.reset_root();
    }

    /// Marks this cell as reached by one more tracer this cycle, then
    /// continues the trace through the wrapped value.
    fn trace(&self) {
        self.info.root_ref_count.fetch_add(1, Ordering::AcqRel);
        self.t.trace();
    }

    /// Undoes one `trace` mark after a collection cycle.
    fn reset(&self) {
        self.info.root_ref_count.fetch_sub(1, Ordering::AcqRel);
        self.t.reset();
    }

    /// A cell is alive if at least one trace reached it this cycle.
    fn is_traceable(&self) -> bool {
        self.info.root_ref_count.load(Ordering::Acquire) > 0
    }
}
80
81impl<T> Trace for RefCell<GcPtr<T>> where T: Sized + Trace {
82    fn is_root(&self) -> bool {
83        unreachable!("is_root on GcPtr is unreachable !!");
84    }
85
86    fn reset_root(&self) {
87        self.borrow().t.reset_root();
88    }
89
90    fn trace(&self) {
91        self.borrow().info.root_ref_count.fetch_add(1, Ordering::AcqRel);
92        self.borrow().t.trace();
93    }
94
95    fn reset(&self) {
96        self.borrow().info.root_ref_count.fetch_sub(1, Ordering::AcqRel);
97        self.borrow().t.reset();
98    }
99
100    fn is_traceable(&self) -> bool {
101        self.borrow().info.root_ref_count.load(Ordering::Acquire) > 0
102    }
103}
104
impl<T> Finalize for RefCell<GcPtr<T>> where T: Sized + Trace {
    /// The cell wrapper itself has nothing to finalize; the user value's
    /// finalizer is registered separately by the collector.
    fn finalize(&self) {}
}
108
impl<T> Finalize for GcPtr<T> where T: Sized + Trace {
    /// The cell wrapper itself has nothing to finalize; the user value's
    /// finalizer is registered separately by the collector.
    fn finalize(&self) {}
}
112
/// Tracer handle owned by a `Gc<T>`: a raw pointer to the shared heap cell
/// plus a flag marking whether this handle is currently a GC root.
pub struct GcInternal<T> where T: 'static + Sized + Trace {
    is_root: AtomicBool,
    ptr: *const GcPtr<T>,
}
117
118impl<T> GcInternal<T> where T: 'static + Sized + Trace {
119    fn new(ptr: *const GcPtr<T>) -> GcInternal<T> {
120        GcInternal {
121            is_root: AtomicBool::new(true),
122            ptr: ptr,
123        }
124    }
125}
126
127impl<T> Trace for GcInternal<T> where T: Sized + Trace {
128    fn is_root(&self) -> bool {
129        self.is_root.load(Ordering::Acquire)
130    }
131
132    fn reset_root(&self) {
133        if self.is_root.load(Ordering::Acquire) {
134            self.is_root.store(false, Ordering::Release);
135            unsafe {
136                (*self.ptr).reset_root();
137            }
138        }
139    }
140
141    fn trace(&self) {
142        unsafe {
143            (*self.ptr).trace();
144        }
145    }
146
147    fn reset(&self) {
148        unsafe {
149            (*self.ptr).reset();
150        }
151    }
152
153    fn is_traceable(&self) -> bool {
154        unsafe {
155            (*self.ptr).is_traceable()
156        }
157    }
158}
159
impl<T> Finalize for GcInternal<T> where T: Sized + Trace {
    /// Tracer handles own no user data; nothing to finalize.
    fn finalize(&self) {}
}
163
164impl<T> Deref for GcInternal<T> where T: 'static + Sized + Trace {
165    type Target = GcPtr<T>;
166
167    fn deref(&self) -> &Self::Target {
168        unsafe {
169            &(*self.ptr)
170        }
171    }
172}
173
/// Thread-safe garbage-collected pointer; wraps a collector-registered
/// `GcInternal` tracer.
pub struct Gc<T> where T: 'static + Sized + Trace {
    internal_ptr: *mut GcInternal<T>,
}
177
// SAFETY(review): Gc holds raw pointers into collector-owned memory; these
// impls forward Send/Sync only when T itself is Send/Sync, but soundness also
// depends on the global collector's locking protocol — not proven here.
unsafe impl<T> Sync for Gc<T> where T: 'static + Sized + Trace + Sync {}
unsafe impl<T> Send for Gc<T> where T: 'static + Sized + Trace + Send {}
180
181impl<T> Deref for Gc<T> where T: 'static + Sized + Trace {
182    type Target = GcInternal<T>;
183
184    fn deref(&self) -> &Self::Target {
185        unsafe {
186            &(*self.internal_ptr)
187        }
188    }
189}
190
191impl<T> Gc<T> where T: Sized + Trace {
192    pub fn new<'a>(t: T) -> Gc<T> {
193        basic_gc_strategy_start();
194        let global_strategy = &(*GLOBAL_GC_STRATEGY);
195        if !global_strategy.is_active() {
196            global_strategy.start();
197        }
198        unsafe {
199            (*GLOBAL_GC).create_gc(t)
200        }
201    }
202}
203
impl<T> Clone for Gc<T> where T: 'static + Sized + Trace {
    /// Registers a brand-new tracer with the global collector pointing at the
    /// same heap cell as `self`.
    fn clone(&self) -> Self {
        unsafe {
            (*GLOBAL_GC).clone_from_gc(self)
        }
    }

    /// Re-points this existing handle at `source`'s cell without registering
    /// a new tracer.
    // NOTE(review): unlike `clone`, this does not touch root bookkeeping on
    // either cell — confirm that is intended.
    fn clone_from(&mut self, source: &Self) {
        unsafe {
            (*self.internal_ptr).ptr = (*source.internal_ptr).ptr;
        }
    }
}
217
impl<T> Drop for Gc<T> where T: Sized + Trace {
    /// Unregisters (and frees) this handle's tracer; the heap cell itself is
    /// reclaimed later by `collect`.
    fn drop(&mut self) {
        unsafe {
            (*GLOBAL_GC).remove_tracer(self.internal_ptr);
        }
    }
}
225
impl<T> Trace for Gc<T> where T: Sized + Trace {
    /// Root status of this handle's tracer.
    fn is_root(&self) -> bool {
        unsafe {
            (*self.internal_ptr).is_root()
        }
    }

    /// Demotes this handle from root status (delegates to the tracer).
    fn reset_root(&self) {
        unsafe {
            (*self.internal_ptr).reset_root();
        }
    }

    /// Continues the trace through the tracer into the shared cell.
    fn trace(&self) {
        unsafe {
            (*self.internal_ptr).trace();
        }
    }

    /// Undoes one trace mark via the tracer.
    fn reset(&self) {
        unsafe {
            (*self.internal_ptr).reset();
        }
    }

    /// NOTE(review): delegates to `is_root` rather than the tracer's own
    /// `is_traceable` — consistent with `Trace for GcCell`, but confirm this
    /// asymmetry with `GcInternal::is_traceable` is intentional.
    fn is_traceable(&self) -> bool {
        self.is_root()
    }
}
255
impl<T> Finalize for Gc<T> where T: Sized + Trace {
    /// The handle owns no user data; the cell's finalizer runs at collection.
    fn finalize(&self) {}
}
259
/// Tracer handle owned by a `GcCell<T>`: a raw pointer to the shared,
/// interiorly-mutable heap cell plus a root flag.
pub struct GcCellInternal<T> where T: 'static + Sized + Trace {
    is_root: AtomicBool,
    ptr: *const RefCell<GcPtr<T>>,
}
264
265impl<T> GcCellInternal<T> where T: 'static + Sized + Trace {
266    fn new(ptr: *const RefCell<GcPtr<T>>) -> GcCellInternal<T> {
267        GcCellInternal {
268            is_root: AtomicBool::new(true),
269            ptr: ptr,
270        }
271    }
272}
273
274impl<T> Trace for GcCellInternal<T> where T: Sized + Trace {
275    fn is_root(&self) -> bool {
276        self.is_root.load(Ordering::Acquire)
277    }
278
279    fn reset_root(&self) {
280        if self.is_root.load(Ordering::Acquire) {
281            self.is_root.store(false, Ordering::Release);
282            unsafe {
283                (*self.ptr).borrow().reset_root();
284            }
285        }
286    }
287
288    fn trace(&self) {
289        unsafe {
290            (*self.ptr).borrow().trace();
291        }
292    }
293
294    fn reset(&self) {
295        unsafe {
296            (*self.ptr).borrow().reset();
297        }
298    }
299
300    fn is_traceable(&self) -> bool {
301        unsafe {
302            (*self.ptr).borrow().is_traceable()
303        }
304    }
305}
306
impl<T> Finalize for GcCellInternal<T> where T: Sized + Trace {
    /// Tracer handles own no user data; nothing to finalize.
    fn finalize(&self) {}
}
310
311impl<T> Deref for GcCellInternal<T> where T: 'static + Sized + Trace {
312    type Target = RefCell<GcPtr<T>>;
313
314    fn deref(&self) -> &Self::Target {
315        unsafe {
316            &(*self.ptr)
317        }
318    }
319}
320
/// Garbage-collected pointer with interior mutability (via `RefCell`); wraps
/// a collector-registered `GcCellInternal` tracer.
pub struct GcCell<T> where T: 'static + Sized + Trace {
    internal_ptr: *mut GcCellInternal<T>,
}
324
// SAFETY(review): the shared cell is a `RefCell`, whose borrow flags are not
// atomic — marking GcCell `Sync` looks unsound if two threads borrow the same
// cell concurrently; confirm the collector's protocol prevents that.
unsafe impl<T> Sync for GcCell<T> where T: 'static + Sized + Trace + Sync {}
unsafe impl<T> Send for GcCell<T> where T: 'static + Sized + Trace + Send {}
327
impl<T> Drop for GcCell<T> where T: Sized + Trace {
    /// Unregisters (and frees) this handle's tracer; the heap cell itself is
    /// reclaimed later by `collect`.
    fn drop(&mut self) {
        unsafe {
            (*GLOBAL_GC).remove_tracer(self.internal_ptr);
        }
    }
}
335
336impl<T> Deref for GcCell<T> where T: 'static + Sized + Trace {
337    type Target = GcCellInternal<T>;
338
339    fn deref(&self) -> &Self::Target {
340        unsafe {
341            &(*self.internal_ptr)
342        }
343    }
344}
345
346impl<T> GcCell<T> where T: 'static + Sized + Trace {
347    pub fn new<'a>(t: T) -> GcCell<T> {
348        basic_gc_strategy_start();
349        let global_strategy = &(*GLOBAL_GC_STRATEGY);
350        if !global_strategy.is_active() {
351            global_strategy.start();
352        }
353        unsafe {
354            (*GLOBAL_GC).create_gc_cell(t)
355        }
356    }
357}
358
impl<T> Clone for GcCell<T> where T: 'static + Sized + Trace {
    /// Registers a new tracer pointing at the same cell as `self` and marks
    /// it as a root.
    fn clone(&self) -> Self {
        let gc = unsafe {
            (*GLOBAL_GC).clone_from_gc_cell(self)
        };
        // NOTE(review): `clone_from_gc_cell` already copies `ptr` and
        // `GcCellInternal::new` already sets the root flag, so these two
        // writes look redundant — confirm before removing.
        unsafe {
            (*gc.internal_ptr).ptr = (*self.internal_ptr).ptr;
            (*gc.internal_ptr).is_root.store(true, Ordering::Release);
        }
        gc
    }

    /// Re-points this existing handle at `source`'s cell without registering
    /// a new tracer.
    fn clone_from(&mut self, source: &Self) {
        unsafe {
            (*self.internal_ptr).ptr = (*source.internal_ptr).ptr;
        }
    }
}
377
378impl<T> Trace for GcCell<T> where T: Sized + Trace {
379    fn is_root(&self) -> bool {
380        unsafe {
381            (*self.ptr).borrow().is_root()
382        }
383    }
384
385    fn reset_root(&self) {
386        self.is_root.store(false, Ordering::Release);
387        unsafe {
388            (*self.ptr).borrow().reset_root();
389        }
390    }
391
392    fn trace(&self) {
393        unsafe {
394            (*self.ptr).borrow().trace();
395        }
396    }
397
398    fn reset(&self) {
399        unsafe {
400            (*self.ptr).borrow().reset();
401        }
402    }
403
404    fn is_traceable(&self) -> bool {
405        self.is_root()
406    }
407}
408
impl<T> Finalize for GcCell<T> where T: Sized + Trace {
    /// The handle owns no user data; the cell's finalizer runs at collection.
    fn finalize(&self) {}
}
412
/// Raw pointer to a collector-owned allocation; always paired with its
/// `Layout` in the bookkeeping tables below so it can be deallocated.
type GcObjMem = *mut u8;
414
/// Process-wide collector state.
pub struct GlobalGarbageCollector {
    // Maps a tracer's address (thin pointer as usize) back to the fat
    // `*const dyn Trace` it was registered under, for `remove_tracer`.
    mem_to_trc: RwLock<HashMap<usize, *const dyn Trace>>,
    // Registered tracer handles (GcInternal/GcCellInternal) -> allocation.
    trs: RwLock<HashMap<*const dyn Trace, (GcObjMem, Layout)>>,
    // Managed heap cells (GcPtr / RefCell<GcPtr>) -> allocation.
    objs: Mutex<HashMap<*const dyn Trace, (GcObjMem, Layout)>>,
    // Heap cell -> finalizer to run just before its memory is freed.
    fin: Mutex<HashMap<*const dyn Trace, *const dyn Finalize>>,
}
421
// SAFETY(review): the tables are lock-guarded, but the raw pointers they hold
// are dereferenced on whichever thread runs collection — soundness rests on
// the registration/removal protocol, not proven here.
unsafe impl Sync for GlobalGarbageCollector {}

unsafe impl Send for GlobalGarbageCollector {}
425
impl GlobalGarbageCollector {
    /// Fresh collector with empty bookkeeping tables.
    fn new() -> GlobalGarbageCollector {
        GlobalGarbageCollector {
            mem_to_trc: RwLock::new(HashMap::new()),
            trs: RwLock::new(HashMap::new()),
            objs: Mutex::new(HashMap::new()),
            fin: Mutex::new(HashMap::new()),
        }
    }

    /// Access to the table of managed heap-cell allocations.
    pub fn get_objs(&self) -> &Mutex<HashMap<*const dyn Trace, (*mut u8, Layout)>> {
        &self.objs
    }

    /// Allocates a heap cell for `t` plus a tracer handle, registers both in
    /// the bookkeeping tables, and returns the rooted `Gc`.
    unsafe fn create_gc<T>(&self, t: T) -> Gc<T>
        where T: Sized + Trace {
        let (gc_ptr, mem_info_gc_ptr) = self.alloc_mem::<GcPtr<T>>();
        let (gc_inter_ptr, mem_info_internal_ptr) = self.alloc_mem::<GcInternal<T>>();
        std::ptr::write(gc_ptr, GcPtr::new(t));
        std::ptr::write(gc_inter_ptr, GcInternal::new(gc_ptr));
        let gc = Gc {
            internal_ptr: gc_inter_ptr,
        };
        // Demote handles now owned by the cell's value: the new `gc` is their root.
        (*(*gc.internal_ptr).ptr).reset_root();
        // Lock acquisition order (mem_to_trc, trs, objs, fin) matches
        // create_gc_cell / collect to avoid lock-order inversions.
        let mut mem_to_trc = self.mem_to_trc.write().unwrap();
        let mut trs = self.trs.write().unwrap();
        let mut objs = self.objs.lock().unwrap();
        let mut fin = self.fin.lock().unwrap();
        mem_to_trc.insert(gc_inter_ptr as usize, gc_inter_ptr);
        trs.insert(gc_inter_ptr, mem_info_internal_ptr);
        objs.insert(gc_ptr, mem_info_gc_ptr);
        fin.insert(gc_ptr, (*gc_ptr).t.as_finalize());
        gc
    }

    /// Registers a new tracer for the cell already managed by `gc`; the cell
    /// itself is not re-registered.
    unsafe fn clone_from_gc<T>(&self, gc: &Gc<T>) -> Gc<T> where T: Sized + Trace {
        let (gc_inter_ptr, mem_info_internal_ptr) = self.alloc_mem::<GcInternal<T>>();
        std::ptr::write(gc_inter_ptr, GcInternal::new(gc.ptr));
        let gc = Gc {
            internal_ptr: gc_inter_ptr,
        };
        (*(*gc.internal_ptr).ptr).reset_root();
        let mut mem_to_trc = self.mem_to_trc.write().unwrap();
        let mut trs = self.trs.write().unwrap();
        mem_to_trc.insert(gc_inter_ptr as usize, gc_inter_ptr);
        trs.insert(gc_inter_ptr, mem_info_internal_ptr);
        gc
    }

    /// Like `create_gc`, but wraps the value in a `RefCell` for interior
    /// mutability and returns a rooted `GcCell`.
    unsafe fn create_gc_cell<T>(&self, t: T) -> GcCell<T> where T: Sized + Trace {
        let (gc_ptr, mem_info_gc_ptr) = self.alloc_mem::<RefCell<GcPtr<T>>>();
        let (gc_cell_inter_ptr, mem_info_internal_ptr) = self.alloc_mem::<GcCellInternal<T>>();
        std::ptr::write(gc_ptr, RefCell::new(GcPtr::new(t)));
        std::ptr::write(gc_cell_inter_ptr, GcCellInternal::new(gc_ptr));
        let gc = GcCell {
            internal_ptr: gc_cell_inter_ptr,
        };
        (*(*gc.internal_ptr).ptr).reset_root();
        let mut mem_to_trc = self.mem_to_trc.write().unwrap();
        let mut trs = self.trs.write().unwrap();
        let mut objs = self.objs.lock().unwrap();
        let mut fin = self.fin.lock().unwrap();
        mem_to_trc.insert(gc_cell_inter_ptr as usize, gc_cell_inter_ptr);
        trs.insert(gc_cell_inter_ptr, mem_info_internal_ptr);
        objs.insert(gc_ptr, mem_info_gc_ptr);
        fin.insert(gc_ptr, (*(*gc_ptr).as_ptr()).t.as_finalize());
        gc
    }

    /// Registers a new tracer for the cell already managed by `gc`.
    unsafe fn clone_from_gc_cell<T>(&self, gc: &GcCell<T>) -> GcCell<T> where T: Sized + Trace {
        let (gc_inter_ptr, mem_info) = self.alloc_mem::<GcCellInternal<T>>();
        std::ptr::write(gc_inter_ptr, GcCellInternal::new(gc.ptr));
        let gc = GcCell {
            internal_ptr: gc_inter_ptr,
        };
        (*(*gc.internal_ptr).ptr).reset_root();
        let mut mem_to_trc = self.mem_to_trc.write().unwrap();
        let mut trs = self.trs.write().unwrap();
        mem_to_trc.insert(gc_inter_ptr as usize, gc_inter_ptr);
        trs.insert(gc_inter_ptr, mem_info);
        gc
    }

    /// Raw allocation sized for `T`; the caller must `ptr::write` a valid
    /// value before use, and is responsible for eventual `dealloc`.
    unsafe fn alloc_mem<T>(&self) -> (*mut T, (GcObjMem, Layout)) where T: Sized {
        let layout = Layout::new::<T>();
        let mem = alloc(layout);
        let type_ptr: *mut T = mem as *mut _;
        (type_ptr, (mem, layout))
    }

    /// Unregisters and frees the tracer allocation for a dropped Gc/GcCell.
    unsafe fn remove_tracer(&self, tracer: *const dyn Trace) {
        let mut mem_to_trc = self.mem_to_trc.write().unwrap();
        let mut trs = self.trs.write().unwrap();
        // Split the fat pointer into (data, vtable) so the lookup keys on the
        // thin data pointer; the caller's vtable may differ from the one
        // stored at registration.
        // NOTE(review): relies on the unstable layout of trait-object fat
        // pointers — works on current rustc, but not guaranteed by the language.
        let (tracer_thin_ptr, _) = unsafe { transmute::<_, (*const (), *const ())>(tracer) };
        let tracer = &mem_to_trc.remove(&(tracer_thin_ptr as usize)).unwrap();
        let del = trs.remove(&tracer).unwrap();
        dealloc(del.0, del.1);
    }

    /// One mark-and-sweep cycle:
    /// 1) trace from every root tracer, 2) free tracers no trace reached,
    /// 3) find unreachable heap cells, 4) undo the trace marks,
    /// 5) finalize and free the collected cells.
    pub unsafe fn collect(&self) {
        let mut trs = self.trs.write().unwrap();
        // Mark phase: each root handle bumps the reach count of every cell it
        // can reach.
        for (gc_info, _) in &*trs {
            let tracer = &(**gc_info);
            if tracer.is_root() {
                tracer.trace();
            }
        }
        let mut collected_tracers = Vec::new();
        for (gc_info, _) in &*trs {
            let tracer = &(**gc_info);
            if !tracer.is_traceable() {
                collected_tracers.push(*gc_info);
            }
        }
        // NOTE(review): freed tracers are removed from `trs` but their stale
        // entries remain in `mem_to_trc` — confirm a later `remove_tracer`
        // cannot hit one of those entries and unwrap on a missing `trs` key.
        for tracer_ptr in collected_tracers {
            let del = (&*trs)[&tracer_ptr];
            dealloc(del.0, del.1);
            trs.remove(&tracer_ptr);
        }
        let mut collected_objects = Vec::new();
        let mut objs = self.objs.lock().unwrap();
        for (gc_info, _) in &*objs {
            let obj = &(**gc_info);
            if !obj.is_traceable() {
                collected_objects.push(*gc_info);
            }
        }
        // Undo the marks from the mark phase so the next cycle starts clean.
        for (gc_info, _) in &*trs {
            let tracer = &(**gc_info);
            tracer.reset();
        }
        let mut fin = self.fin.lock().unwrap();
        let _clone_collected_objects = collected_objects.clone();
        // Sweep: run each dead cell's finalizer, then free it and drop its
        // bookkeeping entries.
        for col in collected_objects {
            let del = (&*objs)[&col];
            let finilizer = (&*fin)[&col];
            (*finilizer).finalize();
            dealloc(del.0, del.1);
            objs.remove(&col);
            fin.remove(&col);
        }
    }

    /// Unconditionally frees every registered tracer and heap cell (running
    /// finalizers), regardless of reachability.
    unsafe fn collect_all(&self) {
        let mut collected_tracers: Vec<*const dyn Trace> = Vec::new();
        let mut trs = self.trs.write().unwrap();
        for (gc_info, _) in &*trs {
            collected_tracers.push(*gc_info);
        }
        let mut collected_objects: Vec<*const dyn Trace> = Vec::new();
        let mut objs = self.objs.lock().unwrap();
        for (gc_info, _) in &*objs {
            collected_objects.push(*gc_info);
        }
        for tracer_ptr in collected_tracers {
            let del = (&*trs)[&tracer_ptr];
            dealloc(del.0, del.1);
            trs.remove(&tracer_ptr);
        }
        let mut fin = self.fin.lock().unwrap();
        for col in collected_objects {
            let del = (&*objs)[&col];
            let finilizer = (&*fin)[&col];
            (*finilizer).finalize();
            dealloc(del.0, del.1);
            objs.remove(&col);
            fin.remove(&col);
        }
    }
}
596
/// Callback that starts a collection strategy; may return a handle to a
/// background worker thread it spawned.
pub type StartGlobalStrategyFn = Box<dyn FnMut(&'static GlobalGarbageCollector, &'static AtomicBool) -> Option<JoinHandle<()>>>;
/// Callback that tears a collection strategy down.
pub type StopGlobalStrategyFn = Box<dyn FnMut(&'static GlobalGarbageCollector)>;
599
/// Pluggable start/stop policy that drives the global collector (e.g. from a
/// background thread supplied by the start callback).
pub struct GlobalStrategy {
    gc: Cell<&'static GlobalGarbageCollector>,
    is_active: AtomicBool,
    start_func: Mutex<StartGlobalStrategyFn>,
    stop_func: Mutex<StopGlobalStrategyFn>,
    join_handle: Mutex<Option<JoinHandle<()>>>,
}
607
// SAFETY(review): required because of the non-Sync `Cell` field; all other
// state is behind Mutex/atomics. Soundness assumes `gc` is never swapped
// concurrently — confirm.
unsafe impl Sync for GlobalStrategy {}
unsafe impl Send for GlobalStrategy {}
610
611impl GlobalStrategy {
612    fn new<StartFn, StopFn>(gc: &'static GlobalGarbageCollector, start_fn: StartFn, stop_fn: StopFn) -> GlobalStrategy
613        where StartFn: 'static + FnMut(&'static GlobalGarbageCollector, &'static AtomicBool) -> Option<JoinHandle<()>>,
614              StopFn: 'static + FnMut(&'static GlobalGarbageCollector) {
615        GlobalStrategy {
616            gc: Cell::new(gc),
617            is_active: AtomicBool::new(false),
618            start_func: Mutex::new(Box::new(start_fn)),
619            stop_func: Mutex::new(Box::new(stop_fn)),
620            join_handle: Mutex::new(None),
621        }
622    }
623
624    pub fn change_strategy<StartFn, StopFn>(&self, start_fn: StartFn, stop_fn: StopFn)
625        where StartFn: 'static + FnMut(&'static GlobalGarbageCollector, &'static AtomicBool) -> Option<JoinHandle<()>>,
626              StopFn: 'static + FnMut(&'static GlobalGarbageCollector) {
627        let mut start_func = self.start_func.lock().unwrap();
628        let mut stop_func = self.stop_func.lock().unwrap();
629        if self.is_active() {
630            self.stop();
631        }
632        *start_func = Box::new(start_fn);
633        *stop_func = Box::new(stop_fn);
634    }
635
636    pub fn is_active(&self) -> bool {
637        self.is_active.load(Ordering::Acquire)
638    }
639
640    pub fn start(&'static self) {
641        self.is_active.store(true, Ordering::Release);
642        let mut start_func = self.start_func.lock().unwrap();
643        let mut join_handle = self.join_handle.lock().unwrap();
644        *join_handle = (&mut *(start_func))(self.gc.get(), &self.is_active);
645    }
646
647    pub fn stop(&self) {
648        self.is_active.store(false, Ordering::Release);
649        let mut join_handle = self.join_handle.lock().unwrap();
650        if let Some(join_handle) = join_handle.take() {
651            join_handle.join().expect("GlobalStrategy::stop, GlobalStrategy Thread being joined has panicked !!");
652        }
653        let mut stop_func = self.stop_func.lock().unwrap();
654        (&mut *(stop_func))(self.gc.get());
655    }
656}
657
impl Drop for GlobalStrategy {
    /// Deactivates the strategy and runs the stop callback.
    // NOTE(review): unlike `stop()`, drop does not join `join_handle` — any
    // background worker thread is detached here; confirm that is intended.
    fn drop(&mut self) {
        self.is_active.store(false, Ordering::Release);
        let mut stop_func = self.stop_func.lock().unwrap();
        (&mut *(stop_func))(self.gc.get());
    }
}
665
lazy_static! {
    // The one process-wide collector instance shared by every Gc/GcCell.
    static ref GLOBAL_GC: GlobalGarbageCollector = {
        GlobalGarbageCollector::new()
    };
    // Default strategy: on start, publish the collector into
    // BASIC_STRATEGY_GLOBAL_GC (no worker thread of its own — returns None);
    // on stop, retract it.
    pub static ref GLOBAL_GC_STRATEGY: GlobalStrategy = {
        let gc = &(*GLOBAL_GC);
        GlobalStrategy::new(gc,
            move |global_gc, _| {
                let mut basic_strategy_global_gc = BASIC_STRATEGY_GLOBAL_GC.write().unwrap();
                *basic_strategy_global_gc = Some(global_gc);
                None
            },
            move |_global_gc| {
                let mut basic_strategy_global_gc = BASIC_STRATEGY_GLOBAL_GC.write().unwrap();
                *basic_strategy_global_gc = None;
            })
    };
}
684
#[cfg(test)]
mod tests {
    use crate::gc::sync::{Gc, GLOBAL_GC};

    // NOTE(review): all tests share the single process-wide GLOBAL_GC, so the
    // asserted table sizes only hold when tests run serially
    // (e.g. `cargo test -- --test-threads=1`) — confirm how CI runs these.

    // A live handle keeps its tracer registered across a collection.
    #[test]
    fn one_object() {
        let _one = Gc::new(1);
        unsafe { (*GLOBAL_GC).collect() };
        assert_eq!((*GLOBAL_GC).trs.read().unwrap().len(), 1);
    }

    // Dropping the handle unregisters its tracer; collect leaves nothing.
    #[test]
    fn gc_collect_one_from_one() {
        {
            let _one = Gc::new(1);
        }
        unsafe { (*GLOBAL_GC).collect() };
        assert_eq!((*GLOBAL_GC).trs.read().unwrap().len(), 0);
    }

    // Reassignment drops the first handle before the second is created.
    #[test]
    fn two_objects() {
        let mut one = Gc::new(1);
        one = Gc::new(2);
        unsafe { (*GLOBAL_GC).collect() };
        assert_eq!((*GLOBAL_GC).trs.read().unwrap().len(), 2);
    }

    #[test]
    fn gc_collect_one_from_two() {
        let mut one = Gc::new(1);
        one = Gc::new(2);
        unsafe { (*GLOBAL_GC).collect() };
        assert_eq!((*GLOBAL_GC).trs.read().unwrap().len(), 0);
    }

    // Both handles dropped at end of scope; everything is reclaimed.
    #[test]
    fn gc_collect_two_from_two() {
        {
            let mut one = Gc::new(1);
            one = Gc::new(2);
        }
        unsafe { (*GLOBAL_GC).collect() };
        assert_eq!((*GLOBAL_GC).trs.read().unwrap().len(), 0);
    }
}