jrsonnet_gcmodule/collect.rs

use crate::cc::CcDummy;
use crate::cc::CcDyn;
use crate::cc::GcClone;
use crate::debug;
use crate::ref_count::RefCount;
use crate::ref_count::SingleThreadRefCount;
use crate::Cc;
use crate::Trace;
use std::cell::Cell;
use std::cell::RefCell;
use std::cell::UnsafeCell;
use std::marker::PhantomData;
use std::mem;
use std::ptr::without_provenance;

/// A collection of tracked objects that can be scanned for garbage cycles.
pub struct ObjectSpace {
    /// Linked list of headers of the tracked objects.
    pub(crate) list: RefCell<OwnedGcHeader>,

    /// Keeps `ObjectSpace` `!Send` and `!Sync`, matching the
    /// single-threaded `Cc` it manages.
    _phantom: PhantomData<Cc<()>>,
}

/// Abstraction over an object space, so the same collection algorithm can
/// back both single-threaded and thread-safe spaces.
pub trait AbstractObjectSpace: 'static + Sized {
    type RefCount: RefCount;
    type Header;

    /// Insert a header (and the `CcDyn` vtable of its value) into the
    /// tracked linked list.
    fn insert(&self, header: *const Self::Header, value: &dyn CcDyn);

    /// Unlink a header from the tracked linked list.
    fn remove(header: &Self::Header);

    /// Create a reference count, optionally marked as tracked.
    fn new_ref_count(&self, tracked: bool) -> Self::RefCount;

    /// Create an empty, unlinked header.
    fn empty_header(&self) -> Self::Header;
}

impl AbstractObjectSpace for ObjectSpace {
    type RefCount = SingleThreadRefCount;
    type Header = GcHeader;

    fn insert(&self, header: *const Self::Header, value: &dyn CcDyn) {
        let list = self.list.borrow();
        let prev: &GcHeader = list.inner();
        debug_assert!(unsafe { (*header).next.get() }.is_null());
        let next = prev.next.get();
        unsafe { (*header).prev.set(prev) };
        unsafe { (*header).next.set(next) };
        unsafe {
            (*next).prev.set(header);
            // Split the `&dyn CcDyn` fat pointer and keep only its vtable
            // word; the data pointer can be recomputed from the header
            // address (see `Linked::value_ptr`).
            let fat_ptr: [*mut (); 2] = mem::transmute(value);
            (*header).ccdyn_vptr.set(fat_ptr[1]);
        }
        prev.next.set(header);
    }

    #[inline]
    fn remove(header: &Self::Header) {
        let header: &GcHeader = header;
        debug_assert!(!header.next.get().is_null());
        debug_assert!(!header.prev.get().is_null());
        let next = unmask_ptr(header.next.get());
        let prev = unmask_ptr(header.prev.get());
        unsafe {
            (*prev).next.set(next);
            (*next).prev.set(prev);
        }
        header.next.set(std::ptr::null_mut());
    }

    #[inline]
    fn new_ref_count(&self, tracked: bool) -> Self::RefCount {
        SingleThreadRefCount::new(tracked)
    }

    #[inline]
    fn empty_header(&self) -> Self::Header {
        GcHeader::empty()
    }
}

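// Illustration only (not crate API): `insert` above splits a `&dyn CcDyn`
// reference into its two words and stores just the vtable pointer. This
// relies on rustc's current (long-standing, but not guaranteed) layout of
// trait-object references as [data_ptr, vtable_ptr]. A minimal sketch of
// that assumption:
#[test]
fn fat_ptr_layout_sketch() {
    let value: u32 = 123;
    let obj: &dyn std::fmt::Debug = &value;
    // Transmute the fat reference into its raw two-word representation.
    let parts: [*const (); 2] = unsafe { std::mem::transmute(obj) };
    // The first word is the data pointer; the second is the vtable.
    assert_eq!(parts[0] as usize, &value as *const u32 as usize);
}
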
impl Default for ObjectSpace {
    fn default() -> Self {
        let header = new_gc_list();
        Self {
            list: RefCell::new(header),
            _phantom: PhantomData,
        }
    }
}

impl ObjectSpace {
    /// Count the objects tracked by this space.
    pub fn count_tracked(&self) -> usize {
        let list = self.list.borrow();
        let list: &GcHeader = list.inner();
        let mut count = 0;
        visit_list(list, |_| count += 1);
        count
    }

    /// Collect reference cycles that are no longer reachable from outside
    /// this space. Returns the number of objects collected.
    pub fn collect_cycles(&self) -> usize {
        let list = self.list.borrow();
        let list: &GcHeader = list.inner();
        collect_list(list, ())
    }

    /// Create a new [`Cc`] tracked by this space.
    pub fn create<T: Trace>(&self, value: T) -> Cc<T> {
        Cc::new_in_space(value, self)
    }

    /// Abandon the current list of tracked objects without collecting
    /// them, leaking whatever cycles they contain.
    pub fn leak(&self) {
        *self.list.borrow_mut() = new_gc_list();
    }
}

impl Drop for ObjectSpace {
    fn drop(&mut self) {
        // Collect one last time so tracked cycles do not leak with the space.
        self.collect_cycles();
    }
}

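// A minimal usage sketch (illustration only; assumes the crate's blanket
// `Trace` impls for `RefCell`, `Vec`, and `Box<dyn Trace>`): two objects
// that reference each other are unreclaimable by plain reference counting
// and are released by `collect_cycles`.
#[test]
fn object_space_usage_sketch() {
    use std::cell::RefCell;
    type List = Cc<RefCell<Vec<Box<dyn Trace>>>>;
    let space = ObjectSpace::default();
    let a: List = space.create(RefCell::new(Vec::new()));
    let b: List = space.create(RefCell::new(Vec::new()));
    a.borrow_mut().push(Box::new(b.clone()));
    b.borrow_mut().push(Box::new(a.clone()));
    drop(a);
    drop(b);
    // The cycle keeps both objects alive...
    assert_eq!(space.count_tracked(), 2);
    // ...until the space is asked to collect.
    assert_eq!(space.collect_cycles(), 2);
}
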
/// Node interface of the intrusive doubly-linked list of tracked objects.
pub trait Linked {
    fn next(&self) -> *const Self;
    fn prev(&self) -> *const Self;
    fn set_prev(&self, other: *const Self);

    /// Reconstruct the `CcDyn` trait object for the value stored right
    /// after this header.
    fn value_ptr(this: *const Self) -> *const dyn CcDyn;
    fn value(&self) -> &dyn CcDyn;
}

/// Header stored before every tracked value. `#[repr(C)]` so the value is
/// always at a fixed offset right after the header.
#[repr(C)]
pub struct GcHeader {
    pub(crate) next: Cell<*const GcHeader>,
    /// Previous node. During a collection this field is repurposed to hold
    /// flag bits and the trial reference count (see `PREV_MASK_*` below).
    pub(crate) prev: Cell<*const GcHeader>,

    /// Vtable word of the value's `&dyn CcDyn` fat pointer.
    pub(crate) ccdyn_vptr: Cell<*const ()>,

    pub(crate) _marker: UnsafeCell<()>,
}

impl Linked for GcHeader {
    #[inline]
    fn next(&self) -> *const Self {
        self.next.get()
    }
    #[inline]
    fn prev(&self) -> *const Self {
        self.prev.get()
    }
    #[inline]
    fn set_prev(&self, other: *const Self) {
        self.prev.set(other)
    }
    fn value_ptr(this: *const Self) -> *const dyn CcDyn {
        // Rebuild the `*const dyn CcDyn` fat pointer: the value lives
        // directly after the header (data pointer), and the vtable word
        // was saved by `insert`.
        unsafe {
            let fat_ptr: (*const (), *const ()) = (this.offset(1) as _, (*this).ccdyn_vptr.get());
            mem::transmute(fat_ptr)
        }
    }
    #[inline]
    fn value(&self) -> &dyn CcDyn {
        unsafe { mem::transmute(Self::value_ptr(self)) }
    }
}

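// Illustration only (not crate API): `value_ptr` assumes the tracked value
// sits directly after its `GcHeader` in one `#[repr(C)]` allocation, so
// `this.offset(1)` lands on the value. A layout-alike sketch with a
// hypothetical `HeaderAndValue` stand-in:
#[test]
fn header_value_adjacency_sketch() {
    #[repr(C)]
    struct HeaderAndValue {
        header: GcHeader,
        value: usize,
    }
    let b = Box::new(HeaderAndValue {
        header: GcHeader::empty(),
        value: 42,
    });
    let base: *const HeaderAndValue = &*b;
    // One `GcHeader` past the start of the allocation is the value.
    let value_ptr = unsafe { (base as *const GcHeader).offset(1) } as *const usize;
    assert_eq!(unsafe { *value_ptr }, 42);
}
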
impl GcHeader {
    /// Create an unlinked header whose vtable word points at `CcDummy`.
    pub(crate) fn empty() -> Self {
        Self {
            next: Cell::new(std::ptr::null()),
            prev: Cell::new(std::ptr::null()),
            ccdyn_vptr: Cell::new(CcDummy::ccdyn_vptr()),
            _marker: UnsafeCell::new(()),
        }
    }
}

/// Collect cycles in the current thread's object space. Returns the number
/// of objects collected.
pub fn collect_thread_cycles() -> usize {
    debug::log(|| ("collect", "collect_thread_cycles"));
    THREAD_OBJECT_SPACE.with(|list| list.collect_cycles())
}

/// Count the objects tracked by the current thread's object space.
pub fn count_thread_tracked() -> usize {
    THREAD_OBJECT_SPACE.with(|list| list.count_tracked())
}

thread_local!(pub(crate) static THREAD_OBJECT_SPACE: ObjectSpace = ObjectSpace::default());

/// Run `handler` with the current thread's object space.
pub fn with_thread_object_space<R>(handler: impl FnOnce(&ObjectSpace) -> R) -> R {
    THREAD_OBJECT_SPACE.with(handler)
}

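// A minimal sketch of the thread-local entry points (illustration only;
// assumes `Cc::new` tracks into `THREAD_OBJECT_SPACE`, the crate's `Trace`
// impls for `RefCell`/`Vec`/`Box<dyn Trace>`, and that nothing else on this
// thread is currently tracked):
#[test]
fn thread_space_sketch() {
    use std::cell::RefCell;
    type List = Cc<RefCell<Vec<Box<dyn Trace>>>>;
    let a: List = Cc::new(RefCell::new(Vec::new()));
    let b: List = Cc::new(RefCell::new(Vec::new()));
    a.borrow_mut().push(Box::new(b.clone()));
    b.borrow_mut().push(Box::new(a.clone()));
    drop(a);
    drop(b);
    // Only the cycle collector can reclaim the loop now.
    assert_eq!(count_thread_tracked(), 2);
    assert_eq!(collect_thread_cycles(), 2);
}
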
/// Owning handle for the heap-allocated sentinel node that heads a GC
/// list; frees the sentinel on drop.
pub(crate) struct OwnedGcHeader {
    raw: *mut GcHeader,
}
impl OwnedGcHeader {
    fn inner(&self) -> &GcHeader {
        unsafe { &*self.raw }
    }
}
impl Drop for OwnedGcHeader {
    fn drop(&mut self) {
        drop(unsafe { Box::from_raw(self.raw) });
    }
}

/// Create an empty GC list: a single sentinel header whose `next` and
/// `prev` point back at itself.
pub(crate) fn new_gc_list() -> OwnedGcHeader {
    let header = Box::into_raw(Box::new(GcHeader::empty()));
    unsafe { (*header).prev.set(header) };
    unsafe { (*header).next.set(header) };
    OwnedGcHeader { raw: header }
}

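// Sketch of the invariant `visit_list` relies on: a fresh list is one
// sentinel node linked to itself in both directions.
#[test]
fn new_gc_list_is_self_cycle() {
    let list = new_gc_list();
    let head = list.inner();
    assert!(std::ptr::eq(head.next.get(), head));
    assert!(std::ptr::eq(head.prev.get(), head));
}
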
/// Collect cyclic garbage in `list`. The three phases implement
/// trial deletion: snapshot reference counts, subtract references internal
/// to the list, then release whatever ended up unreachable.
pub(crate) fn collect_list<L: Linked, K>(list: &L, lock: K) -> usize {
    update_refs(list);
    subtract_refs(list);
    release_unreachable(list, lock)
}

/// Visit every header in `list` except the sentinel itself. `func` is
/// called after the cursor has already advanced, so it may unlink the
/// header it receives.
pub(crate) fn visit_list<'a, L: Linked>(list: &'a L, mut func: impl FnMut(&'a L)) {
    let mut ptr = list.next();
    while ptr as *const _ != list as *const _ {
        let header: &L = unsafe { &*ptr };
        ptr = header.next();
        func(header);
    }
}

// During collection, `prev` holds flag bits (the low two bits) and the
// trial reference count (the remaining bits, shifted by `PREV_SHIFT`).
const PTR_MASK: usize = usize::MAX & !(0b11);
const PREV_MASK_COLLECTING: usize = 1;
const PREV_MASK_VISITED: usize = 2;
const PREV_SHIFT: u32 = 2;

#[inline]
fn unmask_ptr<T>(ptr: *const T) -> *const T {
    ptr.map_addr(|addr| addr & PTR_MASK)
}

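// Sketch of the `prev` bit-packing used during collection: the low two
// bits carry the COLLECTING/VISITED flags and the upper bits hold the
// shifted trial reference count, stored as a plain address (hence
// `without_provenance`). Illustration only:
#[test]
fn prev_bit_packing_sketch() {
    let ref_count: usize = 5;
    let packed = (ref_count << PREV_SHIFT) | PREV_MASK_COLLECTING;
    let ptr: *const GcHeader = without_provenance(packed);
    assert_eq!(ptr.addr() & PREV_MASK_COLLECTING, PREV_MASK_COLLECTING);
    assert_eq!(ptr.addr() >> PREV_SHIFT, ref_count);
    // `unmask_ptr` strips the two flag bits.
    assert_eq!(unmask_ptr(ptr).addr(), ref_count << PREV_SHIFT);
}
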
/// Phase 1: copy each tracked object's reference count into its `prev`
/// field, tagged with `PREV_MASK_COLLECTING`.
fn update_refs<L: Linked>(list: &L) {
    visit_list(list, |header| {
        let ref_count = header.value().gc_ref_count();
        if ref_count > 0 {
            let shifted = (ref_count << PREV_SHIFT) | PREV_MASK_COLLECTING;
            header.set_prev(without_provenance(shifted));
        } else {
            // A zero count means the object is already being dropped;
            // it is not a candidate for this collection.
            debug_assert!(header.prev() as usize & PREV_MASK_COLLECTING == 0);
        }
    });
}

/// Phase 2 (trial deletion): for every reference between two tracked
/// objects, subtract one from the referent's trial count. Objects whose
/// count reaches zero are kept alive only from within the list.
fn subtract_refs<L: Linked>(list: &L) {
    let mut tracer = |header: *const ()| {
        let header = unsafe { &*(header as *const L) };
        if is_collecting(header) {
            debug_assert!(
                !is_unreachable(header),
                "bug: object {} becomes unreachable while trying to dec_ref (is Trace impl correct?)",
                debug_name(header)
            );
            edit_gc_ref_count(header, -1);
        }
    };
    visit_list(list, |header| {
        set_visited(header);
        header.value().gc_traverse(&mut tracer);
    });
}

/// Phase 3a: objects with a positive trial count are reachable from
/// outside the list; recursively revive everything they can reach.
fn mark_reachable<L: Linked>(list: &L) {
    fn revive<L: Linked>(header: *const ()) {
        let header = unsafe { &*(header as *const L) };
        if is_collecting(header) {
            unset_collecting(header);
            if is_unreachable(header) {
                edit_gc_ref_count(header, 1); // revive
            }
            header.value().gc_traverse(&mut revive::<L>); // revive recursively
        }
    }

    visit_list(list, |header| {
        if is_collecting(header) && !is_unreachable(header) {
            unset_collecting(header);
            header.value().gc_traverse(&mut revive::<L>)
        }
    });
}

/// Phase 3b: count and drop the objects that remained unreachable after
/// `mark_reachable`.
fn release_unreachable<L: Linked, K>(list: &L, lock: K) -> usize {
    mark_reachable(list);

    let mut count = 0;
    visit_list(list, |header| {
        if is_unreachable(header) {
            count += 1;
        }
    });

    debug::log(|| ("collect", format!("{} unreachable objects", count)));

    // Clone a handle to every unreachable object so their drop order is
    // under our control and their ref counts stay observable.
    let mut to_drop: Vec<Box<dyn GcClone>> = Vec::with_capacity(count);
    visit_list(list, |header| {
        if is_unreachable(header) {
            to_drop.push(header.value().gc_clone());
        }
    });

    restore_prev(list);

    // Release the space lock (a no-op `()` for the single-thread space)
    // before running arbitrary user `Drop` code.
    drop(lock);

    #[cfg(feature = "debug")]
    {
        crate::debug::GC_DROPPING.with(|d| d.set(true));
    }

    // Drop the inner `T` values first. This breaks the cycle edges while
    // the allocations themselves stay alive via `to_drop`.
    for value in to_drop.iter() {
        value.gc_drop_t();
    }

    // With all cycle edges gone, only our cloned handles should be keeping
    // these objects alive.
    for value in to_drop.iter() {
        let ref_count = value.gc_ref_count();
        assert_eq!(
            ref_count, 1,
            concat!(
                "bug: unexpected ref-count after dropping cycles\n",
                "This usually indicates a buggy Trace or Drop implementation."
            )
        );
    }

    #[cfg(feature = "debug")]
    {
        crate::debug::GC_DROPPING.with(|d| d.set(false));
    }

    // Dropping `to_drop` here frees the allocations themselves.
    count
}

/// Rebuild the `prev` pointers that were overwritten with trial counts
/// during collection.
fn restore_prev<L: Linked>(list: &L) {
    let mut prev = list;
    visit_list(list, |header| {
        header.set_prev(prev);
        prev = header;
    });
}

fn is_unreachable<L: Linked>(header: &L) -> bool {
    let prev = header.prev().addr();
    is_collecting(header) && (prev >> PREV_SHIFT) == 0
}

pub(crate) fn is_collecting<L: Linked>(header: &L) -> bool {
    let prev = header.prev().addr();
    (prev & PREV_MASK_COLLECTING) != 0
}

fn set_visited<L: Linked>(header: &L) -> bool {
    let prev = header.prev();
    let visited = (prev.addr() & PREV_MASK_VISITED) != 0;
    debug_assert!(
        !visited,
        "bug: double visit: {} (is Trace impl correct?)",
        debug_name(header)
    );
    let new_prev = prev.map_addr(|prev| prev | PREV_MASK_VISITED);
    header.set_prev(new_prev);
    visited
}

fn unset_collecting<L: Linked>(header: &L) {
    let prev = header.prev();
    // `x ^ (x & mask)` clears the mask bits, i.e. the COLLECTING flag.
    let new_prev = prev.map_addr(|prev| (prev & PREV_MASK_COLLECTING) ^ prev);
    header.set_prev(new_prev);
}

fn edit_gc_ref_count<L: Linked>(header: &L, delta: isize) {
    // `prev` holds a packed integer here, not a real pointer; adjust the
    // shifted trial count by `delta`.
    let prev = header.prev() as isize;
    let new_prev = prev + (1 << PREV_SHIFT) * delta;
    header.set_prev(without_provenance(new_prev as usize));
}

#[allow(unused_variables)]
fn debug_name<L: Linked>(header: &L) -> String {
    #[cfg(feature = "debug")]
    {
        return header.value().gc_debug_name();
    }

    #[cfg(not(feature = "debug"))]
    {
        "(enable gcmodule \"debug\" feature for debugging)".to_string()
    }
}