1use std::{
2 any::{Any, TypeId},
3 cell::Cell,
4 cmp::Ordering,
5 collections::{hash_map::Entry, HashMap, HashSet, VecDeque},
6 fmt,
7 hash::{Hash, Hasher},
8 marker::PhantomData,
9 mem,
10 ops::{Deref, DerefMut},
11 ptr::{self, NonNull},
12 rc::Rc,
13 result::Result as StdResult,
14 sync::{self, Arc},
15};
16
17use crate::{
18 base::fnv::FnvMap, forget_lifetime, interner::InternedStr, types::VmIndex, Error, Result,
19};
20
21pub mod mutex;
22
#[doc(hidden)]
#[macro_export]
/// Generates the three `Trace` methods (`root`, `unroot`, `trace`) from a single
/// `$body` expression. Inside `$body` the caller invokes `mark(field, $gc)`; a
/// local `mark` shim is redefined per method so the same body dispatches to
/// `Trace::root`, `Trace::unroot` or `Trace::trace` respectively. For the two
/// rooting methods `$gc` is bound to `()` since no collector handle is needed.
macro_rules! impl_trace {
    ($self_: tt, $gc: ident, $body: expr) => {
        unsafe fn root(&mut $self_) {
            #[allow(unused)]
            unsafe fn mark<T: ?Sized + Trace>(this: &mut T, _: ()) {
                Trace::root(this)
            }
            // No gc handle during rooting; keep `$gc` bound so `$body` compiles.
            let $gc = ();
            $body
        }
        unsafe fn unroot(&mut $self_) {
            #[allow(unused)]
            unsafe fn mark<T: ?Sized + Trace>(this: &mut T, _: ()) {
                Trace::unroot(this)
            }
            let $gc = ();
            $body
        }
        fn trace(&$self_, $gc: &mut $crate::gc::Gc) {
            #[allow(unused)]
            fn mark<T: ?Sized + Trace>(this: &T, gc: &mut $crate::gc::Gc) {
                Trace::trace(this, gc)
            }
            $body
        }
    };
}
52
/// Implements `Trace` for a smart-pointer-like type by forwarding all three
/// methods (`root`/`unroot`/`trace`) through `Deref`/`DerefMut`.
macro_rules! deref_trace_mut {
    ([$($params: tt)*] $ty: ty) => {
        unsafe impl<$($params)*> Trace for $ty {
            unsafe fn root(&mut $crate::std_compat_self!()) {
                (**self).root();
            }
            unsafe fn unroot(&mut self) {
                (**self).unroot();
            }
            fn trace(&self, gc: &mut Gc) {
                (**self).trace(gc);
            }
        }
    };
}

/// Like `deref_trace_mut!` but only forwards `trace`, leaving the default
/// no-op `root`/`unroot` — for shared-pointer types (`&T`, `Arc`, `Rc`)
/// where mutable access to the pointee is not available.
macro_rules! deref_trace {
    ([$($params: tt)*] $ty: ty) => {
        unsafe impl<$($params)*> Trace for $ty {
            fn trace(&self, gc: &mut Gc) {
                (**self).trace(gc);
            }
        }
    };
}
78
/// Convenience wrapper around `impl_trace!` that marks a listed subset of a
/// struct's fields, ignoring the rest (`..`).
macro_rules! impl_trace_fields {
    ($self_: tt, $gc: ident; $($field: ident),*) => {
        impl_trace!{ $self_, $gc,
            match $self_ {
                Self { $($field,)* .. } => {
                    $(
                        mark($field, $gc);
                    )*
                }
            }
        }
    }
}
92
#[inline]
/// Allocates `size` bytes of uninitialized memory with `f64` alignment by
/// abusing a `Vec<f64>` as the backing allocator (see `deallocate` for the
/// matching free). The capacity is the ceiling of `size / size_of::<f64>()`.
///
/// # Safety
/// The returned pointer owns the allocation; it must be released with
/// `deallocate(ptr, size)` using the same `size`, or memory is leaked.
/// Values stored in the block must not require alignment greater than
/// `align_of::<f64>()`.
unsafe fn allocate(size: usize) -> *mut u8 {
    // Ceiling division: round the byte count up to whole f64 slots.
    let cap = size / mem::size_of::<f64>()
        + (if size % mem::size_of::<f64>() != 0 {
            1
        } else {
            0
        });
    ptr_from_vec(Vec::<f64>::with_capacity(cap))
}
104
#[inline]
/// Leaks the vector's buffer and returns its base address as a byte pointer.
/// Ownership of the allocation transfers to the caller (see `deallocate`).
fn ptr_from_vec(buf: Vec<f64>) -> *mut u8 {
    // `ManuallyDrop` suppresses the destructor, so the heap buffer outlives
    // this function and is owned by the returned raw pointer.
    let mut buf = mem::ManuallyDrop::new(buf);
    buf.as_mut_ptr() as *mut u8
}
112
#[inline]
/// Frees a block previously returned by `allocate(old_size)` by reconstructing
/// the original `Vec<f64>` (length 0, so element destructors never run) and
/// letting it drop.
///
/// # Safety
/// `ptr` must come from `allocate` and `old_size` must equal the size passed
/// to that call, so that the recomputed capacity matches the allocation.
unsafe fn deallocate(ptr: *mut u8, old_size: usize) {
    // Must mirror the ceiling division in `allocate` exactly.
    let cap = old_size / mem::size_of::<f64>()
        + (if old_size % mem::size_of::<f64>() != 0 {
            1
        } else {
            0
        });
    Vec::<f64>::from_raw_parts(ptr as *mut f64, 0, cap);
}
123
/// A pointer to freshly allocated, *uninitialized* memory of type `T`.
/// Consumers may only write through it (via `write`/`write_slice`/`write_str`);
/// reading before initialization would be undefined behavior.
pub struct WriteOnly<'s, T: ?Sized + 's>(*mut T, PhantomData<&'s mut T>);

impl<'s, T: ?Sized> WriteOnly<'s, T> {
    /// Wraps a raw pointer.
    ///
    /// # Safety
    /// `t` must be valid for writes of `T` for the lifetime `'s` and point to
    /// memory that this wrapper is allowed to initialize.
    pub unsafe fn new(t: *mut T) -> WriteOnly<'s, T> {
        WriteOnly(t, PhantomData)
    }

    /// Retrieves the raw pointer; the pointee may still be uninitialized.
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.0
    }
}
140
impl<'s, T> WriteOnly<'s, T> {
    /// Initializes the pointee with `t`, consuming the writer, and returns a
    /// normal mutable reference to the now-initialized value.
    pub fn write(self, t: T) -> &'s mut T {
        unsafe {
            // `ptr::write` does not drop the (uninitialized) old contents.
            ptr::write(self.0, t);
            &mut *self.0
        }
    }
}
150
151impl<'s, T: Copy> WriteOnly<'s, [T]> {
152 pub fn write_slice(self, s: &[T]) -> &'s mut [T] {
153 let self_ = unsafe { &mut *self.0 };
154 assert!(s.len() == self_.len());
155 for (to, from) in self_.iter_mut().zip(s) {
156 *to = *from;
157 }
158 self_
159 }
160}
161
162impl<'s> WriteOnly<'s, str> {
163 pub fn write_str(self, s: &str) -> &'s mut str {
164 unsafe {
165 let ptr: &mut [u8] = mem::transmute::<*mut str, &mut [u8]>(self.0);
166 assert!(s.len() == ptr.len());
167 for (to, from) in ptr.iter_mut().zip(s.as_bytes()) {
168 *to = *from;
169 }
170 &mut *self.0
171 }
172 }
173}
174
/// Identifies which garbage collector a value was allocated by. Generation 0
/// is the root (global) collector; each child thread's collector gets the next
/// generation. A parent generation's values may be referenced by child
/// generations but not vice versa.
#[derive(Clone, Copy, Default, Debug)]
#[cfg_attr(feature = "serde_derive", derive(Deserialize, Serialize))]
pub struct Generation(i32);

impl Generation {
    /// Returns whether this is the root (global) generation.
    pub fn is_root(self) -> bool {
        self.0 == 0
    }

    /// Returns a generation disjoint from all generations obtained via `next`,
    /// used for collectors that share no values with the generation tree.
    pub fn disjoint() -> Generation {
        Generation(-1)
    }

    /// Returns whether `self` is an ancestor of `other` (strictly older).
    pub fn is_parent_of(self, other: Generation) -> bool {
        self.0 < other.0
    }

    /// Returns whether values from `other` may safely be stored in values
    /// owned by `self` (i.e. `other` is `self` or one of its ancestors).
    pub fn can_contain_values_from(self, other: Generation) -> bool {
        other.0 <= self.0
    }

    /// Returns the next (child) generation.
    ///
    /// # Panics
    /// Panics if the generation counter would overflow.
    pub fn next(self) -> Generation {
        assert!(
            self.0 < i32::max_value(),
            "Too many generations have been created"
        );
        Generation(self.0 + 1)
    }
}
207
/// A mark-and-sweep garbage collector. Owns an intrusive singly-linked list of
/// allocations (`values`) and interned `TypeInfo` records describing how to
/// drop and introspect each allocation.
#[derive(Debug)]
#[cfg_attr(feature = "serde_derive", derive(DeserializeState, SerializeState))]
#[cfg_attr(
    feature = "serde_derive",
    serde(
        deserialize_state = "crate::serialization::DeSeed<'gc>",
        de_parameters = "'gc"
    )
)]
#[cfg_attr(
    feature = "serde_derive",
    serde(serialize_state = "crate::serialization::SeSeed")
)]
pub struct Gc {
    /// Head of the linked list of all live allocations (newest first).
    #[cfg_attr(feature = "serde_derive", serde(skip))]
    values: Option<AllocPtr>,
    /// Total bytes currently allocated (headers included).
    allocated_memory: usize,
    /// When `allocated_memory` exceeds this, `check_collect` triggers a sweep.
    collect_limit: usize,
    /// Hard cap: allocations that would exceed this return `Error::OutOfMemory`.
    memory_limit: usize,
    /// TypeInfo interned by `TypeId` for plain typed allocations.
    #[cfg_attr(feature = "serde_derive", serde(skip))]
    type_infos: FnvMap<TypeId, Box<TypeInfo>>,
    /// TypeInfo interned by field-name list for record allocations.
    #[cfg_attr(feature = "serde_derive", serde(skip))]
    record_infos: FnvMap<Arc<[InternedStr]>, Box<TypeInfo>>,
    /// TypeInfo interned by tag for tagged (variant) allocations.
    #[cfg_attr(feature = "serde_derive", serde(skip))]
    tag_infos: FnvMap<InternedStr, Box<TypeInfo>>,
    /// Which generation this collector belongs to.
    generation: Generation,
}
259
260impl Drop for Gc {
261 fn drop(&mut self) {
262 if let Some(values) = self.values.take() {
263 mem::forget(values);
264 if std::thread::panicking() {
265 eprintln!("Gc values were not dropped explicitly. Leaking the allocatons!");
266 } else {
267 panic!("Gc values were not dropped explicitly. Leaking the allocatons!");
268 }
269 }
270 }
271}
272
/// Constructs a (possibly fat) `*mut Self` from an untyped allocation pointer,
/// using `data` to supply any metadata (e.g. slice length).
///
/// # Safety
/// Implementations must return a pointer to the same allocation as `ptr` with
/// metadata consistent with the allocation's actual size.
pub unsafe trait FromPtr<D> {
    unsafe fn make_ptr(data: D, ptr: *mut ()) -> *mut Self;
}

// Sized types need no metadata: a plain cast suffices.
unsafe impl<D, T> FromPtr<D> for T {
    unsafe fn make_ptr(_: D, ptr: *mut ()) -> *mut Self {
        ptr as *mut Self
    }
}

// Slices take their length from the source slice used to initialize them.
unsafe impl<'s, 't, T> FromPtr<&'s &'t [T]> for [T] {
    unsafe fn make_ptr(v: &'s &'t [T], ptr: *mut ()) -> *mut [T] {
        ::std::slice::from_raw_parts_mut(ptr as *mut T, v.len())
    }
}
291
/// Describes how to allocate and initialize a value inside the Gc.
///
/// # Safety
/// `size()` must return at least the number of bytes needed to store `Value`,
/// and `initialize` must fully initialize the provided memory.
pub unsafe trait DataDef {
    /// The type of the allocated value.
    type Value: ?Sized + for<'a> FromPtr<&'a Self> + Trace;
    /// Returns how many bytes need to be allocated for `Value`.
    fn size(&self) -> usize;
    /// Consumes `self` to initialize the allocated memory, returning a
    /// reference to the initialized value.
    fn initialize<'w>(self, ptr: WriteOnly<'w, Self::Value>) -> &'w mut Self::Value;

    /// Optional record field names, used to intern a shared `TypeInfo`.
    fn fields(&self) -> Option<&[InternedStr]> {
        None
    }

    /// Optional variant tag, used to intern a shared `TypeInfo`.
    fn tag(&self) -> Option<&InternedStr> {
        None
    }
}
310
/// The simplest `DataDef`: moves an existing value into the Gc heap.
#[derive(Trace)]
#[gluon(gluon_vm)]
pub struct Move<T>(pub T);

unsafe impl<T> DataDef for Move<T>
where
    T: Trace,
{
    type Value = T;
    fn size(&self) -> usize {
        mem::size_of::<T>()
    }
    fn initialize(self, result: WriteOnly<T>) -> &mut T {
        result.write(self.0)
    }
}
329
/// Per-type metadata shared by all allocations of the same shape: how to drop
/// the value, which generation owns it, and (for records/variants) the field
/// names and tag used for reflection.
#[derive(Debug)]
struct TypeInfo {
    /// Type-erased destructor for the stored value.
    drop: unsafe fn(*mut ()),
    generation: Generation,
    tag: Option<InternedStr>,
    /// Field name -> field index for record values.
    fields: FnvMap<InternedStr, VmIndex>,
    /// The ordered field-name list, also the interning key in `record_infos`.
    fields_key: Arc<[InternedStr]>,
}

/// Header placed immediately before every Gc allocation; the value lives at
/// `GcHeader::value_offset()` bytes past the header's address.
#[derive(Debug)]
struct GcHeader {
    /// Next allocation in the collector's intrusive list.
    next: Option<AllocPtr>,
    /// Mark bit set during tracing, cleared during sweep.
    marked: Cell<bool>,
    /// Size in bytes of the stored value (excluding the header).
    value_size: usize,
    /// Points into one of the Gc's interned `TypeInfo` boxes.
    type_info: *const TypeInfo,
}

/// Owning pointer to a `GcHeader` (and the value behind it); dropping it frees
/// the allocation and everything linked after it.
struct AllocPtr {
    ptr: *mut GcHeader,
}

// NOTE(review): assumes the Gc as a whole is only accessed from one thread at
// a time even though allocations may be moved across threads.
unsafe impl Send for AllocPtr {}
352
impl AllocPtr {
    /// Allocates header + `value_size` bytes and writes an unmarked,
    /// unlinked header. `T` is only used to check that the value's alignment
    /// fits the f64-aligned allocator.
    fn new<T>(type_info: *const TypeInfo, value_size: usize) -> AllocPtr {
        // Non-generic inner function so the allocation code is not
        // monomorphized once per `T`.
        fn new(type_info: *const TypeInfo, value_size: usize) -> AllocPtr {
            unsafe {
                let alloc_size = GcHeader::value_offset() + value_size;
                let ptr = allocate(alloc_size) as *mut GcHeader;
                ptr::write(
                    ptr,
                    GcHeader {
                        next: None,
                        type_info: type_info,
                        value_size: value_size,
                        marked: Cell::new(false),
                    },
                );
                AllocPtr { ptr }
            }
        }
        // The backing allocator only guarantees f64 alignment.
        debug_assert!(mem::align_of::<T>() <= mem::align_of::<f64>());
        new(type_info, value_size)
    }

    /// Total size of this allocation, header included.
    fn size(&self) -> usize {
        GcHeader::value_offset() + self.value_size
    }
}
379
impl fmt::Debug for AllocPtr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "AllocPtr {{ ptr: {:?} }}", &**self)
    }
}

impl Drop for AllocPtr {
    fn drop(&mut self) {
        unsafe {
            // Unlink and drop the rest of the list iteratively: each `next`
            // taken here has had its own `next` detached before it drops at
            // the end of the loop iteration, so the recursive Drop call never
            // goes more than one level deep (avoids stack overflow on long
            // allocation lists).
            let mut current = self.next.take();
            while let Some(mut next) = current {
                current = next.next.take();
            }
            let size = self.size();
            // Run the type-erased destructor on the stored value...
            ((*self.type_info).drop)(self.value());
            // ...drop the header in place (its `next` is already None)...
            ptr::read(&*self.ptr);
            // ...and release the raw allocation.
            deallocate(self.ptr as *mut u8, size);
        }
    }
}

impl Deref for AllocPtr {
    type Target = GcHeader;
    fn deref(&self) -> &GcHeader {
        unsafe { &*self.ptr }
    }
}

impl DerefMut for AllocPtr {
    fn deref_mut(&mut self) -> &mut GcHeader {
        unsafe { &mut *self.ptr }
    }
}
415
impl GcHeader {
    /// Pointer to the value stored immediately after this header.
    fn value(&mut self) -> *mut () {
        unsafe {
            let ptr: *mut GcHeader = self;
            (ptr as *mut u8).offset(GcHeader::value_offset() as isize) as *mut ()
        }
    }

    /// Byte offset from the header's address to the value: the header size
    /// rounded up to f64 alignment (the allocator's maximum alignment).
    fn value_offset() -> usize {
        let hs = mem::size_of::<GcHeader>();
        let max_align = mem::align_of::<f64>();
        hs + ((max_align - (hs % max_align)) % max_align)
    }

    /// Generation of the collector that owns this allocation.
    fn generation(&self) -> Generation {
        unsafe { (*self.type_info).generation }
    }
}
434
/// A Gc pointer which is known to be the sole reference to its allocation
/// (just created, not yet shared), and therefore allows mutable access.
pub struct OwnedPtr<T: ?Sized>(NonNull<T>);

impl<T: ?Sized> Deref for OwnedPtr<T> {
    type Target = T;
    fn deref(&self) -> &T {
        unsafe { self.0.as_ref() }
    }
}

impl<T: ?Sized> DerefMut for OwnedPtr<T> {
    fn deref_mut(&mut self) -> &mut T {
        unsafe { self.0.as_mut() }
    }
}

impl<T: ?Sized> From<OwnedPtr<T>> for GcPtr<T> {
    /// Freezes the pointer into a shared `GcPtr`, giving up mutable access.
    fn from(ptr: OwnedPtr<T>) -> GcPtr<T> {
        GcPtr(ptr.0)
    }
}
456
/// Marker for types whose unrooted clone is a plain bitwise copy.
///
/// # Safety
/// Implementors must be safe to duplicate with `ptr::read` (no owned
/// resources whose duplication would double-free).
pub unsafe trait CopyUnrooted: CloneUnrooted<Value = Self> + Sized {
    unsafe fn copy_unrooted(&self) -> Self {
        ptr::read(self)
    }
}

/// Clones a value without rooting the clone in the collector.
pub trait CloneUnrooted {
    type Value;
    /// # Safety
    /// The caller must ensure the clone stays reachable (rooted) for as long
    /// as it is used, e.g. by keeping the original alive.
    unsafe fn clone_unrooted(&self) -> Self::Value;
}

impl<T: ?Sized + CloneUnrooted> CloneUnrooted for &'_ T {
    type Value = T::Value;
    #[inline]
    unsafe fn clone_unrooted(&self) -> Self::Value {
        (**self).clone_unrooted()
    }
}
478
/// A value of type `T` whose reachability is guaranteed by some root that
/// lives for `'a` (the `PhantomData` ties the value to that root's lifetime
/// without storing it).
#[derive(Debug, Eq, PartialEq)]
pub struct Borrow<'a, T>(T, PhantomData<&'a T>);

/// A shared Gc pointer kept alive by a root with lifetime `'a`.
pub type GcRef<'a, T> = Borrow<'a, GcPtr<T>>;
/// An exclusively-owned Gc pointer kept alive by a root with lifetime `'a`.
pub type OwnedGcRef<'a, T> = Borrow<'a, OwnedPtr<T>>;

impl<T> Deref for Borrow<'_, T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T> DerefMut for Borrow<'_, T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}

impl<T> CloneUnrooted for Borrow<'_, T>
where
    T: CloneUnrooted,
{
    type Value = T::Value;
    #[inline]
    unsafe fn clone_unrooted(&self) -> Self::Value {
        self.0.clone_unrooted()
    }
}

deref_trace! { ['a, T: Trace] Borrow<'a, T> }
510
// A rooted `DataDef` can be used as a `DataDef` itself by delegating to the
// wrapped definition.
unsafe impl<'a, T> DataDef for Borrow<'a, T>
where
    T: DataDef,
    T::Value: Sized,
{
    type Value = T::Value;
    fn size(&self) -> usize {
        (**self).size()
    }
    fn initialize(self, result: WriteOnly<Self::Value>) -> &mut Self::Value {
        self.0.initialize(result)
    }
}
524
impl<'gc, T> Borrow<'gc, T> {
    /// Clones `value` and uses `value` itself as the root that keeps the
    /// clone reachable for `'gc`.
    #[inline]
    pub fn new(value: &'gc T) -> Borrow<'gc, T::Value>
    where
        T: CloneUnrooted,
    {
        // SAFETY: the borrowed original outlives `'gc` and keeps the clone reachable.
        unsafe { Borrow::with_root(value.clone_unrooted(), value) }
    }

    /// Wraps a value that needs no root at all (`'static`).
    #[inline]
    pub fn from_static(value: T) -> Self
    where
        T: 'static,
    {
        Borrow(value, PhantomData)
    }

    /// Ties `value` to the lifetime of `_root`.
    ///
    /// # Safety
    /// The caller must guarantee `_root` actually keeps `value` reachable
    /// for `'gc`.
    #[inline]
    pub(crate) unsafe fn with_root<U: ?Sized>(value: T, _root: &'gc U) -> Self {
        Borrow(value, PhantomData)
    }

    /// Projects into the borrowed value, keeping the same root lifetime.
    pub fn map<U>(&self, f: impl FnOnce(&T) -> &U) -> Borrow<'gc, U::Value>
    where
        U: CloneUnrooted,
    {
        let v = f(&self.0);
        // SAFETY: the projection borrows from self, which is rooted for 'gc.
        unsafe { Borrow(v.clone_unrooted(), PhantomData) }
    }

    /// Maps the owned value.
    ///
    /// # Safety
    /// `f` must return a value that is still kept reachable by this
    /// borrow's original root.
    pub unsafe fn map_unrooted<U>(self, f: impl FnOnce(T) -> U) -> Borrow<'gc, U> {
        Borrow(f(self.0), PhantomData)
    }

    /// Extracts the value, discarding the rooting guarantee.
    ///
    /// # Safety
    /// The caller takes over responsibility for keeping the value reachable.
    pub unsafe fn unrooted(self) -> T {
        self.0
    }

    /// A unit reference carrying this borrow's root lifetime, used by the
    /// `construct_gc!` macros to thread the lifetime through.
    pub fn as_lifetime(&self) -> &'gc () {
        &()
    }
}
570
impl<'gc, T: ?Sized> From<OwnedGcRef<'gc, T>> for GcRef<'gc, T> {
    /// Freezes an exclusively-owned reference into a shared one.
    fn from(ptr: OwnedGcRef<'gc, T>) -> Self {
        Borrow(ptr.0.into(), PhantomData)
    }
}

impl<'a, T: ?Sized> Clone for GcRef<'a, T> {
    #[inline]
    fn clone(&self) -> Self {
        // SAFETY: the clone shares the original's root (same 'a lifetime).
        unsafe { Borrow(self.0.clone_unrooted(), self.1) }
    }
}

impl<'a, T: ?Sized> GcRef<'a, T> {
    /// Borrows the pointee for the full root lifetime `'a` (rather than the
    /// shorter borrow of `self`); sound because the root keeps the
    /// allocation alive for all of `'a`.
    pub fn as_ref(&self) -> &'a T {
        unsafe { forget_lifetime(&*self.0) }
    }
}
594
/// A shared pointer to a garbage-collected value. Equality, ordering, hashing
/// and formatting all delegate to the pointee; use `ptr_eq` for identity.
pub struct GcPtr<T: ?Sized>(NonNull<T>);

unsafe impl<T: ?Sized + Send + Sync> Send for GcPtr<T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for GcPtr<T> {}

impl<T: ?Sized> Deref for GcPtr<T> {
    type Target = T;
    fn deref(&self) -> &T {
        unsafe { self.0.as_ref() }
    }
}

impl<T: ?Sized> ::std::borrow::Borrow<T> for GcPtr<T> {
    fn borrow(&self) -> &T {
        &**self
    }
}

impl<T: ?Sized + Eq> Eq for GcPtr<T> {}
impl<T: ?Sized + PartialEq> PartialEq for GcPtr<T> {
    fn eq(&self, other: &GcPtr<T>) -> bool {
        **self == **other
    }
}

impl<T: ?Sized + Ord> Ord for GcPtr<T> {
    fn cmp(&self, other: &GcPtr<T>) -> Ordering {
        (**self).cmp(&**other)
    }
}
impl<T: ?Sized + PartialOrd> PartialOrd for GcPtr<T> {
    fn partial_cmp(&self, other: &GcPtr<T>) -> Option<Ordering> {
        (**self).partial_cmp(&**other)
    }
}

impl<T: ?Sized + Hash> Hash for GcPtr<T> {
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        (**self).hash(state)
    }
}
impl<T: ?Sized + fmt::Debug> fmt::Debug for GcPtr<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        (**self).fmt(f)
    }
}
impl<T: ?Sized + fmt::Display> fmt::Display for GcPtr<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        (**self).fmt(f)
    }
}

unsafe impl<T: ?Sized> CopyUnrooted for GcPtr<T> {}
impl<T: ?Sized> CloneUnrooted for GcPtr<T> {
    type Value = Self;
    #[inline]
    unsafe fn clone_unrooted(&self) -> Self {
        GcPtr(self.0)
    }
}
663
impl<T: ?Sized> GcPtr<T> {
    /// # Safety
    /// The caller must guarantee no other live reference aliases the pointee.
    pub unsafe fn as_mut(&mut self) -> &mut T {
        self.0.as_mut()
    }

    /// # Safety
    /// `ptr` must be non-null and point into a live Gc allocation (i.e. at
    /// `GcHeader::value_offset()` past a valid header).
    pub unsafe fn from_raw(ptr: *const T) -> GcPtr<T> {
        GcPtr(NonNull::new_unchecked(ptr as *mut _))
    }

    /// Generation of the collector that allocated this value.
    pub fn generation(&self) -> Generation {
        self.header().generation()
    }

    /// The variant tag recorded at allocation time, if any.
    pub fn poly_tag(&self) -> Option<&InternedStr> {
        self.type_info().tag.as_ref()
    }

    /// Field name -> index map recorded at allocation time.
    pub fn field_map(&self) -> &FnvMap<InternedStr, VmIndex> {
        &self.type_info().fields
    }

    /// Ordered field names recorded at allocation time.
    pub fn field_names(&self) -> &Arc<[InternedStr]> {
        &self.type_info().fields_key
    }

    fn type_info(&self) -> &TypeInfo {
        // The TypeInfo is interned in the owning Gc and outlives the value.
        unsafe { &*self.header().type_info }
    }

    /// Identity comparison (same allocation), unlike `==` which compares values.
    pub fn ptr_eq(&self, other: &GcPtr<T>) -> bool {
        ptr::eq::<T>(&**self, &**other)
    }

    #[doc(hidden)]
    pub(crate) unsafe fn clone(&self) -> Self {
        GcPtr(self.0)
    }

    /// # Safety
    /// The returned copy is not rooted; the caller must keep it reachable.
    pub unsafe fn unrooted(&self) -> Self {
        GcPtr(self.0)
    }

    /// See `Borrow::as_lifetime`.
    pub fn as_lifetime(&self) -> &() {
        &()
    }

    /// Recovers the allocation's header, which sits `value_offset` bytes
    /// before the value.
    fn header(&self) -> &GcHeader {
        unsafe {
            let p = self.0.as_ptr() as *mut u8;
            let header = p.offset(-(GcHeader::value_offset() as isize));
            &*(header as *const GcHeader)
        }
    }

    /// # Safety
    /// The pointee must actually be a valid `U`.
    pub unsafe fn cast<U>(ptr: Self) -> GcPtr<U> {
        GcPtr(ptr.0.cast())
    }
}
725
impl<'a, T: Trace + Send + Sync + 'a> GcPtr<T> {
    /// Erases the concrete type, keeping only the `Trace` vtable.
    pub fn as_trace(self) -> GcPtr<dyn Trace + Send + Sync + 'a> {
        unsafe {
            let ptr: &(dyn Trace + Send + Sync) = self.0.as_ref();
            GcPtr(NonNull::new_unchecked(ptr as *const _ as *mut _))
        }
    }
}
impl GcPtr<str> {
    /// Type-erases a string pointer. The fat pointer is built from the
    /// string's first byte (`u8` implements `Trace` as a no-op), which is
    /// sufficient since strings contain no Gc pointers to trace and the
    /// real destructor is recorded in the allocation's `TypeInfo`.
    pub fn as_trace_string(self) -> GcPtr<dyn Trace + Send + Sync> {
        unsafe {
            GcPtr(NonNull::new_unchecked(
                self.as_ptr() as *const (dyn Trace + Send + Sync) as *mut _,
            ))
        }
    }
}
747
/// Lets a root set wrap a collection in custom setup/teardown (e.g. locking)
/// before the collector runs `f`.
pub trait CollectScope {
    fn scope<F>(&self, gc: &mut Gc, f: F)
    where
        F: FnOnce(&mut Gc);
}

/// Types that the garbage collector can traverse.
///
/// # Safety
/// `trace` must mark every reachable `GcPtr`; missing one lets the collector
/// free live data. `root`/`unroot` must keep rooting balanced.
pub unsafe trait Trace {
    unsafe fn root(&mut self) {}
    unsafe fn unroot(&mut self) {}

    /// Marks all `GcPtr`s reachable from `self`.
    fn trace(&self, gc: &mut Gc) {
        let _ = gc;
    }
}
765
#[macro_export]
/// Internal helper for `construct_gc!` on tuple variants. Accumulates field
/// expressions left to right; fields prefixed with `@` are Gc values which
/// are `clone_unrooted` into the constructed value, with the original kept as
/// the root. `$acc` collects the built field expressions, `$ptr` the
/// identifiers of the roots seen so far.
macro_rules! construct_enum_gc {
    (impl $typ: ident $(:: $variant: ident)? [$($acc: tt)*] [$($ptr: ident)*] @ $expr: expr, $($rest: tt)*) => { {
        let ref ptr = $expr;
        $crate::construct_enum_gc!(impl $typ $(:: $variant)?
            [$($acc)* unsafe { $crate::gc::CloneUnrooted::clone_unrooted(ptr) },]
            [$($ptr)* ptr]
            $($rest)*
        )
    } };

    (impl $typ: ident $(:: $variant: ident)? [$($acc: tt)*] [$($ptr: ident)*] $expr: expr, $($rest: tt)*) => {
        $crate::construct_enum_gc!(impl $typ $(:: $variant)?
            [$($acc)* $expr,]
            [$($ptr)*]
            $($rest)*
        )
    };

    // Trailing-field variants of the two rules above (no comma follows).
    (impl $typ: ident $(:: $variant: ident)? [$($acc: tt)*] [$($ptr: ident)*] @ $expr: expr) => { {
        let ref ptr = $expr;
        $crate::construct_enum_gc!(impl $typ $(:: $variant)?
            [$($acc)* unsafe { $crate::gc::CloneUnrooted::clone_unrooted(ptr) },]
            [$($ptr)* ptr]
        )
    } };

    (impl $typ: ident $(:: $variant: ident)? [$($acc: tt)*] [$($ptr: ident)*] $expr: expr) => {
        $crate::construct_enum_gc!(impl $typ $(:: $variant)?
            [$($acc)* $expr,]
            [$($ptr)*]
        )
    };

    // Terminal rule: build the value and root it with the first `@` root
    // (all roots share the same region, so any one suffices).
    (impl $typ: ident $(:: $variant: ident)? [$($acc: tt)*] [$($ptr: ident)*] ) => { {
        let root = [$( $ptr.as_lifetime() )*].first().map_or(&(), |v| *v);
        #[allow(unused_unsafe)]
        let v = $typ $(:: $variant)? ( $($acc)* );
        // Verify at compile time that the constructed expression really is
        // the named variant (catches typos in `$variant`).
        match &v {
            $typ $(:: $variant)? (..) if true => (),
            _ => unreachable!(),
        }
        #[allow(unused_unsafe)]
        unsafe { $crate::gc::Borrow::with_root(v, root) }
    } };
}
814
#[macro_export]
/// Constructs a struct or enum containing unrooted Gc values, returning a
/// `Borrow` rooted by the first `@`-marked field. Fields written `@ name:
/// expr` (or `@ name`) are `clone_unrooted` into the value while the
/// original expression serves as the root; plain fields are passed through.
macro_rules! construct_gc {
    (impl $typ: ident [$($acc: tt)*] [$($ptr: ident)*] @ $field: ident : $expr: expr, $($rest: tt)*) => { {
        let $field = $expr;
        $crate::construct_gc!(impl $typ
            [$($acc)* $field: unsafe { $crate::gc::CloneUnrooted::clone_unrooted(&$field) },]
            [$($ptr)* $field]
            $($rest)*
        )
    } };

    (impl $typ: ident [$($acc: tt)*] [$($ptr: ident)*] @ $field: ident, $($rest: tt)*) => {
        $crate::construct_gc!(impl $typ
            [$($acc)* $field: unsafe { $crate::gc::CloneUnrooted::clone_unrooted(&$field) },]
            [$($ptr)* $field]
            $($rest)*
        )
    };

    (impl $typ: ident [$($acc: tt)*] [$($ptr: ident)*] $field: ident $(: $expr: expr)?, $($rest: tt)*) => {
        $crate::construct_gc!(impl $typ
            [$($acc)* $field $(: $expr)?,]
            [$($ptr)*]
            $($rest)*
        )
    };

    // Terminal rule: build the struct and root it with the first `@` root.
    (impl $typ: ident [$($acc: tt)*] [$($ptr: ident)*] ) => { {
        let root = [$( $ptr.as_lifetime() )*].first().map_or(&(), |v| *v);
        #[allow(unused_unsafe)]
        let v = $typ { $($acc)* };
        #[allow(unused_unsafe)]
        unsafe { $crate::gc::Borrow::with_root(v, root) }
    } };

    ($typ: ident { $( $tt: tt )* } ) => {
        $crate::construct_gc!{impl $typ [] [] $( $tt )* }
    };

    ($typ: ident $(:: $variant: ident)? ( $( $tt: tt )* ) ) => {
        $crate::construct_enum_gc!{impl $typ $(:: $variant)? [] [] $( $tt )* }
    };
}
858
// Forwarding Trace impls for the common pointer/container types.
deref_trace! { ['a, T: ?Sized + Trace] &'a T }
deref_trace_mut! { ['a, T: ?Sized + Trace] &'a mut T }
deref_trace_mut! { ['a, T: ?Sized + Trace] Box<T> }
deref_trace! { ['a, T: ?Sized + Trace] Arc<T> }
deref_trace! { ['a, T: ?Sized + Trace] Rc<T> }
deref_trace_mut! { ['a, T: Trace] Vec<T> }

/// Implements `Trace` for tuples up to the given arity by marking every
/// element, recursing to generate all smaller arities as well.
macro_rules! tuple_trace {
    () => {};
    ($first: ident $($id: ident)*) => {
        tuple_trace!($($id)*);
        unsafe impl <$first $(,$id)*> Trace for ($first, $($id,)*)
        where $first: Trace
              $(, $id: Trace)* {
            impl_trace! { self, gc,{
                #[allow(non_snake_case)]
                let ($first, $($id,)*) = self;
                mark($first, gc);
                $(
                    mark($id, gc);
                )*
            } }
        }
    }
}

tuple_trace!(A B C D E F G H I J);

/// No-op `Trace` impls for types that can never contain Gc pointers.
macro_rules! empty_trace {
    ($($id: ty)*) => {
        $(unsafe impl Trace for $id {
            impl_trace! { self, _gc, {} }
        })*
    }
}

empty_trace! { () u8 u16 u32 u64 usize i8 i16 i32 i64 isize f32 f64 str String bool }
896
unsafe impl<T> Trace for Option<T>
where
    T: Trace,
{
    impl_trace! { self, gc,
        if let Some(x) = self {
            mark(x, gc);
        }
    }
}

unsafe impl<T, E> Trace for StdResult<T, E>
where
    T: Trace,
    E: Trace,
{
    impl_trace! { self, gc,
        match self {
            Ok(x) => mark(x, gc),
            Err(x) => mark(x, gc),
        }
    }
}

unsafe impl<T: ?Sized> Trace for PhantomData<T> {
    impl_trace! { self, _gc, {} }
}

// Raw pointers are opaque to the collector: tracing through them is the
// responsibility of whoever owns the pointee.
unsafe impl<T: ?Sized> Trace for *const T {
    impl_trace! { self, _gc, { } }
}

unsafe impl<T: ?Sized> Trace for *mut T {
    impl_trace! { self, _gc, { } }
}

unsafe impl<T> Trace for Cell<T>
where
    T: Trace + Copy,
{
    // NOTE(review): root/unroot act on a temporary copy of the cell's
    // contents (`self.get()`), not the cell itself; trace is unaffected
    // since marking goes through the Gc. Confirm this is intended for
    // rootable cell contents.
    impl_trace! { self, gc,
        mark(&mut self.get(), gc)
    }
}
941
942unsafe impl<T> Trace for sync::Mutex<T>
944where
945 T: Trace,
946{
947 fn trace(&self, gc: &mut Gc) {
948 self.lock().unwrap_or_else(|err| err.into_inner()).trace(gc)
949 }
950}
951
952unsafe impl<T> Trace for sync::RwLock<T>
954where
955 T: Trace,
956{
957 fn trace(&self, gc: &mut Gc) {
958 self.read().unwrap_or_else(|err| err.into_inner()).trace(gc)
959 }
960}
961
962unsafe impl<T> Trace for sync::RwLockReadGuard<'_, T>
963where
964 T: Trace,
965{
966 fn trace(&self, gc: &mut Gc) {
967 (&**self).trace(gc)
968 }
969}
970
971unsafe impl<T> Trace for parking_lot::RwLock<T>
972where
973 T: Trace,
974{
975 fn trace(&self, gc: &mut Gc) {
976 self.read().trace(gc)
977 }
978}
979
980unsafe impl<T> Trace for parking_lot::RwLockReadGuard<'_, T>
981where
982 T: Trace,
983{
984 fn trace(&self, gc: &mut Gc) {
985 (&**self).trace(gc)
986 }
987}
988
unsafe impl<U> Trace for [U]
where
    U: Trace,
{
    impl_trace! { self, gc,
        for x in self {
            mark(x, gc);
        }
    }
}

unsafe impl<T> Trace for VecDeque<T>
where
    T: Trace,
{
    impl_trace! { self, gc,
        mark(&mut self.as_slices(), gc)
    }
}

// Only the values are traced; keys are assumed not to contain Gc pointers.
unsafe impl<K, V, H> Trace for HashMap<K, V, H>
where
    V: Trace,
{
    impl_trace! { self, gc,
        for (_, x) in self {
            mark(x, gc);
        }
    }
}

unsafe impl<V, H> Trace for HashSet<V, H>
where
    V: Trace,
{
    fn trace(&self, gc: &mut Gc) {
        for x in self.iter() {
            x.trace(gc);
        }
    }
}

unsafe impl<T: ?Sized> Trace for GcPtr<T>
where
    T: Trace,
{
    // Rooting of GcPtr is managed externally (by the roots passed to
    // `collect`), so these are deliberate no-ops.
    unsafe fn root(&mut self) {
    }
    unsafe fn unroot(&mut self) {
    }
    fn trace(&self, gc: &mut Gc) {
        // `mark` returning false means this allocation was not yet visited,
        // so recurse into it; the mark bit prevents cycles from looping.
        if !gc.mark(self) {
            (**self).trace(gc);
        }
    }
}
1049
impl Gc {
    /// Creates a new collector for `generation` with a hard memory cap of
    /// `memory_limit` bytes.
    pub fn new(generation: Generation, memory_limit: usize) -> Gc {
        Gc {
            values: None,
            allocated_memory: 0,
            collect_limit: 100,
            memory_limit: memory_limit,
            type_infos: FnvMap::default(),
            record_infos: FnvMap::default(),
            tag_infos: FnvMap::default(),
            generation: generation,
        }
    }

    /// Bytes currently allocated by this collector (headers included).
    pub fn allocated_memory(&self) -> usize {
        self.allocated_memory
    }

    /// Sets the hard allocation cap; allocations exceeding it fail with
    /// `Error::OutOfMemory`.
    pub fn set_memory_limit(&mut self, memory_limit: usize) {
        self.memory_limit = memory_limit;
    }

    pub fn generation(&self) -> Generation {
        self.generation
    }

    /// Creates a collector for the next (child) generation which may refer to
    /// values in this one.
    pub fn new_child_gc(&self) -> Gc {
        Gc::new(self.generation.next(), self.memory_limit)
    }

    /// Allocates `def`, possibly running a collection first (with `roots` and
    /// `def` itself as the root set so `def`'s contents survive the sweep).
    ///
    /// # Safety
    /// `roots` must cover every value reachable by this collector.
    pub unsafe fn alloc_and_collect<R, D>(
        &mut self,
        roots: R,
        def: D,
    ) -> Result<OwnedGcRef<D::Value>>
    where
        R: Trace + CollectScope,
        D: DataDef + Trace,
        D::Value: Sized + Any,
    {
        // Pairs the caller's roots with `def` so the definition's contents
        // are traced during a collection triggered by this allocation.
        #[derive(Trace)]
        #[gluon(gluon_vm)]
        struct Scope1<A, B>(A, B);

        impl<A, B> CollectScope for Scope1<A, B>
        where
            A: CollectScope,
        {
            fn scope<F>(&self, gc: &mut Gc, f: F)
            where
                F: FnOnce(&mut Gc),
            {
                self.0.scope(gc, f)
            }
        }

        self.check_collect(Scope1(roots, &def));
        self.alloc_owned(def)
    }

    /// Allocates `def`, returning a shared rooted reference.
    pub fn alloc<D>(&mut self, def: D) -> Result<GcRef<D::Value>>
    where
        D: DataDef,
        D::Value: Sized + Any,
    {
        self.alloc_owned(def).map(GcRef::from)
    }

    /// Allocates `def`, returning an exclusively-owned rooted reference.
    /// Fails with `Error::OutOfMemory` if the memory limit would be exceeded.
    pub fn alloc_owned<D>(&mut self, def: D) -> Result<OwnedGcRef<D::Value>>
    where
        D: DataDef,
        D::Value: Sized + Any,
    {
        let size = def.size();
        let needed = self.allocated_memory.saturating_add(size);
        if needed >= self.memory_limit {
            return Err(Error::OutOfMemory {
                limit: self.memory_limit,
                needed: needed,
            });
        }
        Ok(self.alloc_ignore_limit_(size, def))
    }

    /// Allocates `def` without consulting the memory limit.
    pub fn alloc_ignore_limit<D>(&mut self, def: D) -> GcRef<D::Value>
    where
        D: DataDef,
        D::Value: Sized + Any,
    {
        GcRef::from(self.alloc_ignore_limit_(def.size(), def))
    }

    /// Returns an interned `TypeInfo` for an allocation, keyed by (in order
    /// of priority) its record fields, its tag, or its `TypeId`. The returned
    /// pointer stays valid because the boxes are never removed from the maps.
    fn get_type_info(
        &mut self,
        tag: Option<&InternedStr>,
        fields: Option<&[InternedStr]>,
        type_id: TypeId,
        drop: unsafe fn(*mut ()),
    ) -> *const TypeInfo {
        match fields {
            Some(fields) => match self
                .record_infos
                .get(fields)
                .map(|info| &**info as *const _)
            {
                Some(info) => info,
                None => {
                    let owned_fields: Arc<[InternedStr]> = Arc::from(
                        fields
                            .iter()
                            .map(|v| unsafe { v.clone_unrooted() })
                            .collect::<Vec<_>>(),
                    );
                    &**self
                        .record_infos
                        .entry(owned_fields.clone())
                        .or_insert(Box::new(TypeInfo {
                            drop,
                            generation: self.generation,
                            tag: unsafe { tag.map(|tag| tag.clone_unrooted()) },
                            fields: unsafe {
                                fields
                                    .iter()
                                    .enumerate()
                                    .map(|(i, ref s)| (s.clone_unrooted(), i as VmIndex))
                                    .collect()
                            },
                            fields_key: owned_fields,
                        }))
                }
            },
            None => match tag {
                Some(tag) => match self.tag_infos.entry(unsafe { tag.clone_unrooted() }) {
                    Entry::Occupied(entry) => &**entry.get(),
                    Entry::Vacant(entry) => &**entry.insert(Box::new(TypeInfo {
                        drop,
                        generation: self.generation,
                        tag: Some(unsafe { tag.clone_unrooted() }),
                        fields: FnvMap::default(),
                        fields_key: Arc::from(Vec::new()),
                    })),
                },
                None => match self.type_infos.entry(type_id) {
                    Entry::Occupied(entry) => &**entry.get(),
                    Entry::Vacant(entry) => &**entry.insert(Box::new(TypeInfo {
                        drop,
                        generation: self.generation,
                        tag: None,
                        fields: FnvMap::default(),
                        fields_key: Arc::from(Vec::new()),
                    })),
                },
            },
        }
    }

    /// Core allocation: allocates header + value, initializes the value via
    /// `def`, links the allocation into `self.values`, and unroots the value
    /// (its reachability is now the Gc's responsibility).
    fn alloc_ignore_limit_<D>(&mut self, size: usize, def: D) -> OwnedGcRef<D::Value>
    where
        D: DataDef,
        D::Value: Sized + Any,
    {
        // Type-erased destructor stored in the TypeInfo.
        unsafe fn drop<T>(t: *mut ()) {
            ptr::drop_in_place(t as *mut T);
        }

        let type_info = self.get_type_info(
            def.tag(),
            def.fields(),
            TypeId::of::<D::Value>(),
            drop::<D::Value>,
        );

        let mut ptr = AllocPtr::new::<D::Value>(type_info, size);
        ptr.next = self.values.take();
        self.allocated_memory += ptr.size();
        unsafe {
            let p: *mut D::Value = D::Value::make_ptr(&def, ptr.value());
            let ret: *const D::Value = &*def.initialize(WriteOnly::new(p));
            // `initialize` must return the pointer it was given.
            assert!(ret == p);
            self.values = Some(ptr);
            let mut ptr = OwnedPtr(NonNull::new_unchecked(p));
            // The value is owned by the Gc now; unroot its contents.
            D::Value::unroot(&mut ptr);
            // Root the returned reference with the Gc borrow itself.
            OwnedGcRef::with_root(ptr, self)
        }
    }

    /// Runs a collection if the soft limit has been reached; returns whether
    /// a collection ran.
    ///
    /// # Safety
    /// See `collect`.
    pub unsafe fn check_collect<R>(&mut self, roots: R) -> bool
    where
        R: Trace + CollectScope,
    {
        if self.allocated_memory >= self.collect_limit {
            self.collect(roots);
            true
        } else {
            false
        }
    }

    /// Mark-and-sweep collection.
    ///
    /// # Safety
    /// `roots` must contain every value from this Gc that is still in use;
    /// anything unreachable from `roots` is freed.
    pub unsafe fn collect<R>(&mut self, roots: R)
    where
        R: Trace + CollectScope,
    {
        info!("Start collect {:?}", self.generation);
        roots.scope(self, |self_| {
            roots.trace(self_);
            self_.sweep();
            // Next collection triggers when memory doubles.
            self_.collect_limit = 2 * self_.allocated_memory;
        })
    }

    /// Marks `value`, returning `true` if it was already marked or belongs to
    /// a parent generation (either way the caller must not traverse into it).
    pub fn mark<T: ?Sized>(&mut self, value: &GcPtr<T>) -> bool {
        let header = value.header();
        // Parent-generation values are never freed by a child collector.
        if header.generation().is_parent_of(self.generation()) || header.marked.get() {
            true
        } else {
            header.marked.set(true);
            false
        }
    }

    /// Frees every unmarked allocation and clears the mark bit on survivors.
    ///
    /// # Safety
    /// Must only run after a complete trace of all roots; unmarked live
    /// values would be freed.
    pub unsafe fn sweep(&mut self) {
        // Identity helper: forces a move of the mutable borrow so it can be
        // re-borrowed as the next link in the list walk.
        fn moving<T>(t: T) -> T {
            t
        }

        let mut count = 0;
        let mut free_count = 0;

        let mut first = self.values.take();
        {
            let mut maybe_header = &mut first;
            loop {
                let mut free = false;
                let mut replaced_next = None;
                match *maybe_header {
                    Some(ref mut header) => {
                        if !header.marked.get() {
                            // Detach the tail so dropping this node frees
                            // only this node.
                            replaced_next = header.next.take();
                            free = true;
                        } else {
                            header.marked.set(false);
                        }
                    }
                    None => break,
                }
                count += 1;
                if free {
                    free_count += 1;
                    self.free(maybe_header.take());
                    // Splice the detached tail back into the list.
                    *maybe_header = replaced_next;
                } else {
                    maybe_header = &mut moving(maybe_header).as_mut().unwrap().next;
                }
            }
        }
        info!("GC: Freed {} / Traversed {}", free_count, count);
        self.values = first;
    }

    /// Frees every allocation unconditionally.
    ///
    /// # Safety
    /// No `GcPtr` into this collector may be used afterwards.
    pub unsafe fn clear(&mut self) {
        self.values = None;
    }

    /// Drops one allocation and updates the memory accounting.
    fn free(&mut self, header: Option<AllocPtr>) {
        if let Some(ref ptr) = header {
            self.allocated_memory -= ptr.size();
        }
        debug!("FREE: {:?}", header);
        drop(header);
    }
}
1346
#[cfg(test)]
mod tests {
    use super::*;
    use std::cell::Cell;
    use std::fmt;
    use std::mem;
    use std::rc::Rc;
    use std::usize;

    use self::Value::*;

    impl CollectScope for () {
        fn scope<F>(&self, gc: &mut Gc, f: F)
        where
            F: FnOnce(&mut Gc),
        {
            f(gc)
        }
    }

    impl<'a, T> CollectScope for &'a mut [T] {
        fn scope<F>(&self, gc: &mut Gc, f: F)
        where
            F: FnOnce(&mut Gc),
        {
            f(gc)
        }
    }

    // Walks the allocation list to count live allocations.
    fn object_count(gc: &Gc) -> usize {
        let mut header: &GcHeader = match gc.values {
            Some(ref x) => &**x,
            None => return 0,
        };
        let mut count = 1;
        loop {
            match header.next {
                Some(ref ptr) => {
                    count += 1;
                    header = &**ptr;
                }
                None => break,
            }
        }
        count
    }

    // A value holding a Gc pointer, to exercise tracing through the heap.
    #[derive(Trace)]
    #[gluon(gluon_vm)]
    struct Data_ {
        fields: GcPtr<Vec<Value>>,
    }

    impl PartialEq for Data_ {
        fn eq(&self, other: &Data_) -> bool {
            self.fields.0 == other.fields.0
        }
    }
    impl fmt::Debug for Data_ {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            self.fields.0.fmt(f)
        }
    }

    struct Def<'a> {
        elems: &'a [Value],
    }
    unsafe impl<'a> DataDef for Def<'a> {
        type Value = Vec<Value>;
        fn size(&self) -> usize {
            mem::size_of::<Self::Value>()
        }
        fn initialize(self, result: WriteOnly<Vec<Value>>) -> &mut Vec<Value> {
            unsafe { result.write(self.elems.iter().map(|v| v.clone_unrooted()).collect()) }
        }
    }

    #[derive(PartialEq, Debug, Trace)]
    #[gluon(gluon_vm)]
    enum Value {
        Int(i32),
        Data(Data_),
    }

    unsafe impl CopyUnrooted for Value {}

    impl CloneUnrooted for Value {
        type Value = Self;
        #[inline]
        unsafe fn clone_unrooted(&self) -> Self {
            self.copy_unrooted()
        }
    }

    fn new_data(p: GcRef<Vec<Value>>) -> Value {
        unsafe {
            Data(Data_ {
                fields: p.unrooted(),
            })
        }
    }

    // Checks the header/value layout arithmetic round-trips.
    #[test]
    fn gc_header() {
        let mut gc: Gc = Gc::new(Generation::default(), usize::MAX);
        let ptr = unsafe { gc.alloc(Def { elems: &[Int(1)] }).unwrap().unrooted() };
        let header: *const _ = ptr.header();
        let other: &mut GcHeader = gc.values.as_mut().unwrap();
        assert_eq!(&*ptr as *const _ as *const (), other.value());
        assert_eq!(header, other as *const _);

        unsafe { gc.clear() }
    }

    // Allocates two linked values, collects with both rooted (nothing freed),
    // then with no roots (everything freed).
    #[test]
    fn basic() {
        let mut gc: Gc = Gc::new(Generation::default(), usize::MAX);
        let mut stack: Vec<Value> = Vec::new();
        stack.push(new_data(gc.alloc(Def { elems: &[Int(1)] }).unwrap()));
        let d2 = new_data(
            gc.alloc(Def {
                elems: std::slice::from_ref(&stack[0]),
            })
            .unwrap(),
        );
        stack.push(d2);
        assert_eq!(object_count(&gc), 2);
        unsafe {
            gc.collect(&mut *stack);
        }
        assert_eq!(object_count(&gc), 2);
        match stack[0] {
            Data(ref data) => assert_eq!(data.fields[0], Int(1)),
            _ => ice!(),
        }
        match stack[1] {
            Data(ref data) => assert_eq!(data.fields[0], stack[0]),
            _ => ice!(),
        }
        stack.pop();
        stack.pop();
        unsafe {
            gc.collect(&mut *stack);
        }
        assert_eq!(object_count(&gc), 0);

        unsafe { gc.clear() }
    }

    // Records in a shared flag whether its destructor ran.
    #[derive(Trace)]
    #[gluon(gluon_vm)]
    pub struct Dropable {
        dropped: Rc<Cell<bool>>,
    }

    impl Drop for Dropable {
        fn drop(&mut self) {
            self.dropped.set(true);
        }
    }

    // Verifies the stored destructor runs when an allocation is swept.
    #[test]
    fn drop() {
        let dropped = Rc::new(Cell::new(false));
        let mut gc = Gc::new(Generation::default(), usize::MAX);
        {
            let ptr = gc
                .alloc(Move(Dropable {
                    dropped: dropped.clone(),
                }))
                .unwrap();
            assert_eq!(false, ptr.dropped.get());
        }
        assert_eq!(false, dropped.get());
        unsafe {
            gc.collect(());
        }
        assert_eq!(true, dropped.get());

        unsafe { gc.clear() }
    }
}