use std::ptr::{null_mut, drop_in_place};
use std::alloc::{alloc_zeroed, dealloc, Layout};
use std::rc::Weak;
use std::cell::RefCell;
pub use libc::c_void;
use super::vm::Vm;

// node
struct GcNode {
    next: *mut GcNode,
    size: usize,
    unreachable: bool, // starts out false (nodes are allocated zeroed);
    // nodes still flagged unreachable after the mark phase are pruned (freed)
    pub native_refs: usize,
    tracer: GenericFunction,
    // the tracer is called during the mark phase so a node can
    // mark the GC references it owns
    finalizer: GenericFunction,
    // finalizer gets called with a pointer to
    // the data that's about to be freed
}

impl GcNode {

    pub fn alloc_size<T: Sized>() -> usize {
        // number of bytes needed to allocate node for <T>
        use std::mem::size_of;
        size_of::<GcNode>() + size_of::<T>()
    }

}

type GenericFunction = fn(*mut c_void);
// a generic function that takes in some pointer
// this might be a finalizer or a tracer function
// TODO maybe replace this with Any

// manager
const INITIAL_THRESHOLD: usize = 100;
const USED_SPACE_RATIO: f64 = 0.7;
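// e.g. with these defaults, if 90 bytes are still live right after a collection
// triggered at threshold 100, then 90/100 > 0.7 and the threshold is raised to
// 90 / 0.7 = roughly 128 bytes (see malloc_raw below)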
pub struct GcManager {
    first_node: *mut GcNode,
    last_node: *mut GcNode,
    root: Weak<RefCell<Vm>>,
    bytes_allocated: usize,
    threshold: usize,
    enabled: bool
}

impl GcManager {

    pub fn new(root: Weak<RefCell<Vm>>) -> GcManager {
        GcManager {
            first_node: null_mut(),
            last_node: null_mut(),
            root,
            bytes_allocated: 0,
            threshold: INITIAL_THRESHOLD,
            enabled: false
        }
    }

    unsafe fn malloc_raw<T: Sized + GcTraceable>
        (&mut self, x: T, finalizer: GenericFunction) -> *mut T {
        // free up if over threshold
        if cfg!(test) {
            self.collect();
        } else if self.bytes_allocated > self.threshold {
            self.collect();
            // still too full after the collection, so grow the threshold
            if ((self.bytes_allocated as f64) / (self.threshold as f64)) > USED_SPACE_RATIO {
                self.threshold = (self.bytes_allocated as f64 / USED_SPACE_RATIO) as usize;
            }
        }
        // allocate the node header and the body in one block
        // (assumes the body never needs stricter alignment than GcNode itself)
        let layout = Layout::from_size_align(GcNode::alloc_size::<T>(),
                                             std::mem::align_of::<GcNode>()).unwrap();
        let bytes : *mut GcNode = alloc_zeroed(layout) as *mut GcNode;
        // append node
        if self.first_node.is_null() {
            self.first_node = bytes;
            self.last_node = bytes;
            (*bytes).next = null_mut();
        } else {
            (*self.last_node).next = bytes;
            (*bytes).next = null_mut();
            self.last_node = bytes;
        }
        (*bytes).native_refs = 1;
        (*bytes).tracer = T::trace;
        (*bytes).finalizer = finalizer;
        (*bytes).size = GcNode::alloc_size::<T>();
        self.bytes_allocated += (*bytes).size;
        // move the value into the body without reading (and dropping) the
        // zeroed memory already there
        std::ptr::write(bytes.add(1) as *mut T, x);
        // return the body, i.e. start byte + sizeof(GcNode)
        bytes.add(1) as *mut T
    }

    pub fn malloc<T: Sized + GcTraceable>(&mut self, val: T) -> Gc<T> {
        Gc {
            ptr: unsafe {
                self.malloc_raw(val, |ptr| drop_in_place::<T>(ptr as *mut T))
            }
        }
    }
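
    // Usage sketch (not part of this file's API; `manager` is a hypothetical
    // GcManager already wired to a Vm): clone/drop only adjust the native
    // refcount, nothing is freed until collect() runs, and collect() is a
    // no-op until the manager has been enable()d.
    //
    //     let s: Gc<String> = manager.malloc(String::from("hi"));
    //     let t = s.clone();              // native_refs: 1 -> 2
    //     assert!(s.ptr_eq(&t));
    //     drop(t);                        // native_refs: 2 -> 1
    //     s.as_mut().push_str(" there");  // interior mutation through the handle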

    // state
    pub fn enable(&mut self) { self.enabled = true; }
    pub fn disable(&mut self) { self.enabled = false; }

    // gc algorithm
    unsafe fn collect(&mut self) {
        if !self.enabled { return; }
        // mark phase:
        let mut node : *mut GcNode = self.first_node;
        // reset all nodes
        while !node.is_null() {
            let next : *mut GcNode = (*node).next;
            (*node).unreachable = true;
            node = next;
        }
        // mark nodes with at least one native reference and trace their children
        node = self.first_node;
        while !node.is_null() {
            let next : *mut GcNode = (*node).next;
            if (*node).native_refs > 0 {
                (*node).unreachable = false;
                ((*node).tracer)(node.add(1) as *mut c_void);
            }
            node = next;
        }
        // mark from root
        {
            let rootcell = self.root.upgrade().unwrap();
            rootcell.borrow().mark();
        }
        // sweep phase:
        let mut node : *mut GcNode = self.first_node;
        let mut prev : *mut GcNode = null_mut();
        while !node.is_null() {
            let next : *mut GcNode = (*node).next;
            let mut freed = false;
            if (*node).native_refs == 0 && (*node).unreachable {
                freed = true;
                let body = node.add(1);

                // unlink the node from the linked list
                if prev.is_null() { self.first_node = (*node).next; }
                else { (*prev).next = (*node).next; }

                if (*node).next.is_null() { self.last_node = prev; }

                self.bytes_allocated -= (*node).size;

                // call finalizer
                let finalizer = (*node).finalizer;
                finalizer(body as *mut c_void);

                // free memory
                let layout = Layout::from_size_align((*node).size,
                                                     std::mem::align_of::<GcNode>()).unwrap();
                dealloc(node as *mut u8, layout);
            }
            if !freed { prev = node; }
            node = next;
        }
    }

}

unsafe impl std::marker::Send for GcManager {}
unsafe impl std::marker::Sync for GcManager {}

impl std::ops::Drop for GcManager {

    fn drop(&mut self) {
        unsafe {
            let mut node : *mut GcNode = self.first_node;
            while !node.is_null() {
                let next : *mut GcNode = (*node).next;
                let body = node.add(1);
                // call finalizer
                let finalizer = (*node).finalizer;
                finalizer(body as *mut c_void);
                // free memory
                let layout = Layout::from_size_align((*node).size,
                                                     std::mem::align_of::<GcNode>()).unwrap();
                dealloc(node as *mut u8, layout);
                node = next;
            }
        }
    }

}

// gc struct
#[repr(transparent)]
pub struct Gc<T: Sized + GcTraceable> {
    ptr: *mut T,
}

impl<T: Sized + GcTraceable> Gc<T> {
    pub fn new_nil() -> Gc<T> {
        Gc {
            ptr: null_mut()
        }
    }

    // raw
    pub fn from_raw(ptr: *mut T) -> Gc<T> {
        unsafe{ ref_inc(ptr as *mut libc::c_void); }
        Gc {
            ptr
        }
    }
    pub fn into_raw(self) -> *mut T {
        self.ptr
    }

    // ptrs
    pub fn to_raw(&self) -> *const T {
        self.ptr
    }
    pub fn to_mut_raw(&mut self) -> *mut T {
        self.ptr
    }
    pub fn ptr_eq(&self, right: &Gc<T>) -> bool {
        std::ptr::eq(self.ptr, right.ptr)
    }

    // refs with interior mutability
    pub fn as_mut(&self) -> &mut T {
        unsafe{ &mut *self.ptr }
    }
}

impl<T: Sized + GcTraceable> std::ops::Drop for Gc<T> {
    fn drop(&mut self) {
        unsafe {
            if !self.ptr.is_null() {
                ref_dec(self.ptr as *mut libc::c_void);
            }
        }
    }
}

impl<T: Sized + GcTraceable> std::convert::AsRef<T> for Gc<T> {
    fn as_ref(&self) -> &T {
        unsafe{ &*self.ptr }
    }
}

impl<T: Sized + GcTraceable> std::clone::Clone for Gc<T> {
    fn clone(&self) -> Self {
        Gc {
            ptr: unsafe {
                ref_inc(self.ptr as *mut libc::c_void);
                self.ptr
            }
        }
    }
}

pub trait GcTraceable {
    fn trace(ptr: *mut libc::c_void);
}
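
// Example impl (sketch; `Pair` is a hypothetical type): a GcTraceable impl
// forwards the mark to every Gc reference the value owns, via mark_reachable
// below. mark_reachable returns false when the node was already marked, which
// is how a recursive tracer would avoid looping on cycles.
//
//     struct Pair { left: Gc<String>, right: Gc<String> }
//     impl GcTraceable for Pair {
//         fn trace(ptr: *mut libc::c_void) {
//             unsafe {
//                 let pair = &*(ptr as *const Pair);
//                 mark_reachable(pair.left.to_raw() as *mut c_void);
//                 mark_reachable(pair.right.to_raw() as *mut c_void);
//             }
//         }
//     }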

// native traceables
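// a String owns no other GC references, so its tracer has nothing to mark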
impl GcTraceable for String {
    fn trace(_: *mut libc::c_void) {}
}

// raw refcount / mark helpers (these take a pointer to the body, not the node)
pub unsafe fn ref_inc(ptr: *mut c_void) {
    if ptr.is_null() { return; }
    let node : *mut GcNode = (ptr as *mut GcNode).sub(1);
    (*node).native_refs += 1;
}

pub unsafe fn ref_dec(ptr: *mut c_void) {
    if ptr.is_null() { return; }
    let node : *mut GcNode = (ptr as *mut GcNode).sub(1);
    (*node).native_refs -= 1;
}
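
// note: ref_dec does not free anything on its own; an object whose count has
// dropped to zero is only reclaimed by the next collect() if nothing marks it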

pub unsafe fn mark_reachable(ptr: *mut c_void) -> bool {
    // ptr points at the body; the GcNode header sits immediately before it
    if ptr.is_null() { return false; }
    let node : *mut GcNode = (ptr as *mut GcNode).sub(1);
    if !(*node).unreachable { return false; }
    (*node).unreachable = false;
    true
}