use std::{
    alloc::{alloc, dealloc, realloc, Layout},
    mem::size_of,
    ptr::null_mut,
};

use super::{Allocator, RawMemPtr};

/// The largest value QuickJS will allocate is a `u64`, so all allocated
/// memory must share the alignment of that largest type.
const ALLOC_ALIGN: usize = std::mem::align_of::<u64>();

#[derive(Copy, Clone)]
#[repr(transparent)]
struct Header {
    size: usize,
}

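/// A `const`-compatible maximum: `std::cmp::max` is not a `const fn`, so a
/// small local helper is needed to compute `HEADER_SIZE` at compile time.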
const fn max(a: usize, b: usize) -> usize {
    if a < b {
        b
    } else {
        a
    }
}

/// The header must be at least `ALLOC_ALIGN` bytes large so that all values placed after it stay aligned.
const HEADER_SIZE: usize = max(size_of::<Header>(), ALLOC_ALIGN);
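
// Every allocation therefore has the shape below; QuickJS receives the
// pointer just past the header (sizes shown for a typical 64-bit target,
// where HEADER_SIZE == 8):
//
//   | Header { size }: 8 bytes | payload: `size` bytes, ALLOC_ALIGN aligned |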

#[inline]
fn round_size(size: usize) -> usize {
    // this will be optimized by the compiler
    // to something like (size + <off>) & <mask>
    (size + ALLOC_ALIGN - 1) / ALLOC_ALIGN * ALLOC_ALIGN
}
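
// For example, with ALLOC_ALIGN == 8 (the usual value on 64-bit targets):
// round_size(1) == 8, round_size(8) == 8, round_size(13) == 16.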

/// The allocator which uses the Rust global allocator.
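///
/// A minimal usage sketch (mirroring the test below; a runtime is given a
/// custom allocator through `Runtime::new_with_alloc`):
///
/// ```ignore
/// let rt = Runtime::new_with_alloc(RustAllocator).unwrap();
/// let ctx = Context::full(&rt).unwrap();
/// ```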
pub struct RustAllocator;

unsafe impl Allocator for RustAllocator {
    fn alloc(&mut self, size: usize) -> RawMemPtr {
        let size = round_size(size);
        let alloc_size = size + HEADER_SIZE;
        let layout = if let Ok(layout) = Layout::from_size_align(alloc_size, ALLOC_ALIGN) {
            layout
        } else {
            return null_mut();
        };

        let ptr = unsafe { alloc(layout) };

        if ptr.is_null() {
            return null_mut();
        }
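        // Record the payload size in front of the allocation so that
        // `dealloc`, `realloc`, and `usable_size` can recover it later.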
        {
            let header = unsafe { &mut *(ptr as *mut Header) };
            header.size = size;
        }

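        // Hand out the address just past the header.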
        unsafe { ptr.add(HEADER_SIZE) }
    }

    #[allow(clippy::not_unsafe_ptr_arg_deref)]
    unsafe fn dealloc(&mut self, ptr: RawMemPtr) {
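        // Step back over the header to recover the pointer originally
        // returned by the global allocator.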
        let ptr = unsafe { ptr.sub(HEADER_SIZE) };
        let alloc_size = {
            let header = unsafe { &*(ptr as *const Header) };
            header.size + HEADER_SIZE
        };
        let layout = unsafe { Layout::from_size_align_unchecked(alloc_size, ALLOC_ALIGN) };

        unsafe { dealloc(ptr, layout) };
    }

    #[allow(clippy::not_unsafe_ptr_arg_deref)]
    unsafe fn realloc(&mut self, ptr: RawMemPtr, new_size: usize) -> RawMemPtr {
        let new_size = round_size(new_size);
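        // Recover the current allocation's layout from its header.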
        let ptr = unsafe { ptr.sub(HEADER_SIZE) };
        let alloc_size = {
            let header = unsafe { &*(ptr as *const Header) };
            header.size + HEADER_SIZE
        };
        let layout = unsafe { Layout::from_size_align_unchecked(alloc_size, ALLOC_ALIGN) };

        let new_alloc_size = new_size + HEADER_SIZE;

        let ptr = unsafe { realloc(ptr, layout, new_alloc_size) };

        if ptr.is_null() {
            return null_mut();
        }
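        // Update the stored payload size; `realloc` may also have moved the block.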
        {
            let header = unsafe { &mut *(ptr as *mut Header) };
            header.size = new_size;
        }

        unsafe { ptr.add(HEADER_SIZE) }
    }

    #[allow(clippy::not_unsafe_ptr_arg_deref)]
    unsafe fn usable_size(ptr: RawMemPtr) -> usize {
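        // The usable size is exactly the rounded payload size stored in the header.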
        let ptr = unsafe { ptr.sub(HEADER_SIZE) };
        let header = unsafe { &*(ptr as *const Header) };
        header.size
    }
}

#[cfg(all(test, feature = "rust-alloc", feature = "allocator"))]
mod test {
    use super::RustAllocator;
    use crate::{allocator::Allocator, Context, Runtime};
    use std::sync::atomic::{AtomicUsize, Ordering};

    static ALLOC_SIZE: AtomicUsize = AtomicUsize::new(0);

    struct TestAllocator;

    unsafe impl Allocator for TestAllocator {
        fn alloc(&mut self, size: usize) -> crate::allocator::RawMemPtr {
            unsafe {
                let res = RustAllocator.alloc(size);
                // Only count the allocation if it succeeded; `usable_size`
                // must not be called on a null pointer.
                if !res.is_null() {
                    ALLOC_SIZE.fetch_add(RustAllocator::usable_size(res), Ordering::AcqRel);
                }
                res
            }
        }

        unsafe fn dealloc(&mut self, ptr: crate::allocator::RawMemPtr) {
            ALLOC_SIZE.fetch_sub(RustAllocator::usable_size(ptr), Ordering::AcqRel);
            RustAllocator.dealloc(ptr);
        }

        unsafe fn realloc(
            &mut self,
            ptr: crate::allocator::RawMemPtr,
            new_size: usize,
        ) -> crate::allocator::RawMemPtr {
            if !ptr.is_null() {
                ALLOC_SIZE.fetch_sub(RustAllocator::usable_size(ptr), Ordering::AcqRel);
            }

            let res = RustAllocator.realloc(ptr, new_size);
            if !res.is_null() {
                ALLOC_SIZE.fetch_add(RustAllocator::usable_size(res), Ordering::AcqRel);
            }
            res
        }

        unsafe fn usable_size(ptr: crate::allocator::RawMemPtr) -> usize
        where
            Self: Sized,
        {
            RustAllocator::usable_size(ptr)
        }
    }
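
    // Sanity check on the header bookkeeping: the usable size reported for a
    // fresh allocation must cover at least the requested size, and the
    // pointer must round-trip through `dealloc`.
    #[test]
    fn usable_size_covers_request() {
        let mut allocator = RustAllocator;
        let ptr = allocator.alloc(10);
        assert!(!ptr.is_null());
        unsafe {
            assert!(RustAllocator::usable_size(ptr) >= 10);
            allocator.dealloc(ptr);
        }
    }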

    #[test]
    fn test_gc_working_correctly() {
        let rt = Runtime::new_with_alloc(TestAllocator).unwrap();
        let context = Context::full(&rt).unwrap();

        let before = ALLOC_SIZE.load(Ordering::Acquire);

        context.with(|ctx| {
            ctx.eval::<(), _>(
                r#"
                for(let i = 0;i < 100_000;i++){
                    // create recursive structure.
                    const a = () => {
                        if(a){
                            return true
                        }
                        return false
                    };
                }
            "#,
            )
            .unwrap();
        });

        let after = ALLOC_SIZE.load(Ordering::Acquire);
        // Every object takes at least a single byte, so the GC must have
        // collected at least some of the recursive objects if the difference
        // is smaller than the number of objects created.
        assert!(after.saturating_sub(before) < 100_000)
    }
}