rquickjs_core/allocator/rust.rs

use alloc::alloc;
use core::{alloc::Layout, mem, ptr};

use super::Allocator;

/// The largest value QuickJS will allocate is a `u64`,
/// so all allocated memory must have the same alignment as this largest size.
const ALLOC_ALIGN: usize = mem::align_of::<u64>();

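/// Bookkeeping header written at the start of every allocation. It records the
/// usable size handed back to QuickJS so that `dealloc`, `realloc` and
/// `usable_size` can reconstruct the original `Layout` from the pointer alone.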
#[derive(Copy, Clone)]
#[repr(transparent)]
struct Header {
    size: usize,
}

const fn max(a: usize, b: usize) -> usize {
    if a < b {
        b
    } else {
        a
    }
}

/// The header needs to be at least alloc aligned so that all values after the header are aligned.
const HEADER_SIZE: usize = max(mem::size_of::<Header>(), ALLOC_ALIGN);

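/// Round the requested size up to a multiple of [`ALLOC_ALIGN`] so that the
/// size recorded in the header (and reported by `usable_size`) always covers
/// whole aligned words.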
#[inline]
fn round_size(size: usize) -> usize {
    size.div_ceil(ALLOC_ALIGN) * ALLOC_ALIGN
}

/// The allocator which uses the Rust global allocator.
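///
/// A minimal usage sketch (assuming the crate re-exports this type as
/// `rquickjs::allocator::RustAllocator` behind the `rust-alloc` feature):
///
/// ```ignore
/// use rquickjs::{allocator::RustAllocator, Context, Runtime};
///
/// let rt = Runtime::new_with_alloc(RustAllocator).unwrap();
/// let ctx = Context::full(&rt).unwrap();
/// ctx.with(|ctx| {
///     assert_eq!(ctx.eval::<i32, _>("1 + 1").unwrap(), 2);
/// });
/// ```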
pub struct RustAllocator;

unsafe impl Allocator for RustAllocator {
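    // Allocate zero-initialised memory for `count` items of `size` bytes. Returns
    // a null pointer for empty requests, an invalid layout, or allocator failure;
    // an overflowing `count * size` panics.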
    fn calloc(&mut self, count: usize, size: usize) -> *mut u8 {
        if count == 0 || size == 0 {
            return ptr::null_mut();
        }

        let total_size = count.checked_mul(size).expect("overflow");
        let total_size = round_size(total_size);

        let alloc_size = HEADER_SIZE + total_size;

        let layout = if let Ok(layout) = Layout::from_size_align(alloc_size, ALLOC_ALIGN) {
            layout
        } else {
            return ptr::null_mut();
        };

        let ptr = unsafe { alloc::alloc_zeroed(layout) };

        if ptr.is_null() {
            return ptr::null_mut();
        }

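        // Record the usable size in the header and hand back the memory that
        // starts just past it.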
        unsafe {
            ptr.cast::<Header>().write(Header { size: total_size });
            ptr.add(HEADER_SIZE)
        }
    }

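    // Allocate `size` bytes using the same header scheme as `calloc`, but without
    // zeroing the memory. Returns a null pointer if the layout is invalid or the
    // global allocator fails.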
    fn alloc(&mut self, size: usize) -> *mut u8 {
        let size = round_size(size);
        let alloc_size = size + HEADER_SIZE;

        let layout = if let Ok(layout) = Layout::from_size_align(alloc_size, ALLOC_ALIGN) {
            layout
        } else {
            return ptr::null_mut();
        };

        let ptr = unsafe { alloc::alloc(layout) };

        if ptr.is_null() {
            return ptr::null_mut();
        }

        unsafe {
            ptr.cast::<Header>().write(Header { size });
            ptr.add(HEADER_SIZE)
        }
    }

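    // Step back over the header to recover the original allocation's pointer and
    // layout before handing it to the global allocator.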
    unsafe fn dealloc(&mut self, ptr: *mut u8) {
        let ptr = ptr.sub(HEADER_SIZE);
        let alloc_size = ptr.cast::<Header>().read().size + HEADER_SIZE;
        let layout = Layout::from_size_align_unchecked(alloc_size, ALLOC_ALIGN);

        alloc::dealloc(ptr, layout);
    }

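    // Reconstruct the old layout from the header, grow or shrink the block with
    // the header still in front, and write the new usable size back into the
    // (possibly moved) header. Returns a null pointer on failure.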
    unsafe fn realloc(&mut self, ptr: *mut u8, new_size: usize) -> *mut u8 {
        let new_size = round_size(new_size);

        let ptr = ptr.sub(HEADER_SIZE);
        let alloc_size = ptr.cast::<Header>().read().size + HEADER_SIZE;

        let layout = Layout::from_size_align_unchecked(alloc_size, ALLOC_ALIGN);

        let new_alloc_size = new_size + HEADER_SIZE;

        let ptr = alloc::realloc(ptr, layout, new_alloc_size);

        if ptr.is_null() {
            return ptr::null_mut();
        }

        ptr.cast::<Header>().write(Header { size: new_size });
        ptr.add(HEADER_SIZE)
    }

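    // Report the usable size recorded in the header, i.e. the requested size
    // rounded up to `ALLOC_ALIGN`.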
    unsafe fn usable_size(ptr: *mut u8) -> usize {
        let ptr = ptr.sub(HEADER_SIZE);
        ptr.cast::<Header>().read().size
    }
}

#[cfg(all(test, feature = "rust-alloc"))]
mod test {
    use super::RustAllocator;
    use crate::{allocator::Allocator, Context, Runtime};
    use std::sync::atomic::{AtomicUsize, Ordering};

    static ALLOC_SIZE: AtomicUsize = AtomicUsize::new(0);

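    // Wrapper around RustAllocator which tracks the total usable size of live
    // allocations in ALLOC_SIZE, so the test below can observe whether the
    // garbage collector actually frees memory.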
    struct TestAllocator;

    unsafe impl Allocator for TestAllocator {
        fn alloc(&mut self, size: usize) -> *mut u8 {
            unsafe {
                let res = RustAllocator.alloc(size);
                ALLOC_SIZE.fetch_add(RustAllocator::usable_size(res), Ordering::AcqRel);
                res
            }
        }

        fn calloc(&mut self, count: usize, size: usize) -> *mut u8 {
            unsafe {
                let res = RustAllocator.calloc(count, size);
                ALLOC_SIZE.fetch_add(RustAllocator::usable_size(res), Ordering::AcqRel);
                res
            }
        }

        unsafe fn dealloc(&mut self, ptr: *mut u8) {
            ALLOC_SIZE.fetch_sub(RustAllocator::usable_size(ptr), Ordering::AcqRel);
            RustAllocator.dealloc(ptr);
        }

        unsafe fn realloc(&mut self, ptr: *mut u8, new_size: usize) -> *mut u8 {
            if !ptr.is_null() {
                ALLOC_SIZE.fetch_sub(RustAllocator::usable_size(ptr), Ordering::AcqRel);
            }

            let res = RustAllocator.realloc(ptr, new_size);
            if !res.is_null() {
                ALLOC_SIZE.fetch_add(RustAllocator::usable_size(res), Ordering::AcqRel);
            }
            res
        }

        unsafe fn usable_size(ptr: *mut u8) -> usize
        where
            Self: Sized,
        {
            RustAllocator::usable_size(ptr)
        }
    }

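    // Run a script that creates many self-referential closures; if the GC is
    // collecting them, the net growth tracked in ALLOC_SIZE stays far below the
    // number of objects created.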
    #[test]
    fn test_gc_working_correctly() {
        let rt = Runtime::new_with_alloc(TestAllocator).unwrap();
        let context = Context::full(&rt).unwrap();

        let before = ALLOC_SIZE.load(Ordering::Acquire);

        context.with(|ctx| {
            ctx.eval::<(), _>(
                r#"
                for(let i = 0;i < 100_000;i++){
                    // create recursive structure.
                    const a = () => {
                        if(a){
                            return true
                        }
                        return false
                    };
                }
            "#,
            )
            .unwrap();
        });

        let after = ALLOC_SIZE.load(Ordering::Acquire);
        // Every object takes at least a single byte,
        // so the GC must have collected at least some of the recursive objects if the
        // difference is smaller than the number of objects created.
        assert!(after.saturating_sub(before) < 100_000)
    }
}