use std::{ptr::null_mut, sync::atomic::{self, AtomicBool}};

use crate::{gc_info_table::GC_TABLE, header::HeapObjectHeader};

/// Header placed in front of a single large ("precise") allocation. The object's
/// `HeapObjectHeader` and payload follow this header in the same malloc'd block.
#[repr(C)]
pub struct PreciseAllocation {
    /// Size of the cell (payload) in bytes, excluding this header.
    pub cell_size: usize,
    /// Mark bit for the current collection cycle (accessed atomically).
    pub mark: bool,
    /// Index of this allocation in `LargeObjectSpace::allocations`.
    pub index_in_space: u32,
    /// True if the malloc'd block had to be shifted by `HALF_ALIGNMENT` to line up.
    pub adjusted_alignment: bool,
    /// True while the cell still holds a not-yet-finalized object.
    pub has_valid_cell: bool,
    /// Objects flagged as newly allocated are treated as live even when unmarked.
    pub is_newly_allocated: bool,
}

impl PreciseAllocation {
    /// Every precise allocation base is aligned to this many bytes.
    pub const ALIGNMENT: usize = 16;
    pub const HALF_ALIGNMENT: usize = Self::ALIGNMENT / 2;
    /// Returns true if `raw_ptr` points at a cell in a precise allocation: such
    /// cells are only `HALF_ALIGNMENT`-aligned, so this address bit is set.
    pub fn is_precise(raw_ptr: *mut ()) -> bool {
        (raw_ptr as usize & Self::HALF_ALIGNMENT) != 0
    }
    /// Views the `mark` flag as an `AtomicBool` so it can be flipped concurrently.
    pub fn mark_atomic(&self) -> &AtomicBool {
        as_atomic!(&self.mark;AtomicBool)
    }
    /// Recovers the `PreciseAllocation` header from a pointer to its cell.
    pub fn from_cell(ptr: *mut HeapObjectHeader) -> *mut Self {
        unsafe {
            ptr.cast::<u8>()
                .offset(-(Self::header_size() as isize))
                .cast()
        }
    }
    /// Pointer originally returned by `libc::malloc`, undoing any alignment shift.
    #[inline]
    pub fn base_pointer(&self) -> *mut () {
        if self.adjusted_alignment {
            ((self as *const Self as isize) - (Self::HALF_ALIGNMENT as isize)) as *mut ()
        } else {
            self as *const Self as *mut ()
        }
    }

    /// Pointer to the object header stored right after this allocation header.
    pub fn cell(&self) -> *mut HeapObjectHeader {
        let addr = unsafe { (self as *const Self as *const u8).add(Self::header_size()) };
        addr as _
    }
    pub fn above_lower_bound(&self, raw_ptr: *mut ()) -> bool {
        let ptr = raw_ptr;
        let begin = self.cell() as *mut ();
        ptr >= begin
    }
    pub fn below_upper_bound(&self, raw_ptr: *mut ()) -> bool {
        let ptr = raw_ptr;
        let begin = self.cell() as *mut ();
        let end = (begin as usize + self.cell_size) as *mut ();
        // Tolerate pointers up to 8 bytes past the end of the cell.
        ptr <= (end as usize + 8) as *mut ()
    }
    /// Distance from the allocation base to its cell: the header size rounded up to
    /// `HALF_ALIGNMENT`, with the `HALF_ALIGNMENT` bit forced on so that cells are
    /// recognizable by `is_precise`.
    pub const fn header_size() -> usize {
        ((core::mem::size_of::<PreciseAllocation>() + Self::HALF_ALIGNMENT - 1)
            & !(Self::HALF_ALIGNMENT - 1))
            | Self::HALF_ALIGNMENT
    }
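    // Worked example for `header_size` (hypothetical numbers, not the actual struct
    // layout): if the header were 26 bytes and `HALF_ALIGNMENT` is 8, the size rounds
    // up to 32 and OR-ing in `HALF_ALIGNMENT` gives 40; a cell placed 40 bytes past a
    // 16-byte-aligned base therefore sits at an address that is 8 mod 16, which is
    // the bit `is_precise` tests.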
    pub fn contains(&self, raw_ptr: *mut ()) -> bool {
        self.above_lower_bound(raw_ptr) && self.below_upper_bound(raw_ptr)
    }
    /// Carries survival info over to the next cycle: a marked object stays alive as
    /// "newly allocated", then the mark bit is cleared.
    pub fn flip(&mut self) {
        self.is_newly_allocated |= self.is_marked();
        self.mark_atomic().store(false, atomic::Ordering::Relaxed);
    }

    pub fn is_marked(&self) -> bool {
        self.mark_atomic().load(atomic::Ordering::Relaxed)
    }

    /// Sets the mark bit, returning whether it was already set.
    pub fn test_and_set_marked(&self) -> bool {
        if self.is_marked() {
            return true;
        }
        self.mark_atomic()
            .compare_exchange(
                false,
                true,
                atomic::Ordering::Relaxed,
                atomic::Ordering::Relaxed,
            )
            .is_err()
    }

    pub fn clear_marked(&self) {
        self.mark_atomic().store(false, atomic::Ordering::Relaxed);
    }

    /// Finalizes the cell (at most once) if it is dead; always returns true.
    pub fn sweep(&mut self) -> bool {
        let cell = self.cell();
        unsafe {
            if self.has_valid_cell && !self.is_live() {
                let info = GC_TABLE.get_gc_info((*cell).get_gc_info_index());
                if let Some(cb) = info.finalize {
                    cb((*cell).payload() as _);
                }
                self.has_valid_cell = false;
            }
        }
        true
    }
    /// Allocates backing memory for a cell of `size` bytes and writes the header.
    /// Returns null if the underlying `malloc` fails.
    pub fn try_create(size: usize, index_in_space: u32) -> *mut Self {
        let adjusted_alignment_allocation_size = Self::header_size() + size + Self::HALF_ALIGNMENT;
        unsafe {
            let mut space = libc::malloc(adjusted_alignment_allocation_size).cast::<u8>();
            if space.is_null() {
                return null_mut();
            }

            let mut adjusted_alignment = false;
            // Shift by half the alignment if malloc did not hand back an aligned block.
            if !is_aligned_for_precise_allocation(space) {
                space = space.add(Self::HALF_ALIGNMENT);
                adjusted_alignment = true;
                assert!(is_aligned_for_precise_allocation(space));
            }
            assert!(size != 0);
            space.cast::<Self>().write(Self {
                adjusted_alignment,
                mark: false,
                has_valid_cell: true,
                cell_size: size,
                index_in_space,
                is_newly_allocated: false,
            });

            space.cast()
        }
    }
    pub fn is_newly_allocated(&self) -> bool {
        self.is_newly_allocated
    }
    pub fn is_live(&self) -> bool {
        self.is_marked() || self.is_newly_allocated
    }

    pub fn cell_size(&self) -> usize {
        self.cell_size
    }
    pub fn destroy(&mut self) {
        let base = self.base_pointer();
        unsafe {
            libc::free(base as _);
        }
    }

    pub fn is_empty(&self) -> bool {
        !self.is_marked() && !self.is_newly_allocated()
    }
}
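
// A minimal sanity sketch of the alignment invariants above (an assumption-light
// check: no GC machinery is touched, only the const arithmetic and `is_precise`).
// The base address used below is hypothetical.
#[cfg(test)]
mod precise_allocation_alignment_sketch {
    use super::PreciseAllocation;

    #[test]
    fn header_size_keeps_half_alignment_bit_set() {
        let header = PreciseAllocation::header_size();
        // The header is a multiple of HALF_ALIGNMENT but not of ALIGNMENT...
        assert_eq!(header % PreciseAllocation::HALF_ALIGNMENT, 0);
        assert_ne!(header & PreciseAllocation::HALF_ALIGNMENT, 0);
        // ...so a cell that follows an ALIGNMENT-aligned base is classified as precise.
        let hypothetical_base = PreciseAllocation::ALIGNMENT * 4;
        assert!(PreciseAllocation::is_precise(
            (hypothetical_base + header) as *mut ()
        ));
    }
}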
/// Returns true if `mem` is aligned to `PreciseAllocation::ALIGNMENT`.
pub fn is_aligned_for_precise_allocation(mem: *mut u8) -> bool {
    let allocable_ptr = mem as usize;
    (allocable_ptr & (PreciseAllocation::ALIGNMENT - 1)) == 0
}
/// Space that owns all precise (large object) allocations.
pub struct LargeObjectSpace {
    pub(crate) allocations: Vec<*mut PreciseAllocation>,
    pub(crate) precise_allocations_nursery_offset: usize,
    pub(crate) precise_allocations_offset_for_this_collection: usize,
    pub(crate) precise_allocations_offset_nursery_for_sweep: usize,
    pub(crate) precise_allocations_for_this_collection_size: usize,
    pub(crate) precise_allocations_for_this_collection_begin: *mut *mut PreciseAllocation,
    pub(crate) precise_allocations_for_this_collection_end: *mut *mut PreciseAllocation,
}

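// Rough sketch of how these hooks appear to fit together over one collection cycle
// (inferred from the field usage in this file, not a documented contract):
// `prepare_for_marking` picks the range of allocations to consider, `begin_marking`
// ages them on a full collection, `prepare_for_conservative_scan` sorts that range so
// `contains` can binary-search it during stack scanning, and `sweep` finalizes dead
// allocations and compacts the vector, with `prepare_for_allocation` resetting the
// nursery offsets for the next cycle.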
impl LargeObjectSpace {
    pub(crate) fn new() -> Self {
        Self {
            allocations: Vec::new(),
            precise_allocations_nursery_offset: 0,
            precise_allocations_offset_for_this_collection: 0,
            precise_allocations_offset_nursery_for_sweep: 0,
            precise_allocations_for_this_collection_end: null_mut(),
            precise_allocations_for_this_collection_begin: null_mut(),
            precise_allocations_for_this_collection_size: 0,
        }
    }
    /// At the start of a full collection, remember which allocations were marked
    /// and clear their mark bits.
    pub fn begin_marking(&mut self, full: bool) {
        if full {
            for alloc in self.allocations.iter() {
                unsafe {
                    (**alloc).flip();
                }
            }
        }
    }
    /// Conservative lookup: returns the cell's header if `pointer` falls inside one
    /// of the precise allocations selected for this collection, or null otherwise.
    #[inline]
    pub fn contains(&self, pointer: *const u8) -> *mut HeapObjectHeader {
        unsafe {
            if self.precise_allocations_for_this_collection_size != 0 {
                // Cheap range check against the first and last allocation before
                // doing the binary search.
                if (**self.precise_allocations_for_this_collection_begin)
                    .above_lower_bound(pointer as _)
                    && (**(self.precise_allocations_for_this_collection_end.sub(1)))
                        .below_upper_bound(pointer as _)
                {
                    let prec = PreciseAllocation::from_cell(pointer as _);
                    let slice = std::slice::from_raw_parts(
                        self.precise_allocations_for_this_collection_begin,
                        self.precise_allocations_for_this_collection_size,
                    );
                    if slice.binary_search_by(|ptr| ptr.cmp(&prec)).is_ok() {
                        return (*prec).cell();
                    }
                }
            }
            null_mut()
        }
    }

    /// Chooses where the next sweep starts: only the nursery portion for an eden
    /// collection, everything for a full collection.
    pub fn prepare_for_allocation(&mut self, eden: bool) {
        if eden {
            self.precise_allocations_offset_nursery_for_sweep =
                self.precise_allocations_nursery_offset;
        } else {
            self.precise_allocations_offset_nursery_for_sweep = 0;
        }
        self.precise_allocations_nursery_offset = self.allocations.len();
    }

    /// Chooses which allocations participate in marking for this collection.
    pub fn prepare_for_marking(&mut self, eden: bool) {
        if eden {
            self.precise_allocations_offset_for_this_collection =
                self.precise_allocations_nursery_offset;
        } else {
            self.precise_allocations_offset_for_this_collection = 0;
        }
    }
    /// Sorts the allocations selected for this collection by address so that
    /// `contains` can binary search them, and refreshes their space indices.
    pub fn prepare_for_conservative_scan(&mut self) {
        unsafe {
            self.precise_allocations_for_this_collection_begin = self
                .allocations
                .as_mut_ptr()
                .add(self.precise_allocations_offset_for_this_collection);
            self.precise_allocations_for_this_collection_size =
                self.allocations.len() - self.precise_allocations_offset_for_this_collection;
            self.precise_allocations_for_this_collection_end =
                self.allocations.as_mut_ptr().add(self.allocations.len());
            let slice = std::slice::from_raw_parts_mut(
                self.precise_allocations_for_this_collection_begin,
                self.precise_allocations_for_this_collection_size,
            );

            slice.sort();

            let mut index = self.precise_allocations_offset_for_this_collection;
            let mut start = self.precise_allocations_for_this_collection_begin;
            let end = self.precise_allocations_for_this_collection_end;
            while start != end {
                (**start).index_in_space = index as _;
                index += 1;
                start = start.add(1);
            }
        }
    }

    /// Sweeps the selected range: finalizes dead allocations, frees their memory,
    /// and compacts the vector, fixing up `index_in_space` for survivors.
    pub fn sweep(&mut self) {
        let mut src_index = self.precise_allocations_offset_nursery_for_sweep;
        let mut dst_index = src_index;
        while src_index < self.allocations.len() {
            let allocation = self.allocations[src_index];
            src_index += 1;
            unsafe {
                (*allocation).sweep();
                if (*allocation).is_empty() {
                    (*allocation).destroy();
                    continue;
                } else {
                    (*allocation).index_in_space = dst_index as u32;
                    self.allocations[dst_index] = allocation;
                    dst_index += 1;
                }
            }
        }
        self.allocations.truncate(dst_index);
        self.precise_allocations_nursery_offset = self.allocations.len();
    }

    /// Allocates a new large object of `size` bytes and returns a pointer to its
    /// `HeapObjectHeader`. Panics on out-of-memory.
    pub fn allocate(&mut self, size: usize) -> *mut HeapObjectHeader {
        unsafe {
            let index = self.allocations.len();
            let memory = PreciseAllocation::try_create(size, index as _);
            if memory.is_null() {
                panic!("LargeObjectSpace: OOM");
            }

            self.allocations.push(memory);

            let cell = (*memory).cell();
            // The object header's size is zeroed here; the real size is tracked in
            // `cell_size` on the `PreciseAllocation` header.
            (*cell).set_size(0);
            cell
        }
    }
}

impl Drop for LargeObjectSpace {
    fn drop(&mut self) {
        // Finalize and free every remaining allocation when the space goes away.
        while let Some(alloc) = self.allocations.pop() {
            unsafe {
                let hdr = (*alloc).cell();
                if let Some(callback) = GC_TABLE.get_gc_info((*hdr).get_gc_info_index()).finalize {
                    callback((*hdr).payload() as _);
                }
                (*alloc).destroy();
            }
        }
    }
}