// comet/block.rs

1use std::{mem::size_of, ptr::null_mut};
2
3use crate::{
4    gc_info_table::GC_TABLE,
5    gc_size,
6    globals::{IMMIX_BLOCK_SIZE, LINE_COUNT, LINE_SIZE},
7    heap::Heap,
8    internal::space_bitmap::SpaceBitmap,
9};
10
/// A block is a page-aligned container for heap-allocated objects.
///
/// The header lives at the very start of each `IMMIX_BLOCK_SIZE`-aligned
/// region; object data starts one `LINE_SIZE` past it (see `Block::begin`).
/// `#[repr(C)]` keeps the field layout fixed because blocks are created by
/// raw-writing this struct into mapped memory (see `Block::new`).
#[repr(C, align(16))]
pub struct Block {
    // Next block in the heap's list of all blocks — TODO confirm against
    // the list management code in `Heap`.
    pub all_next: *mut Self,
    // Next block in some free/recyclable list — presumably distinct from
    // `all_next`; verify against the allocator.
    pub next: *mut Self,
    /// Stores magic number to check if block is allocated.
    /// (`0xdeadbeef` once `set_allocated` runs; `0` for a fresh block.)
    pub allocated: u32,
    // Count of free lines in this block, maintained by `count_holes` and
    // initialized to `LINE_COUNT` in `Block::new`.
    pub hole_count: u8,
    // Owning heap; null after `new`. NOTE(review): nothing in this file
    // assigns it later — confirm where it is wired up.
    pub heap: *mut Heap,
}
21
impl Block {
    /// Stamp the magic value `0xdeadbeef` into `allocated`, marking this
    /// header as describing an allocated (in-use) block.
    pub fn set_allocated(&mut self) {
        self.allocated = 0xdeadbeef;
    }
    /// Return a raw pointer `offset` bytes past the start of this block.
    ///
    /// Debug builds assert the result lies in `begin()..=end()`; note that
    /// offsets inside the header area (below `LINE_SIZE`) would fail that
    /// check, since `begin()` is one line past the header.
    pub fn offset(&self, offset: usize) -> *mut u8 {
        let x = self as *const Self as usize + offset;
        debug_assert!(
            x >= self.begin() as usize && x <= self.end() as usize,
            "overflow {:x} (end={:p})",
            x,
            self.end()
        );
        x as _
    }
    /// Convert an address on this block into a line number.
    ///
    /// Line 0 is the line containing the block header itself (see `begin`),
    /// so object addresses always map to line numbers >= 1.
    pub fn object_to_line_num(object: *const u8) -> usize {
        (object as usize % IMMIX_BLOCK_SIZE) / LINE_SIZE
    }
    /// Get pointer to block from `object` pointer by rounding the address
    /// down to the previous `IMMIX_BLOCK_SIZE` boundary.
    ///
    /// # Safety
    /// Does not do anything unsafe but might return wrong pointer: if
    /// `object` does not actually point into an immix block, the rounded
    /// address is not a valid `Block`.
    pub unsafe fn get_block_ptr(object: *const u8) -> *mut Self {
        let off = object as usize % IMMIX_BLOCK_SIZE;
        (object as *mut u8).offset(-(off as isize)) as *mut Block
    }

    /// Placement-initialize a fresh block header at `at` and return a
    /// reference to it.
    ///
    /// `at` must be `IMMIX_BLOCK_SIZE`-aligned (debug-asserted) and is
    /// presumably backed by at least `IMMIX_BLOCK_SIZE` writable bytes —
    /// TODO confirm; that is implied but not checked here. The new header
    /// has no list links, `allocated == 0` (not the magic), no owning heap,
    /// and every line counted as a hole.
    pub fn new(at: *mut u8) -> &'static mut Self {
        unsafe {
            let ptr = at as *mut Self;
            debug_assert!(ptr as usize % IMMIX_BLOCK_SIZE == 0);
            ptr.write(Self {
                all_next: null_mut(),
                next: null_mut(),
                allocated: 0,
                heap: null_mut(),
                hole_count: LINE_COUNT as _,
            });

            &mut *ptr
        }
    }
    /// First usable (object) byte of the block: one full line past the
    /// header. The debug assert guarantees the header fits within that
    /// first line.
    pub fn begin(&self) -> *mut u8 {
        debug_assert!(size_of::<Self>() < LINE_SIZE);
        (self as *const Self as usize + LINE_SIZE) as _
    }

    /// One-past-the-end address of this block.
    pub fn end(&self) -> *mut u8 {
        (self as *const Self as usize + IMMIX_BLOCK_SIZE) as _
    }
    /// Whether `p` lies within this block's object area.
    ///
    /// NOTE(review): the upper bound is inclusive, so `p == end()` (one past
    /// the last byte) is also accepted — confirm callers rely on that.
    #[inline]
    pub fn is_in_block(&self, p: *const u8) -> bool {
        //if self.allocated == 0xdeadbeef {
        let b = self.begin() as usize;
        let e = self.end() as usize;
        b <= p as usize && p as usize <= e
        //} else {
        //    false
        //}
    }
    /// Update the line counter for the given object.
    ///
    /// Mark if `MARK`, otherwise clear mark bit. The object's size (from
    /// `gc_size`) determines how many consecutive lines are touched.
    pub fn update_lines<const MARK: bool>(
        &self,
        bitmap: &SpaceBitmap<LINE_SIZE>,
        object: *const u8,
    ) {
        // This calculates how many lines are affected starting from a
        // LINE_SIZE aligned address. So it might not mark enough lines. But
        // that does not matter as we always skip a line in scan_block()
        let line_num = Self::object_to_line_num(object);
        let size = gc_size(object.cast());
        debug_assert!(self.is_in_block(object));

        for line in line_num..(line_num + (size / LINE_SIZE) + 1) {
            // Line 0 is the header line; no object can ever start there.
            debug_assert!(line != 0);
            if MARK {
                bitmap.set(self.line(line));
            } else {
                bitmap.clear(self.line(line));
            }
        }
    }
    /// Pointer to the start of line `index` within this block (line 0 is
    /// the header line, so valid object lines start at 1).
    pub fn line(&self, index: usize) -> *mut u8 {
        let line = self.offset(index * LINE_SIZE);
        debug_assert!(
            line >= self.begin() && line <= self.end(),
            "invalid line: {:p} (begin={:p},end={:p})",
            line,
            self.begin(),
            self.end()
        );
        line
    }

    /// Sweep this block after marking.
    ///
    /// Walks every object recorded in the `live` bitmap. Objects whose bit
    /// in `bitmap` is NOT set are dead: their finalizer (if the GC info
    /// table has one) runs and their `live` bit is cleared. Surviving
    /// objects have their lines re-marked in `line` and their `bitmap` bit
    /// cleared, ready for the next cycle.
    ///
    /// Returns `SweepResult::Empty` when no object survived, otherwise
    /// `SweepResult::Reuse`.
    ///
    /// NOTE(review): the `MAJOR` const parameter and the local `count` of
    /// finalized objects are currently unused — confirm whether they were
    /// meant to influence the result.
    pub fn sweep<const MAJOR: bool>(
        &mut self,
        bitmap: &SpaceBitmap<8>,
        live: &SpaceBitmap<8>,
        line: &SpaceBitmap<LINE_SIZE>,
    ) -> SweepResult {
        let mut empty = true;
        let mut count = 0;

        live.visit_marked_range(self.begin(), self.end(), |object| unsafe {
            if !bitmap.test(object as _) {
                if let Some(callback) = GC_TABLE.get_gc_info((*object).get_gc_info_index()).finalize
                {
                    callback((*object).payload() as _);
                }
                live.clear(object as _);

                count += 1;
            } else {
                self.update_lines::<true>(line, object as _);
                bitmap.clear(object as _);
                empty = false;
            }
        });
        if empty {
            SweepResult::Empty
        } else {
            SweepResult::Reuse
        }
    }

    /// Count the unmarked lines of this block (skipping line 0, the header
    /// line) and cache the result in `hole_count`.
    ///
    /// NOTE(review): this counts individual free lines, not contiguous runs
    /// of free lines ("holes" in immix terminology) — confirm callers expect
    /// the former.
    pub fn count_holes(&mut self, bitmap: &SpaceBitmap<LINE_SIZE>) -> usize {
        let mut count = 0;
        for line in 1..LINE_COUNT {
            if !bitmap.test(self.line(line)) {
                count += 1;
            }
        }
        self.hole_count = count as _;
        count
    }

    /// Scan the block for a hole to allocate into.
    ///
    /// The scan will start at `last_high_offset` bytes into the block and
    /// return a tuple of `low_offset`, `high_offset` as the lowest and
    /// highest usable offsets for a hole.
    ///
    /// `None` is returned if no hole was found.
    pub fn scan_block(
        &self,
        bitmap: &SpaceBitmap<LINE_SIZE>,
        last_high_offset: u16,
    ) -> Option<(u16, u16)> {
        let last_high_index = last_high_offset as usize / LINE_SIZE;
        // search for first unmarked line; the `+ 1` deliberately skips one
        // extra line because update_lines() may under-mark by a line (see
        // the comment there about conservative marking).
        let mut low_index = LINE_COUNT - 1;
        for index in (last_high_index + 1)..LINE_COUNT {
            if !bitmap.test(self.line(index)) {
                low_index = index + 1;
                break;
            }
        }
        // search for first marked line at or after the hole's start; if
        // none is found the hole extends to the end of the block.
        let mut high_index = LINE_COUNT;
        for index in low_index..LINE_COUNT {
            if bitmap.test(self.line(index)) {
                high_index = index;
                break;
            }
        }
        // Zero-length hole: recurse to continue scanning past it.
        if low_index == high_index && high_index != (LINE_COUNT - 1) {
            return self.scan_block(bitmap, (high_index * LINE_SIZE - 1) as u16);
        } else if low_index < (LINE_COUNT - 1) {
            return Some((
                (low_index * LINE_SIZE) as u16,
                (high_index * LINE_SIZE - 1) as u16,
            ));
        }
        None
    }

    /// Prepare this block for use by `heap`: clear every line-mark bit in
    /// the heap's global line bitmap over this block's object range.
    ///
    /// NOTE(review): `self.heap` is not updated here — confirm whether the
    /// caller is responsible for storing the owning heap pointer.
    pub fn init(&mut self, heap: *mut Heap) {
        unsafe {
            (*heap)
                .global
                .line_bitmap
                .clear_range(self.begin(), self.end());
        }
    }
}
209
/// Outcome of `Block::sweep`: whether the block still holds live objects.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum SweepResult {
    /// No object survived the sweep; the whole block can be reclaimed.
    Empty,
    /// At least one object survived; the block is a candidate for reuse.
    Reuse,
}