// shape_gc/region.rs
1//! Memory regions — 2MB mmap'd chunks for GC allocation.
2
3use crate::header::GcHeader;
4use std::alloc::Layout;
5
/// Size of each region: 2MB (2 MiB = 2 * 1024 * 1024 bytes).
pub const REGION_SIZE: usize = 2 * 1024 * 1024;
8
/// A contiguous 2MB memory region allocated via mmap.
///
/// Objects are bump-allocated within the region. Each object is preceded by
/// a GcHeader (8 bytes) and is 8-byte aligned.
pub struct Region {
    /// Start of the mmap'd memory (the only raw pointer; `cursor` and
    /// `limit` below are byte *offsets* relative to it).
    base: *mut u8,
    /// Current allocation cursor (next free byte, as an offset from `base`).
    /// Invariant: always `<= limit` and a multiple of 8.
    cursor: usize,
    /// Limit (base + REGION_SIZE). Stored as the offset REGION_SIZE.
    limit: usize,
    /// Total live bytes after last sweep; maintained externally via
    /// `set_live_bytes` / `reset`, not by the allocator itself.
    live_bytes: usize,
}
23
// SAFETY: Region holds a raw pointer (which disables the auto Send impl),
// but the mapping is exclusively owned and only accessed from the owning
// thread or under GC stop-the-world, so transferring ownership across
// threads is sound.
unsafe impl Send for Region {}
26
27impl Region {
28    /// Allocate a new 2MB region via mmap.
29    pub fn new() -> Self {
30        let base = unsafe {
31            libc::mmap(
32                std::ptr::null_mut(),
33                REGION_SIZE,
34                libc::PROT_READ | libc::PROT_WRITE,
35                libc::MAP_PRIVATE | libc::MAP_ANONYMOUS,
36                -1,
37                0,
38            )
39        };
40        assert!(
41            base != libc::MAP_FAILED,
42            "mmap failed to allocate {} byte region",
43            REGION_SIZE
44        );
45        let base = base as *mut u8;
46        Self {
47            base,
48            cursor: 0,
49            limit: REGION_SIZE,
50            live_bytes: 0,
51        }
52    }
53
54    /// Try to bump-allocate within this region.
55    ///
56    /// Returns a pointer to the object data (after the GcHeader) or None if
57    /// the region is exhausted.
58    ///
59    /// `total_size` includes the GcHeader (8 bytes) + the object layout size,
60    /// already aligned to 8 bytes.
61    pub fn try_alloc(&mut self, layout: Layout) -> Option<*mut u8> {
62        let header_size = std::mem::size_of::<GcHeader>();
63        let obj_size = layout.size();
64        // Align total to 8 bytes
65        let total = (header_size + obj_size + 7) & !7;
66
67        if self.cursor + total > self.limit {
68            return None;
69        }
70
71        let header_ptr = unsafe { self.base.add(self.cursor) } as *mut GcHeader;
72        let obj_ptr = unsafe { self.base.add(self.cursor + header_size) };
73
74        // Write the header
75        unsafe {
76            header_ptr.write(GcHeader::new(0, obj_size as u32));
77        }
78
79        self.cursor += total;
80        Some(obj_ptr)
81    }
82
83    /// Check if a pointer falls within this region.
84    #[inline]
85    pub fn contains(&self, ptr: *const u8) -> bool {
86        let addr = ptr as usize;
87        let base = self.base as usize;
88        addr >= base && addr < base + REGION_SIZE
89    }
90
91    /// Get the base address of this region.
92    pub fn base(&self) -> *mut u8 {
93        self.base
94    }
95
96    /// Get the current used bytes.
97    pub fn used_bytes(&self) -> usize {
98        self.cursor
99    }
100
101    /// Get the remaining capacity.
102    pub fn remaining(&self) -> usize {
103        self.limit - self.cursor
104    }
105
106    /// Set the cursor to a specific offset. Used by TLAB flush to sync
107    /// the TLAB's allocation cursor back into the region before sweep.
108    pub fn set_cursor(&mut self, cursor: usize) {
109        debug_assert!(cursor <= self.limit);
110        self.cursor = cursor;
111    }
112
113    /// Reset the cursor (after relocation has moved all live objects out).
114    pub fn reset(&mut self) {
115        self.cursor = 0;
116        self.live_bytes = 0;
117    }
118
119    /// Iterate over all allocated objects in this region.
120    ///
121    /// Calls `f(header, obj_ptr)` for each object.
122    pub fn for_each_object(&self, mut f: impl FnMut(&GcHeader, *mut u8)) {
123        let header_size = std::mem::size_of::<GcHeader>();
124        let mut offset = 0;
125
126        while offset < self.cursor {
127            let header_ptr = unsafe { self.base.add(offset) } as *const GcHeader;
128            let header = unsafe { &*header_ptr };
129            let obj_ptr = unsafe { self.base.add(offset + header_size) };
130            let obj_size = header.size as usize;
131            let total = (header_size + obj_size + 7) & !7;
132
133            f(header, obj_ptr);
134
135            offset += total;
136        }
137    }
138
139    /// Iterate over all allocated objects mutably.
140    pub fn for_each_object_mut(&mut self, mut f: impl FnMut(&mut GcHeader, *mut u8)) {
141        let header_size = std::mem::size_of::<GcHeader>();
142        let mut offset = 0;
143        let cursor = self.cursor;
144
145        while offset < cursor {
146            let header_ptr = unsafe { self.base.add(offset) } as *mut GcHeader;
147            let header = unsafe { &mut *header_ptr };
148            let obj_ptr = unsafe { self.base.add(offset + header_size) };
149            let obj_size = header.size as usize;
150            let total = (header_size + obj_size + 7) & !7;
151
152            f(header, obj_ptr);
153
154            offset += total;
155        }
156    }
157
158    /// Protect this region's pages (PROT_NONE) — used after relocation.
159    pub fn protect(&self) {
160        unsafe {
161            libc::mprotect(self.base as *mut libc::c_void, REGION_SIZE, libc::PROT_NONE);
162        }
163    }
164
165    /// Unprotect this region's pages (PROT_READ|PROT_WRITE) — used after fixup.
166    pub fn unprotect(&self) {
167        unsafe {
168            libc::mprotect(
169                self.base as *mut libc::c_void,
170                REGION_SIZE,
171                libc::PROT_READ | libc::PROT_WRITE,
172            );
173        }
174    }
175
176    /// Get/set live_bytes tracking.
177    pub fn live_bytes(&self) -> usize {
178        self.live_bytes
179    }
180
181    pub fn set_live_bytes(&mut self, bytes: usize) {
182        self.live_bytes = bytes;
183    }
184}
185
impl Drop for Region {
    fn drop(&mut self) {
        // SAFETY: `base` was returned by a successful mmap of exactly
        // REGION_SIZE bytes in `Region::new`, and Drop runs at most once,
        // so this unmaps a valid mapping exactly one time.
        unsafe {
            libc::munmap(self.base as *mut libc::c_void, REGION_SIZE);
        }
    }
}
193
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_region_alloc_and_contains() {
        let mut region = Region::new();
        let layout = Layout::from_size_align(64, 8).unwrap();
        let ptr = region.try_alloc(layout).expect("allocation failed");
        // The returned object pointer must lie inside the region's span,
        // while a null pointer can never belong to a mapped region.
        assert!(region.contains(ptr));
        assert!(!region.contains(std::ptr::null()));
    }

    #[test]
    fn test_region_exhaustion() {
        let mut region = Region::new();
        // A request as large as the entire region cannot fit once the
        // header and rounding overhead are accounted for.
        let big_layout = Layout::from_size_align(REGION_SIZE, 8).unwrap();
        assert!(region.try_alloc(big_layout).is_none());
    }

    #[test]
    fn test_region_iteration() {
        let mut region = Region::new();
        let layout = Layout::from_size_align(16, 8).unwrap();
        for _ in 0..3 {
            region.try_alloc(layout).unwrap();
        }

        // Walking the region should visit exactly the three 16-byte objects,
        // each reporting its allocation size in the header.
        let mut seen = 0;
        region.for_each_object(|header, _ptr| {
            assert_eq!(header.size, 16);
            seen += 1;
        });
        assert_eq!(seen, 3);
    }
}