Skip to main content

cjc_runtime/
frame_arena.rs

//! Frame Arena — bump allocator for non-escaping values.
//!
//! Each function invocation gets a `FrameArena` that satisfies all
//! `AllocHint::Arena` allocations. The arena is bulk-freed when the
//! function returns (no individual deallocation).
//!
//! # Determinism
//!
//! - Allocation order is sequential (bump pointer, no fragmentation).
//! - No OS memory is returned during normal execution — arenas grow but
//!   never shrink. After bulk-free, memory is retained for reuse.
//! - Same sequence of allocations → same layout, every time.
//!
//! # Design
//!
//! The arena manages a list of 4 KB pages. Each page is a `Vec<u8>` that
//! is never deallocated during program execution. When a page is exhausted,
//! a new page is allocated. On `reset()`, the bump pointer returns to the
//! start of the first page — all subsequent pages remain allocated but
//! unused (retained for future frames).
//!
//! The arena does NOT hand out typed references — it stores opaque byte
//! slices. The caller is responsible for alignment and type safety.
//! For CJC's `Value` type (which is always the same size), alignment is
//! guaranteed by the page alignment.
27use std::any::Any;
28use std::cell::RefCell;
29use std::rc::Rc;
30
/// Default page size: 4 KB (fits ~64 Values at 64 bytes each).
const PAGE_SIZE: usize = 4096;

/// A bump-arena that allocates sequentially within fixed-size pages.
///
/// Bulk-freed via `reset()` at function return. Pages are never returned
/// to the OS during execution — they are retained for reuse.
///
/// Invariant: pages created by the bump path are exactly the arena's page
/// size; oversized allocations get dedicated pages that are strictly
/// larger. That size difference is how `alloc_bytes` tells them apart.
pub struct FrameArena {
    /// All allocated pages (never freed during program lifetime).
    pages: Vec<Vec<u8>>,
    /// Index of the current page being filled (always a bump page, never
    /// a dedicated oversized page).
    current_page: usize,
    /// Byte offset within the current page.
    cursor: usize,
    /// Total bytes allocated across all arena lifetimes (monotonic counter).
    pub total_allocated: usize,
    /// Number of reset() calls.
    pub reset_count: usize,
}

impl FrameArena {
    /// Create a new arena with one pre-allocated page of `PAGE_SIZE` bytes.
    pub fn new() -> Self {
        FrameArena {
            pages: vec![vec![0u8; PAGE_SIZE]],
            current_page: 0,
            cursor: 0,
            total_allocated: 0,
            reset_count: 0,
        }
    }

    /// Create an arena with a custom page size (for testing).
    ///
    /// A `page_size` of 0 falls back to the default `PAGE_SIZE`.
    pub fn with_page_size(page_size: usize) -> Self {
        let size = if page_size == 0 { PAGE_SIZE } else { page_size };
        FrameArena {
            pages: vec![vec![0u8; size]],
            current_page: 0,
            cursor: 0,
            total_allocated: 0,
            reset_count: 0,
        }
    }

    /// Allocate `size` bytes from the arena. Returns the (page_index, offset)
    /// pair that identifies the allocation.
    ///
    /// Sizes are rounded up to a multiple of 8 bytes. If the current page
    /// doesn't have enough room, advances to the next bump page (allocating
    /// a new one if needed). Requests larger than a page get a dedicated
    /// page of their own; those pages are never used for bump allocation.
    pub fn alloc_bytes(&mut self, size: usize) -> (usize, usize) {
        // Align to 8 bytes for safe casting.
        let aligned = (size + 7) & !7;
        let page_size = self.page_size();

        // If the allocation is larger than a page, allocate a dedicated page.
        if aligned > page_size {
            let page_idx = self.pages.len();
            self.pages.push(vec![0u8; aligned]);
            self.total_allocated += aligned;
            return (page_idx, 0);
        }

        // Check if current page has room.
        if self.cursor + aligned > page_size {
            // Advance to the next *bump* page. Dedicated oversized pages
            // (recognizable by their non-standard length) must be skipped:
            // bump-allocating into one would clobber the oversized
            // allocation stored there.
            self.current_page += 1;
            while self.current_page < self.pages.len()
                && self.pages[self.current_page].len() != page_size
            {
                self.current_page += 1;
            }
            self.cursor = 0;
            if self.current_page >= self.pages.len() {
                self.pages.push(vec![0u8; page_size]);
            }
        }

        let offset = self.cursor;
        self.cursor += aligned;
        self.total_allocated += aligned;
        (self.current_page, offset)
    }

    /// Get a reference to the bytes at the given (page, offset) location.
    ///
    /// Returns `None` if the page index or byte range is out of bounds.
    pub fn get_bytes(&self, page: usize, offset: usize, len: usize) -> Option<&[u8]> {
        self.pages
            .get(page)
            .and_then(|p| p.get(offset..offset + len))
    }

    /// Get a mutable reference to the bytes at the given (page, offset) location.
    ///
    /// Returns `None` if the page index or byte range is out of bounds.
    pub fn get_bytes_mut(&mut self, page: usize, offset: usize, len: usize) -> Option<&mut [u8]> {
        self.pages
            .get_mut(page)
            .and_then(|p| p.get_mut(offset..offset + len))
    }

    /// Reset the arena for reuse. All previous allocations are invalidated.
    /// Pages are NOT freed — they are retained for the next frame.
    /// (Dedicated oversized pages are also retained but are never reused
    /// by the bump path.)
    pub fn reset(&mut self) {
        self.current_page = 0;
        self.cursor = 0;
        self.reset_count += 1;
    }

    /// Number of pages currently allocated (including unused retained pages).
    pub fn page_count(&self) -> usize {
        self.pages.len()
    }

    /// Total capacity in bytes across all pages.
    pub fn capacity(&self) -> usize {
        self.pages.iter().map(|p| p.len()).sum()
    }

    /// Bytes currently in use (from start of page 0 to current cursor).
    ///
    /// NOTE: this counts every page before `current_page` as one full bump
    /// page, so it is an approximation when dedicated oversized pages are
    /// interleaved with bump pages.
    pub fn used_bytes(&self) -> usize {
        if self.pages.is_empty() {
            return 0;
        }
        let page_size = self.page_size();
        self.current_page * page_size + self.cursor
    }

    /// The bump page size, taken from the first page (which is always a
    /// bump page created by the constructor).
    fn page_size(&self) -> usize {
        self.pages.first().map(|p| p.len()).unwrap_or(PAGE_SIZE)
    }
}

impl Default for FrameArena {
    fn default() -> Self {
        Self::new()
    }
}
160
161// ---------------------------------------------------------------------------
162// Type-erased arena storage for CJC Values
163// ---------------------------------------------------------------------------
164
165/// A type-erased arena entry. Stores an `Rc<RefCell<Box<dyn Any>>>` so it
166/// can be shared with the Value system while being backed by arena memory.
167///
168/// In the current implementation, the arena provides the backing store
169/// discipline (bulk-free on reset), while Rc provides the sharing semantics
170/// needed by CJC's Value type. The key benefit: arena values don't need
171/// individual deallocation and can be bulk-freed.
172pub struct ArenaEntry {
173    /// The stored value. Uses Rc for compatibility with Value::ClassRef etc.
174    pub value: Rc<RefCell<Box<dyn Any>>>,
175}
176
177/// Arena-backed object store. Combines FrameArena with a typed entry list
178/// for objects that need to be accessed by index.
179pub struct ArenaStore {
180    /// The underlying bump arena (tracks memory pages).
181    pub arena: FrameArena,
182    /// Type-erased entries stored in this frame.
183    entries: Vec<Option<ArenaEntry>>,
184    /// Free list for entry reuse after reset.
185    free_slots: Vec<usize>,
186}
187
188impl ArenaStore {
189    pub fn new() -> Self {
190        ArenaStore {
191            arena: FrameArena::new(),
192            entries: Vec::new(),
193            free_slots: Vec::new(),
194        }
195    }
196
197    /// Allocate a new entry in the arena store.
198    pub fn alloc<T: Any + 'static>(&mut self, value: T) -> usize {
199        let entry = ArenaEntry {
200            value: Rc::new(RefCell::new(Box::new(value))),
201        };
202        if let Some(idx) = self.free_slots.pop() {
203            self.entries[idx] = Some(entry);
204            idx
205        } else {
206            let idx = self.entries.len();
207            self.entries.push(Some(entry));
208            idx
209        }
210    }
211
212    /// Get a reference to the stored value at the given index.
213    pub fn get<T: Any + 'static>(&self, index: usize) -> Option<&T> {
214        self.entries.get(index).and_then(|slot| {
215            slot.as_ref().and_then(|entry| {
216                let borrowed = entry.value.as_ref();
217                // Safety: we need to extend the lifetime through the RefCell.
218                // This is safe because the ArenaStore owns the Rc and the
219                // caller borrows &self.
220                let ptr = borrowed.as_ptr();
221                unsafe { (*ptr).downcast_ref::<T>() }
222            })
223        })
224    }
225
226    /// Number of live entries.
227    pub fn live_count(&self) -> usize {
228        self.entries.iter().filter(|e| e.is_some()).count()
229    }
230
231    /// Reset the store — mark all entries as free, reset the arena.
232    pub fn reset(&mut self) {
233        self.free_slots.clear();
234        for i in 0..self.entries.len() {
235            if self.entries[i].is_some() {
236                self.entries[i] = None;
237                self.free_slots.push(i);
238            }
239        }
240        // Reverse so LIFO pop gives lowest indices first (deterministic).
241        self.free_slots.reverse();
242        self.arena.reset();
243    }
244}
245
246impl Default for ArenaStore {
247    fn default() -> Self {
248        Self::new()
249    }
250}
251
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_arena_basic_alloc() {
        let mut a = FrameArena::new();
        let first = a.alloc_bytes(16);
        let second = a.alloc_bytes(16);
        // Sequential allocations land back to back on page 0.
        assert_eq!(first, (0, 0));
        assert_eq!(second, (0, 16)); // 16 is already 8-aligned
    }

    #[test]
    fn test_arena_page_overflow() {
        let mut a = FrameArena::with_page_size(32);
        // Two 16-byte allocations fill page 0 exactly.
        assert_eq!(a.alloc_bytes(16).0, 0);
        assert_eq!(a.alloc_bytes(16).0, 0);
        // The next allocation spills onto a freshly created page 1.
        assert_eq!(a.alloc_bytes(8), (1, 0));
        assert_eq!(a.page_count(), 2);
    }

    #[test]
    fn test_arena_reset_reuses_pages() {
        let mut a = FrameArena::with_page_size(32);
        for _ in 0..3 {
            a.alloc_bytes(16); // third allocation spills to page 1
        }
        assert_eq!(a.page_count(), 2);

        a.reset();
        // Pages survive the reset; only the bump state rewinds.
        assert_eq!(a.page_count(), 2);
        assert_eq!(a.used_bytes(), 0);
        assert_eq!(a.reset_count, 1);

        // New allocations start over from page 0.
        assert_eq!(a.alloc_bytes(16), (0, 0));
    }

    #[test]
    fn test_arena_alignment() {
        let mut a = FrameArena::new();
        // Every request is rounded up to a multiple of 8 bytes.
        assert_eq!(a.alloc_bytes(1).1, 0);
        assert_eq!(a.alloc_bytes(1).1, 8);
    }

    #[test]
    fn test_arena_oversized_alloc() {
        let mut a = FrameArena::with_page_size(32);
        // Requests larger than a page get a dedicated page of their own.
        let (page, offset) = a.alloc_bytes(64);
        assert_eq!(offset, 0);
        assert!(a.pages[page].len() >= 64);
    }

    #[test]
    fn test_arena_get_bytes() {
        let mut a = FrameArena::new();
        let (page, offset) = a.alloc_bytes(8);
        let payload = [1u8, 2, 3, 4, 5, 6, 7, 8];
        // Round-trip: write through the mutable view, read it back.
        a.get_bytes_mut(page, offset, 8)
            .unwrap()
            .copy_from_slice(&payload);
        assert_eq!(a.get_bytes(page, offset, 8).unwrap(), &payload);
    }

    #[test]
    fn test_arena_deterministic_layout() {
        // Identical allocation sequences must produce identical layouts.
        let mut left = FrameArena::with_page_size(64);
        let mut right = FrameArena::with_page_size(64);

        let from_left: Vec<_> = (0..10).map(|_| left.alloc_bytes(8)).collect();
        let from_right: Vec<_> = (0..10).map(|_| right.alloc_bytes(8)).collect();

        assert_eq!(from_left, from_right, "deterministic layout");
    }

    // -- ArenaStore tests ---------------------------------------------------

    #[test]
    fn test_store_alloc_and_read() {
        let mut st = ArenaStore::new();
        let idx = st.alloc(42i64);
        assert_eq!(*st.get::<i64>(idx).unwrap(), 42);
    }

    #[test]
    fn test_store_reset_frees_entries() {
        let mut st = ArenaStore::new();
        st.alloc(1i64);
        st.alloc(2i64);
        assert_eq!(st.live_count(), 2);

        st.reset();
        assert_eq!(st.live_count(), 0);
    }

    #[test]
    fn test_store_reset_reuses_slots() {
        let mut st = ArenaStore::new();
        let _first = st.alloc(1i64);
        let _second = st.alloc(2i64);
        st.reset();

        // Freed slots are recycled before the entry list grows.
        let c = st.alloc(99i64);
        assert!(c < 2, "should reuse a freed slot, got {c}");
        assert_eq!(*st.get::<i64>(c).unwrap(), 99);
    }

    #[test]
    fn test_store_type_mismatch() {
        let mut st = ArenaStore::new();
        let idx = st.alloc(42i64);
        // Downcasting to the wrong type yields None rather than panicking.
        assert!(st.get::<String>(idx).is_none());
    }

    #[test]
    fn test_arena_capacity_grows_not_shrinks() {
        let mut a = FrameArena::with_page_size(32);
        for _ in 0..10 {
            a.alloc_bytes(16);
        }
        let before = a.capacity();
        a.reset();
        assert_eq!(a.capacity(), before, "capacity must not shrink after reset");
    }
}
401}