swamp_vm/
memory.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
use crate::ALIGNMENT;
use std::{alloc, mem, ptr, slice};
use swamp_vm_isa::aligner::{SAFE_ALIGNMENT, align};
use swamp_vm_isa::{HeapMemoryAddress, HeapMemoryRegion, MemoryAlignment, MemorySize};

/// Execution mode for the VM memory
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExecutionMode {
    /// Constant evaluation mode - strings should be in constant memory or heap
    ConstantEvaluation,
    /// Normal execution mode - strings should be in heap
    NormalExecution,
}

pub struct MemoryDebug {
    pub max_heap_alloc_offset: usize,
}

/// VM Memory Layout (from lower to higher addresses):
///
/// 1. **Constant Memory**: Pre-compiled constant data (read-only)
/// 2. **Constant Heap Allocations**: Strings and other heap data allocated during constant evaluation
///    - These allocations must be preserved between function calls as they contain constant string data
///    - String pointers in constant memory reference this area
/// 3. **Preserved Structs**: Data structures that need to persist between engine ticks
///    - Currently unused but reserved for future use
/// 4. **Stack Space**: Frame-placed variables and function call frames
///    - Grows upward with each function call
///    - Reset on function entry/exit
/// 5. **Heap**: Dynamic allocations during program execution
///    - Reset after each tick to prevent memory leaks
///    - Starts after the preserved area to avoid corrupting constant data
pub struct Memory {
    pub(crate) memory: *mut u8,
    pub(crate) memory_size: usize,
    pub stack_memory_size: usize,
    pub stack_offset: usize,        // Current stack position
    pub(crate) frame_offset: usize, // Current frame position
    pub stack_start: usize,
    pub heap_start: usize,
    pub heap_alloc_offset: usize,
    pub constant_memory_size: usize,
    pub execution_mode: ExecutionMode, // Track whether we're in constant evaluation or normal execution

    pub debug: MemoryDebug,
}
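
// Illustrative sketch of the regions above during normal execution. Offsets
// are hypothetical (assuming a 0x1000-byte constant area); the real values
// are computed in `Memory::new` below:
//
//   0x0000       +-- constant memory (plus any incorporated constant heap)
//   0x1000       +-- stack_start == stack_offset == frame_offset after reset;
//                |   grows upward with each call frame
//   heap_start   +-- heap_alloc_offset: bump allocator, reset each tick
//   memory_size  +-- end of the single backing allocation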

impl Drop for Memory {
    fn drop(&mut self) {
        unsafe {
            alloc::dealloc(
                self.memory,
                alloc::Layout::from_size_align(self.memory_size, ALIGNMENT).unwrap(),
            );
        }
    }
}

impl Memory {
    pub fn new(constant_memory: &[u8], stack_memory_size: usize, heap_memory_size: usize) -> Self {
        let total_memory_size = constant_memory.len() + stack_memory_size + heap_memory_size;
        let memory = unsafe {
            alloc::alloc(alloc::Layout::from_size_align(total_memory_size, ALIGNMENT).unwrap())
        };
        unsafe {
            ptr::write_bytes(memory, 0, total_memory_size);
            ptr::copy_nonoverlapping(constant_memory.as_ptr(), memory, constant_memory.len());
        }

        let aligned_start_of_stack = align(constant_memory.len(), ALIGNMENT);

        let aligned_start_of_heap = align(aligned_start_of_stack + stack_memory_size, ALIGNMENT);

        // Sanity check: require at least 128 KiB of stack space between the
        // constant area and the heap.
        assert!(aligned_start_of_heap > aligned_start_of_stack + 128 * 1024);

        Self {
            memory,
            stack_memory_size,
            memory_size: total_memory_size,
            stack_offset: aligned_start_of_stack,
            heap_start: aligned_start_of_heap,
            frame_offset: aligned_start_of_stack,
            heap_alloc_offset: aligned_start_of_heap,
            constant_memory_size: aligned_start_of_stack,
            stack_start: aligned_start_of_stack,
            execution_mode: ExecutionMode::NormalExecution,
            debug: MemoryDebug {
                max_heap_alloc_offset: 0,
            },
        }
    }
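
    // Worked example of the arithmetic above (hypothetical sizes, assuming
    // ALIGNMENT == 16): a 100-byte constant blob and a 1 MiB stack give
    //   aligned_start_of_stack = align(100, 16)             == 112
    //   aligned_start_of_heap  = align(112 + 0x10_0000, 16) == 0x10_0070
    // so the heap region begins immediately after the stack region, and the
    // assert above holds since 0x10_0070 > 112 + 128 KiB.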

    pub fn reset_offset(&mut self) {
        self.frame_offset = self.stack_start;
    }

    pub fn reset(&mut self) {
        assert!(self.stack_offset >= self.constant_memory_size);
        self.stack_offset = self.stack_start;
        self.frame_offset = self.stack_offset;
    }

    pub fn reset_allocator(&mut self) {
        self.heap_alloc_offset = self.heap_start;
    }

    pub fn reset_stack_and_fp(&mut self) {
        self.stack_offset = self.stack_start;
        self.frame_offset = self.stack_offset;
    }

    const HEAP_SIZE_DURING_CONSTANT_EVALUATION: usize = 512 * 1024;
    pub fn set_heap_directly_after_constant_area(&mut self) {
        // Set the heap_start to be just after the constant memory area (aligned)
        let original_constant_memory_size = self.constant_memory_size;
        let aligned_heap_start = align(original_constant_memory_size, ALIGNMENT);
        self.heap_start = aligned_heap_start;
        self.heap_alloc_offset = aligned_heap_start;

        // Move the stack start well after the heap so they do not clobber each other
        // TODO: Have a setting instead of a constant?
        self.stack_start = self.heap_start + Self::HEAP_SIZE_DURING_CONSTANT_EVALUATION;
        self.stack_offset = self.stack_start;

        // When setting the heap directly after the constant area, we're in constant evaluation mode
        self.execution_mode = ExecutionMode::ConstantEvaluation;
    }

    pub fn incorporate_heap_into_constant_area(&mut self) {
        let constant_heap_end = self.heap_alloc_offset;

        // Stack should start right after the incorporated constant heap
        let new_stack_start = align(constant_heap_end, ALIGNMENT);

        self.stack_start = new_stack_start;
        self.stack_offset = new_stack_start;
        self.frame_offset = new_stack_start;
        self.constant_memory_size = new_stack_start;

        // Heap should be after the stack
        let new_heap_start = align(new_stack_start + self.stack_memory_size, ALIGNMENT);
        self.heap_start = new_heap_start;
        self.heap_alloc_offset = new_heap_start;

        // After incorporating the heap into the constant area, we switch back to normal execution mode
        self.execution_mode = ExecutionMode::NormalExecution;
    }
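
    // Sketch of the intended constant-evaluation flow, inferred from the two
    // mode transitions above (the actual call sites live elsewhere in the VM):
    //
    //   let mut memory = Memory::new(&constants, stack_size, heap_size);
    //   memory.set_heap_directly_after_constant_area(); // -> ConstantEvaluation
    //   /* run constant initializers; string payloads land in this heap */
    //   memory.incorporate_heap_into_constant_area();   // -> NormalExecution
    //
    // After incorporation the constant heap is frozen into the constant area,
    // and fresh stack/heap regions are laid out after it.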

    /// Reserves a zeroed region immediately before the current stack start and
    /// moves `stack_start` past it, so the region survives stack resets.
    /// Note: the requested `alignment` is currently ignored; `SAFE_ALIGNMENT` is used.
    pub fn alloc_before_stack(
        &mut self,
        size: &MemorySize,
        alignment: &MemoryAlignment,
    ) -> HeapMemoryRegion {
        let start = align(self.stack_start, SAFE_ALIGNMENT);
        let end = start + size.0 as usize;
        let new_start = align(end, SAFE_ALIGNMENT);

        unsafe {
            ptr::write_bytes(self.get_heap_ptr(start), 0, size.0 as usize);
        }

        self.stack_start = new_start;

        HeapMemoryRegion {
            addr: HeapMemoryAddress(start as u32),
            size: *size,
        }
    }

    #[inline(always)]
    #[must_use]
    pub fn get_heap_ptr(&self, offset: usize) -> *mut u8 {
        debug_assert!(
            offset < self.memory_size,
            "out of bounds for heap. requested {offset} out of {}",
            self.memory_size,
        );
        unsafe { self.memory.add(offset) }
    }

    pub fn get_heap_const_ptr(&self, offset: usize) -> *const u8 {
        debug_assert!(
            offset < self.memory_size,
            "Memory access out of bounds: offset 0x{:X} >= memory_size 0x{:X}",
            offset,
            self.memory_size
        );

        unsafe { self.memory.add(offset) }
    }

    /// # Safety
    /// `ptr` must point into this VM's backing allocation; otherwise the
    /// subtraction underflows (a panic in debug builds, wraparound in release).
    /// The cast to `u32` also truncates for offsets above 4 GiB.
    pub unsafe fn get_heap_offset(&self, ptr: *const u8) -> u32 {
        (ptr as usize - self.memory as usize) as u32
    }

    pub fn heap_allocate_secret(&mut self, size: usize) -> u32 {
        let aligned_size = align(size, ALIGNMENT);
        let aligned_offset = self.heap_alloc_offset + aligned_size;

        debug_assert!(
            aligned_offset <= self.memory_size,
            "out of heap memory {aligned_offset:X} > {}",
            self.memory_size
        );

        let result_offset = self.heap_alloc_offset;
        self.heap_alloc_offset = aligned_offset;

        // Track the high-water mark of heap usage, relative to heap_start
        #[cfg(feature = "debug_vm")]
        {
            let used = self.heap_alloc_offset - self.heap_start;
            if used > self.debug.max_heap_alloc_offset {
                self.debug.max_heap_alloc_offset = used;
            }
        }

        result_offset as u32
    }

    pub fn heap_allocate_with_data(&mut self, data: &[u8]) -> u32 {
        let aligned_size = align(data.len(), ALIGNMENT);
        let aligned_offset = self.heap_alloc_offset + aligned_size;

        debug_assert!(
            aligned_offset <= self.memory_size,
            "out of heap memory {aligned_offset:X} > {}",
            self.memory_size
        );

        let result_offset = self.heap_alloc_offset;

        unsafe {
            let dest_ptr = self.memory.add(result_offset);
            ptr::copy_nonoverlapping(data.as_ptr(), dest_ptr, data.len());
        }

        self.heap_alloc_offset = aligned_offset;

        result_offset as u32
    }
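
    // Bump-allocation sketch (hypothetical values, assuming ALIGNMENT == 16):
    // two back-to-back allocations advance heap_alloc_offset by the aligned
    // size, so the returned offsets are contiguous aligned slots:
    //
    //   let a = memory.heap_allocate_secret(10); // a == previous heap_alloc_offset
    //   let b = memory.heap_allocate_secret(10); // b == a + align(10, 16) == a + 16
    //
    // heap_allocate_with_data works the same way but also copies `data` into
    // the freshly reserved slot.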

    pub fn frame_offset(&self) -> usize {
        self.frame_offset
    }

    /// Usually called on `Enter`, reserving space for local variables and arguments.
    /// This is a bit of a hack: the current return values and arguments are not part
    /// of the stack (in theory they should be), which slightly improves performance
    /// by not having to update SP to reflect the "pushed" return space and arguments.
    #[inline(always)]
    pub(crate) const fn inc_sp(&mut self, aligned_size: usize) {
        self.stack_offset += aligned_size;
    }

    /// Usually called on a call.
    /// Sets the FP to the current SP. The stack pointer includes the current function's
    /// frame size but doesn't include return values and arguments.
    #[inline(always)]
    pub const fn set_fp_from_sp(&mut self) {
        self.frame_offset = self.stack_offset;
    }

    pub(crate) fn set_stack_and_frame(&mut self, addr: usize) {
        assert!(
            addr > self.constant_memory_size,
            "must be greater than the constant area"
        );
        assert!(addr > self.stack_start);
        self.frame_offset = addr;
        self.stack_offset = addr;
    }

    #[inline]
    pub(crate) const fn pop(&mut self, previous_frame_offset: usize, previous_stack_offset: usize) {
        self.frame_offset = previous_frame_offset;
        self.stack_offset = previous_stack_offset;
    }
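
    // Illustrative call/return sequence built from the primitives above
    // (assumed usage; the real dispatch loop lives in the interpreter):
    //
    //   let (saved_fp, saved_sp) = (memory.frame_offset(), memory.stack_offset);
    //   memory.set_fp_from_sp();          // callee's frame begins at current SP
    //   memory.inc_sp(callee_frame_size); // reserve the callee's locals
    //   /* ... execute the callee ... */
    //   memory.pop(saved_fp, saved_sp);   // restore the caller's FP and SP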

    pub(crate) fn read_debug_stack_slice(&self, start_offset: u32, size: u16) -> Vec<u8> {
        let slice = unsafe {
            slice::from_raw_parts(
                self.get_stack_const_ptr(start_offset as usize),
                size as usize,
            )
        };

        slice.to_vec()
    }

    // ---------------- FP relative ----------------------------------
    #[must_use]
    pub fn frame_ptr(&self) -> *mut u8 {
        self.get_frame_ptr(0)
    }

    #[inline(always)]
    pub fn get_frame_ptr_as_u32(&self, offset: u32) -> *mut u32 {
        debug_assert!(
            (self.frame_offset + offset as usize) < self.memory_size,
            "out of stack space frame base:{} offset:{offset} total: {}",
            self.frame_offset,
            self.memory_size,
        );
        debug_assert_eq!(offset % 4, 0, "Unaligned u32 access at offset {offset}");

        self.get_heap_ptr(offset as usize + self.frame_offset) as *mut u32
    }

    #[inline(always)]
    pub fn get_frame_ptr_as_u16(&self, offset: u32) -> *mut u16 {
        debug_assert!(
            (self.frame_offset + offset as usize) < self.memory_size,
            "wrong frame addr"
        );
        // Ensure alignment
        debug_assert_eq!(
            (self.frame_offset + offset as usize) % 2,
            0,
            "Unaligned u16 access at offset {offset}",
        );

        self.get_frame_ptr(offset) as *mut u16
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_ptr(&self, fp_offset: u32) -> *mut u8 {
        debug_assert!(
            (self.frame_offset + fp_offset as usize) < self.memory_size,
            "wrong frame addr"
        );

        self.get_heap_ptr(fp_offset as usize + self.frame_offset)
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_const_ptr(&self, fp_offset: u32) -> *const u8 {
        debug_assert!(
            (self.frame_offset + fp_offset as usize) < self.memory_size,
            "wrong frame addr"
        );

        self.get_heap_const_ptr(fp_offset as usize + self.frame_offset)
    }

    pub(crate) fn read_frame_debug_slice(&self, start_offset: u32, size: u16) -> Vec<u8> {
        let slice =
            unsafe { slice::from_raw_parts(self.get_frame_const_ptr(start_offset), size as usize) };

        slice.to_vec()
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_ptr_as_i32(&self, some_addressing: u32) -> *mut i32 {
        // Ensure alignment
        debug_assert_eq!(
            some_addressing % 4,
            0,
            "Unaligned i32 access at offset {some_addressing}"
        );

        self.get_frame_ptr(some_addressing) as *mut i32
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_const_ptr_as_i32(&self, addressing: u32) -> *const i32 {
        // Ensure alignment
        debug_assert_eq!(
            addressing % 4,
            0,
            "Unaligned i32 access at offset {addressing}"
        );
        debug_assert!(
            (self.frame_offset + addressing as usize) < self.memory_size,
            "wrong frame addr"
        );

        self.get_frame_const_ptr(addressing) as *const i32
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_const_ptr_as_u32(&self, offset: u32) -> *const u32 {
        let absolute_offset = self.frame_offset + offset as usize;
        debug_assert!(absolute_offset < self.memory_size, "wrong frame addr");

        // Ensure alignment
        debug_assert_eq!(
            absolute_offset % mem::align_of::<u32>(),
            0,
            "Unaligned u32 access at absolute offset {absolute_offset} (frame: {}, offset: {})",
            self.frame_offset,
            offset
        );

        self.get_frame_const_ptr(offset) as *const u32
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_const_ptr_as_u16(&self, addressing: u32) -> *const u16 {
        let absolute_offset = self.frame_offset + addressing as usize;
        debug_assert!(absolute_offset < self.memory_size, "wrong frame addr");

        // Ensure alignment
        debug_assert_eq!(
            absolute_offset % mem::align_of::<u16>(),
            0,
            "Unaligned u16 access at absolute offset {absolute_offset} (frame: {}, offset: {})",
            self.frame_offset,
            addressing
        );

        self.get_frame_const_ptr(addressing) as *const u16
    }

    #[must_use]
    pub fn read_frame_i32(&self, offset: u32) -> i32 {
        unsafe { *(self.get_frame_const_ptr_as_i32(offset)) }
    }

    #[inline(always)]
    #[must_use]
    pub fn read_frame_u8(&self, offset: u32) -> u8 {
        unsafe { *self.get_frame_const_ptr(offset) }
    }

    #[inline(always)]
    #[must_use]
    pub fn read_frame_bool(&self, offset: u32) -> bool {
        unsafe { *self.get_frame_const_ptr(offset) != 0 }
    }

    #[inline(always)]
    #[must_use]
    pub fn read_frame_u16(&self, offset: u32) -> u16 {
        unsafe { *self.get_frame_const_ptr_as_u16(offset) }
    }

    #[inline(always)]
    #[must_use]
    pub fn read_frame_u32(&self, offset: u32) -> u32 {
        unsafe { *self.get_frame_const_ptr_as_u32(offset) }
    }
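
    // Typed frame-read sketch with a hypothetical frame layout: a bool flag at
    // fp+0, a u16 count at fp+2, and an i32 value at fp+4 would be read as:
    //
    //   let flag = memory.read_frame_bool(0);
    //   let count = memory.read_frame_u16(2);
    //   let value = memory.read_frame_i32(4);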

    // ---------- Stack ---------------
    #[inline(always)]
    #[must_use]
    pub const fn get_stack_const_ptr(&self, stack_offset: usize) -> *const u8 {
        debug_assert!(stack_offset < self.memory_size, "wrong stack addr");
        unsafe { self.memory.add(stack_offset) }
    }

    pub(crate) fn read_debug_slice(&self, start_offset: u32, size: u16) -> Vec<u8> {
        let slice =
            unsafe { slice::from_raw_parts(self.memory.add(start_offset as usize), size as usize) };

        slice.to_vec()
    }

    #[must_use]
    #[inline(always)]
    pub fn read_heap_offset_via_frame(&self, frame_offset: u32) -> u32 {
        self.read_frame_u32(frame_offset)
    }

    #[inline(always)]
    #[must_use]
    pub fn get_heap_ptr_via_frame(&self, frame_offset: u32) -> *mut u8 {
        let heap_offset = self.read_frame_u32(frame_offset);
        self.get_heap_ptr(heap_offset as usize)
    }

    #[inline(always)]
    #[must_use]
    pub fn get_heap_u32_ptr_via_frame(&self, frame_offset: u32) -> *mut u32 {
        let heap_offset = self.read_frame_u32(frame_offset);
        self.get_heap_ptr(heap_offset as usize) as *mut u32
    }

    #[inline(always)]
    #[must_use]
    pub fn get_heap_ptr_via_frame_with_offset(
        &self,
        frame_offset: u32,
        heap_ptr_offset: u32,
    ) -> *mut u8 {
        let heap_offset = self.read_heap_offset_via_frame(frame_offset);
        self.get_heap_ptr(heap_offset as usize + heap_ptr_offset as usize)
    }
}
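
// Indirection sketch for the *_via_frame accessors above (hypothetical frame
// layout): a frame slot at fp+8 that stores an absolute heap offset as a u32
// can be followed in one step, or in two equivalent steps:
//
//   let p = memory.get_heap_ptr_via_frame(8);
//   // equivalent to:
//   let off = memory.read_frame_u32(8);
//   let p2 = memory.get_heap_ptr(off as usize);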