use crate::ALIGNMENT;
use std::{alloc, mem, ptr, slice};
use swamp_vm_types::aligner::{SAFE_ALIGNMENT, align};
use swamp_vm_types::{HeapMemoryAddress, HeapMemoryRegion, MemoryAlignment, MemorySize};

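/// Distinguishes the two phases the VM memory runs in: evaluating
/// compile-time constants into the constant area, and normal program
/// execution afterwards.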
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExecutionMode {
    ConstantEvaluation,
    NormalExecution,
}

pub struct MemoryDebug {
    pub max_heap_alloc_offset: usize,
}

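/// Linear VM memory backed by a single host allocation, laid out as
/// `[constants | stack | heap]`. Every address handed out by this module is
/// a byte offset into that one region, which is why `u32` offsets suffice.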
pub struct Memory {
    pub(crate) memory: *mut u8,
    pub(crate) memory_size: usize,
    pub stack_memory_size: usize,
    pub stack_offset: usize,
    pub(crate) frame_offset: usize,
    pub stack_start: usize,
    pub heap_start: usize,
    pub heap_alloc_offset: usize,
    pub constant_memory_size: usize,
    pub execution_mode: ExecutionMode,
    pub debug: MemoryDebug,
}

impl Drop for Memory {
    fn drop(&mut self) {
        unsafe {
            alloc::dealloc(
                self.memory,
                alloc::Layout::from_size_align(self.memory_size, ALIGNMENT).unwrap(),
            );
        }
    }
}

impl Memory {
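    /// Allocates one zeroed region sized for the constant area, the stack,
    /// and the heap, copies `constant_memory` to the front, and aligns the
    /// stack and heap start offsets behind it.
    ///
    /// A minimal usage sketch (the sizes are illustrative only; the assert
    /// below demands more than 128 KiB of stack):
    ///
    /// ```ignore
    /// let constants = vec![0u8; 64];
    /// let memory = Memory::new(&constants, 1024 * 1024, 16 * 1024 * 1024);
    /// ```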
    pub fn new(constant_memory: &[u8], stack_memory_size: usize, heap_memory_size: usize) -> Self {
        let total_memory_size = constant_memory.len() + stack_memory_size + heap_memory_size;
        let layout = alloc::Layout::from_size_align(total_memory_size, ALIGNMENT).unwrap();
        let memory = unsafe { alloc::alloc(layout) };
        if memory.is_null() {
            alloc::handle_alloc_error(layout);
        }
        unsafe {
            ptr::write_bytes(memory, 0, total_memory_size);
            ptr::copy_nonoverlapping(constant_memory.as_ptr(), memory, constant_memory.len());
        }

        let aligned_start_of_stack = align(constant_memory.len(), ALIGNMENT);

        let aligned_start_of_heap = align(aligned_start_of_stack + stack_memory_size, ALIGNMENT);

        assert!(
            aligned_start_of_heap > aligned_start_of_stack + 128 * 1024,
            "stack region must be larger than 128 KiB"
        );

        Self {
            memory,
            stack_memory_size,
            memory_size: total_memory_size,
            stack_offset: aligned_start_of_stack,
            heap_start: aligned_start_of_heap,
            frame_offset: aligned_start_of_stack,
            heap_alloc_offset: aligned_start_of_heap,
            constant_memory_size: aligned_start_of_stack,
            stack_start: aligned_start_of_stack,
            execution_mode: ExecutionMode::NormalExecution,
            debug: MemoryDebug {
                max_heap_alloc_offset: 0,
            },
        }
    }

    pub fn reset_offset(&mut self) {
        self.frame_offset = self.stack_start;
    }

    pub fn reset(&mut self) {
        assert!(self.stack_offset >= self.constant_memory_size);
        self.stack_offset = self.stack_start;
        self.frame_offset = self.stack_offset;
    }

    pub fn reset_allocator(&mut self) {
        self.heap_alloc_offset = self.heap_start;
    }

    pub fn reset_stack_and_fp(&mut self) {
        self.stack_offset = self.stack_start;
        self.frame_offset = self.stack_offset;
    }

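    /// Enters `ExecutionMode::ConstantEvaluation`: the heap bump allocator is
    /// moved down to start directly after the constant area, so everything
    /// allocated while evaluating constants lands where
    /// `incorporate_heap_into_constant_area` can later freeze it.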
    pub fn set_heap_directly_after_constant_area(&mut self) {
        let original_constant_memory_size = self.constant_memory_size;
        let aligned_heap_start = align(original_constant_memory_size, ALIGNMENT);
        self.heap_start = aligned_heap_start;
        self.heap_alloc_offset = aligned_heap_start;

        self.execution_mode = ExecutionMode::ConstantEvaluation;
    }

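    /// Ends constant evaluation: everything the bump allocator produced is
    /// absorbed into the constant area, and the stack and heap are re-seated
    /// behind the grown constant region.
    ///
    /// A sketch of the intended call sequence as this module suggests it
    /// (the evaluation step in the middle is assumed):
    ///
    /// ```ignore
    /// memory.set_heap_directly_after_constant_area();
    /// // ... evaluate constants, writing their payloads to the heap ...
    /// memory.incorporate_heap_into_constant_area();
    /// ```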
    pub fn incorporate_heap_into_constant_area(&mut self) {
        let constant_heap_end = self.heap_alloc_offset;

        let new_stack_start = align(constant_heap_end, ALIGNMENT);

        self.stack_start = new_stack_start;
        self.stack_offset = new_stack_start;
        self.frame_offset = new_stack_start;
        self.constant_memory_size = new_stack_start;

        let new_heap_start = align(new_stack_start + self.stack_memory_size, ALIGNMENT);
        self.heap_start = new_heap_start;
        self.heap_alloc_offset = new_heap_start;

        self.execution_mode = ExecutionMode::NormalExecution;
    }

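    /// Carves a zeroed region out of the space directly below the current
    /// stack start and bumps `stack_start` past it. Note that the requested
    /// alignment is currently ignored in favour of `SAFE_ALIGNMENT`, hence
    /// the `_alignment` name.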
    pub fn alloc_before_stack(
        &mut self,
        size: &MemorySize,
        _alignment: &MemoryAlignment,
    ) -> HeapMemoryRegion {
        let start = align(self.stack_start, SAFE_ALIGNMENT);
        let end = start + size.0 as usize;
        let new_start = align(end, SAFE_ALIGNMENT);

        unsafe {
            ptr::write_bytes(self.get_heap_ptr(start), 0, size.0 as usize);
        }

        self.stack_start = new_start;

        HeapMemoryRegion {
            addr: HeapMemoryAddress(start as u32),
            size: *size,
        }
    }

    #[inline(always)]
    #[must_use]
    pub fn get_heap_ptr(&self, offset: usize) -> *mut u8 {
        debug_assert!(
            offset < self.memory_size,
            "out of bounds for heap. requested {offset} out of {}",
            self.memory_size,
        );
        unsafe { self.memory.add(offset) }
    }

    #[must_use]
    pub fn get_heap_const_ptr(&self, offset: usize) -> *const u8 {
        debug_assert!(
            offset < self.memory_size,
            "Memory access out of bounds: offset 0x{:X} >= memory_size 0x{:X}",
            offset,
            self.memory_size
        );

        unsafe { self.memory.add(offset) }
    }

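    /// # Safety
    ///
    /// `ptr` must point into this VM's memory region (at or after `memory`)
    /// and lie within `u32` range of its base; otherwise the returned offset
    /// is meaningless.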
    pub unsafe fn get_heap_offset(&self, ptr: *const u8) -> u32 {
        (ptr as usize - self.memory as usize) as u32
    }

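    /// Bump-allocates `size` bytes on the VM heap, rounding the reserved span
    /// up to `ALIGNMENT`, and returns the allocation's offset into VM memory.
    /// There is no per-allocation free; `reset_allocator` reclaims the whole
    /// heap at once.
    ///
    /// A hedged usage sketch (the 16-byte size is illustrative):
    ///
    /// ```ignore
    /// let offset = memory.heap_allocate_secret(16);
    /// let ptr = memory.get_heap_ptr(offset as usize);
    /// ```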
    pub fn heap_allocate_secret(&mut self, size: usize) -> u32 {
        let aligned_size = align(size, ALIGNMENT);
        let aligned_offset = self.heap_alloc_offset + aligned_size;

        debug_assert!(
            aligned_offset <= self.memory_size,
            "out of heap memory {aligned_offset:X} > {}",
            self.memory_size
        );

        let result_offset = self.heap_alloc_offset;
        self.heap_alloc_offset = aligned_offset;

        #[cfg(feature = "debug_vm")]
        if self.heap_alloc_offset - self.heap_start > self.debug.max_heap_alloc_offset {
            self.debug.max_heap_alloc_offset = self.heap_alloc_offset - self.heap_start;
        }

        result_offset as u32
    }

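    /// Like `heap_allocate_secret`, but copies `data` into the freshly
    /// reserved span before returning its offset.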
    pub fn heap_allocate_with_data(&mut self, data: &[u8]) -> u32 {
        let aligned_size = align(data.len(), ALIGNMENT);
        let aligned_offset = self.heap_alloc_offset + aligned_size;

        debug_assert!(
            aligned_offset <= self.memory_size,
            "out of heap memory {aligned_offset:X} > {}",
            self.memory_size
        );

        let result_offset = self.heap_alloc_offset;

        unsafe {
            let dest_ptr = self.memory.add(result_offset);
            ptr::copy_nonoverlapping(data.as_ptr(), dest_ptr, data.len());
        }

        self.heap_alloc_offset = aligned_offset;

        result_offset as u32
    }

    #[must_use]
    pub fn frame_offset(&self) -> usize {
        self.frame_offset
    }

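    /// One plausible call-frame sequence built from the primitives below;
    /// the real protocol lives in the interpreter, so treat this as a sketch:
    ///
    /// ```ignore
    /// let (saved_fp, saved_sp) = (memory.frame_offset, memory.stack_offset);
    /// memory.set_fp_from_sp(); // the callee's frame starts at the stack top
    /// memory.inc_sp(frame_size); // reserve the callee's frame
    /// // ... run the callee ...
    /// memory.pop(saved_fp, saved_sp); // restore the caller's frame and stack
    /// ```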
    #[inline(always)]
    pub(crate) const fn inc_sp(&mut self, aligned_size: usize) {
        self.stack_offset += aligned_size;
    }

    #[inline(always)]
    pub const fn set_fp_from_sp(&mut self) {
        self.frame_offset = self.stack_offset;
    }

    pub(crate) fn set_stack_and_frame(&mut self, addr: usize) {
        assert!(
            addr > self.constant_memory_size,
            "must be greater than the constant area"
        );
        assert!(addr > self.stack_start);
        self.frame_offset = addr;
        self.stack_offset = addr;
    }

    #[inline]
    pub(crate) const fn pop(&mut self, previous_frame_offset: usize, previous_stack_offset: usize) {
        self.frame_offset = previous_frame_offset;
        self.stack_offset = previous_stack_offset;
    }

    pub(crate) fn read_debug_stack_slice(&self, start_offset: u32, size: u16) -> Vec<u8> {
        let slice = unsafe {
            slice::from_raw_parts(
                self.get_stack_const_ptr(start_offset as usize),
                size as usize,
            )
        };

        slice.to_vec()
    }

    #[must_use]
    pub fn frame_ptr(&self) -> *mut u8 {
        self.get_frame_ptr(0)
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_ptr_as_u32(&self, offset: u32) -> *mut u32 {
        debug_assert!(
            (self.frame_offset + offset as usize) < self.memory_size,
            "out of stack space frame base:{} offset:{offset} total: {}",
            self.frame_offset,
            self.memory_size,
        );
        debug_assert_eq!(offset % 4, 0, "Unaligned u32 access at offset {offset}");

        self.get_heap_ptr(offset as usize + self.frame_offset) as *mut u32
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_ptr_as_u16(&self, offset: u32) -> *mut u16 {
        debug_assert!(
            (self.frame_offset + offset as usize) < self.memory_size,
            "wrong frame addr"
        );
        debug_assert_eq!(
            (self.frame_offset + offset as usize) % 2,
            0,
            "Unaligned u16 access at offset {offset}",
        );

        self.get_frame_ptr(offset) as *mut u16
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_ptr(&self, fp_offset: u32) -> *mut u8 {
        debug_assert!(
            (self.frame_offset + fp_offset as usize) < self.memory_size,
            "wrong frame addr"
        );

        self.get_heap_ptr(fp_offset as usize + self.frame_offset)
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_const_ptr(&self, fp_offset: u32) -> *const u8 {
        debug_assert!(
            (self.frame_offset + fp_offset as usize) < self.memory_size,
            "wrong frame addr"
        );

        self.get_heap_const_ptr(fp_offset as usize + self.frame_offset)
    }

    pub(crate) fn read_frame_debug_slice(&self, start_offset: u32, size: u16) -> Vec<u8> {
        let slice =
            unsafe { slice::from_raw_parts(self.get_frame_const_ptr(start_offset), size as usize) };

        slice.to_vec()
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_ptr_as_i32(&self, offset: u32) -> *mut i32 {
        debug_assert_eq!(
            offset % 4,
            0,
            "Unaligned i32 access at offset {offset}"
        );
        self.get_frame_ptr(offset) as *mut i32
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_const_ptr_as_i32(&self, offset: u32) -> *const i32 {
        debug_assert_eq!(
            offset % 4,
            0,
            "Unaligned i32 access at offset {offset}"
        );
        debug_assert!(
            (self.frame_offset + offset as usize) < self.memory_size,
            "wrong frame addr"
        );

        self.get_frame_const_ptr(offset) as *const i32
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_const_ptr_as_u32(&self, offset: u32) -> *const u32 {
        let absolute_offset = self.frame_offset + offset as usize;
        debug_assert!(absolute_offset < self.memory_size, "wrong frame addr");

        debug_assert_eq!(
            absolute_offset % mem::align_of::<u32>(),
            0,
            "Unaligned u32 access at absolute offset {absolute_offset} (frame: {}, offset: {})",
            self.frame_offset,
            offset
        );

        self.get_frame_const_ptr(offset) as *const u32
    }

    #[inline(always)]
    #[must_use]
    pub fn get_frame_const_ptr_as_u16(&self, offset: u32) -> *const u16 {
        let absolute_offset = self.frame_offset + offset as usize;
        debug_assert!(absolute_offset < self.memory_size, "wrong frame addr");

        debug_assert_eq!(
            absolute_offset % mem::align_of::<u16>(),
            0,
            "Unaligned u16 access at absolute offset {absolute_offset} (frame: {}, offset: {})",
            self.frame_offset,
            offset
        );

        self.get_frame_const_ptr(offset) as *const u16
    }

    #[must_use]
    pub fn read_frame_i32(&self, offset: u32) -> i32 {
        unsafe { *(self.get_frame_const_ptr_as_i32(offset)) }
    }

    #[inline(always)]
    #[must_use]
    pub fn read_frame_u8(&self, offset: u32) -> u8 {
        unsafe { *self.get_frame_const_ptr(offset) }
    }

    #[inline(always)]
    #[must_use]
    pub fn read_frame_bool(&self, offset: u32) -> bool {
        unsafe { *self.get_frame_const_ptr(offset) != 0 }
    }

    #[inline(always)]
    #[must_use]
    pub fn read_frame_u16(&self, offset: u32) -> u16 {
        unsafe { *self.get_frame_const_ptr_as_u16(offset) }
    }

    #[inline(always)]
    #[must_use]
    pub fn read_frame_u32(&self, offset: u32) -> u32 {
        unsafe { *self.get_frame_const_ptr_as_u32(offset) }
    }

    #[inline(always)]
    #[must_use]
    pub const fn get_stack_const_ptr(&self, stack_offset: usize) -> *const u8 {
        debug_assert!(stack_offset < self.memory_size, "wrong stack addr");
        unsafe { self.memory.add(stack_offset) }
    }

    pub(crate) fn read_debug_slice(&self, start_offset: u32, size: u16) -> Vec<u8> {
        let slice =
            unsafe { slice::from_raw_parts(self.memory.add(start_offset as usize), size as usize) };

        slice.to_vec()
    }

    #[must_use]
    #[inline(always)]
    pub fn read_heap_offset_via_frame(&self, frame_offset: u32) -> u32 {
        self.read_frame_u32(frame_offset)
    }

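    /// The `*_via_frame` accessors add one level of indirection: the frame
    /// slot at `frame_offset` holds a `u32` offset into VM memory, which is
    /// resolved to a host pointer here.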
    #[inline(always)]
    #[must_use]
    pub fn get_heap_ptr_via_frame(&self, frame_offset: u32) -> *mut u8 {
        let heap_offset = self.read_frame_u32(frame_offset);
        self.get_heap_ptr(heap_offset as usize)
    }

    #[inline(always)]
    #[must_use]
    pub fn get_heap_u32_ptr_via_frame(&self, frame_offset: u32) -> *mut u32 {
        let heap_offset = self.read_frame_u32(frame_offset);
        self.get_heap_ptr(heap_offset as usize) as *mut u32
    }

    #[inline(always)]
    #[must_use]
    pub fn get_heap_ptr_via_frame_with_offset(
        &self,
        frame_offset: u32,
        heap_ptr_offset: u32,
    ) -> *mut u8 {
        let heap_offset = self.read_heap_offset_via_frame(frame_offset);
        self.get_heap_ptr(heap_offset as usize + heap_ptr_offset as usize)
    }
}