1use super::core::{CallFrame, ExecutionLocation};
7use crate::{JitError, JitResult};
8use std::collections::HashMap;
9
10#[derive(Debug, Clone)]
/// Bounded stack of [`CallFrame`]s modeling nested function calls during execution.
#[derive(Debug, Clone)]
pub struct CallStack {
    /// Active frames; the last element is the innermost (most recent) call.
    frames: Vec<CallFrame>,
    /// `push` fails with a runtime error once this many frames are live.
    max_depth: usize,
}
16
17impl CallStack {
18 pub fn new() -> Self {
20 Self {
21 frames: Vec::new(),
22 max_depth: 1000, }
24 }
25
26 pub fn with_max_depth(max_depth: usize) -> Self {
31 Self {
32 frames: Vec::new(),
33 max_depth,
34 }
35 }
36
37 pub fn push(&mut self, frame: CallFrame) -> JitResult<()> {
45 if self.frames.len() >= self.max_depth {
46 return Err(JitError::RuntimeError(format!(
47 "Call stack overflow: maximum depth {} exceeded",
48 self.max_depth
49 )));
50 }
51 self.frames.push(frame);
52 Ok(())
53 }
54
55 pub fn pop(&mut self) -> ExecutionLocation {
60 if let Some(frame) = self.frames.pop() {
61 frame.return_location
62 } else {
63 ExecutionLocation::Completed
64 }
65 }
66
67 pub fn depth(&self) -> usize {
69 self.frames.len()
70 }
71
72 pub fn is_empty(&self) -> bool {
74 self.frames.is_empty()
75 }
76
77 pub fn current_frame(&self) -> Option<&CallFrame> {
79 self.frames.last()
80 }
81
82 pub fn current_frame_mut(&mut self) -> Option<&mut CallFrame> {
84 self.frames.last_mut()
85 }
86
87 pub fn frame_at(&self, depth: usize) -> Option<&CallFrame> {
92 if depth < self.frames.len() {
93 Some(&self.frames[self.frames.len() - 1 - depth])
94 } else {
95 None
96 }
97 }
98
99 pub fn frames(&self) -> &[CallFrame] {
101 &self.frames
102 }
103
104 pub fn clear(&mut self) {
106 self.frames.clear();
107 }
108
109 pub fn find_frames_by_function(&self, function_name: &str) -> Vec<usize> {
117 let mut depths: Vec<usize> = self
118 .frames
119 .iter()
120 .enumerate()
121 .filter_map(|(i, frame)| {
122 if frame.function_name == function_name {
123 Some(self.frames.len() - 1 - i) } else {
125 None
126 }
127 })
128 .collect();
129 depths.sort(); depths
131 }
132
133 pub fn get_summary(&self) -> CallStackSummary {
135 let function_calls: Vec<String> = self
136 .frames
137 .iter()
138 .rev() .map(|frame| frame.function_name.clone())
140 .collect();
141
142 CallStackSummary {
143 depth: self.frames.len(),
144 max_depth: self.max_depth,
145 function_calls,
146 }
147 }
148
149 pub fn set_max_depth(&mut self, max_depth: usize) {
151 self.max_depth = max_depth;
152 }
153
154 pub fn max_depth(&self) -> usize {
156 self.max_depth
157 }
158}
159
/// Lightweight snapshot of a call stack produced by `CallStack::get_summary`.
#[derive(Debug, Clone)]
pub struct CallStackSummary {
    /// Number of frames at the time of the snapshot.
    pub depth: usize,
    /// Depth limit configured on the stack.
    pub max_depth: usize,
    /// Function names, ordered innermost call first.
    pub function_calls: Vec<String>,
}
167
/// Sparse byte-addressable memory image with region-based allocation tracking.
#[derive(Debug, Clone)]
pub struct MemoryState {
    /// Sparse byte store: only addresses that have been written are present;
    /// reads of absent addresses yield 0.
    memory: HashMap<u64, u8>,
    /// Live allocations, keyed by allocation id.
    allocated_regions: HashMap<u64, MemoryRegion>,
    /// Id handed out to the next successful allocation (starts at 1).
    next_allocation_id: u64,
    /// Sum of the sizes of all live regions, in bytes.
    total_allocated: usize,
    /// Allocations fail once `total_allocated` would exceed this limit.
    max_memory_usage: usize,
}
177
/// Bookkeeping record for one allocated memory region.
#[derive(Debug, Clone)]
pub struct MemoryRegion {
    /// Allocation id assigned by `MemoryState::allocate_region`.
    pub id: u64,
    /// First address of the region (inclusive).
    pub start_address: u64,
    /// Region length in bytes; the region spans `[start_address, start_address + size)`.
    pub size: usize,
    /// Wall-clock time at which the region was allocated.
    pub allocation_time: std::time::SystemTime,
    /// Optional human-readable tag for diagnostics.
    pub label: Option<String>,
}
187
188impl MemoryState {
189 pub fn new() -> Self {
191 Self {
192 memory: HashMap::new(),
193 allocated_regions: HashMap::new(),
194 next_allocation_id: 1,
195 total_allocated: 0,
196 max_memory_usage: usize::MAX,
197 }
198 }
199
200 pub fn with_limit(max_memory_usage: usize) -> Self {
205 Self {
206 memory: HashMap::new(),
207 allocated_regions: HashMap::new(),
208 next_allocation_id: 1,
209 total_allocated: 0,
210 max_memory_usage,
211 }
212 }
213
214 pub fn read_memory(&self, address: u64, size: usize) -> JitResult<Vec<u8>> {
223 let mut data = Vec::new();
224 for i in 0..size {
225 let byte = self.memory.get(&(address + i as u64)).copied().unwrap_or(0);
226 data.push(byte);
227 }
228 Ok(data)
229 }
230
231 pub fn write_memory(&mut self, address: u64, data: &[u8]) -> JitResult<()> {
237 for (i, &byte) in data.iter().enumerate() {
238 self.memory.insert(address + i as u64, byte);
239 }
240 Ok(())
241 }
242
243 pub fn allocate_region(
253 &mut self,
254 address: u64,
255 size: usize,
256 label: Option<String>,
257 ) -> JitResult<u64> {
258 if self.total_allocated + size > self.max_memory_usage {
260 return Err(JitError::RuntimeError(format!(
261 "Memory allocation would exceed limit: {} + {} > {}",
262 self.total_allocated, size, self.max_memory_usage
263 )));
264 }
265
266 for region in self.allocated_regions.values() {
268 let region_end = region.start_address + region.size as u64;
269 let new_end = address + size as u64;
270
271 if !(new_end <= region.start_address || address >= region_end) {
272 return Err(JitError::RuntimeError(format!(
273 "Memory region overlap: new region [0x{:x}, 0x{:x}) overlaps with existing region [0x{:x}, 0x{:x})",
274 address, new_end, region.start_address, region_end
275 )));
276 }
277 }
278
279 let allocation_id = self.next_allocation_id;
280 self.next_allocation_id += 1;
281
282 let region = MemoryRegion {
283 id: allocation_id,
284 start_address: address,
285 size,
286 allocation_time: std::time::SystemTime::now(),
287 label,
288 };
289
290 self.allocated_regions.insert(allocation_id, region);
291 self.total_allocated += size;
292
293 Ok(allocation_id)
294 }
295
296 pub fn deallocate_region(&mut self, allocation_id: u64) -> JitResult<()> {
301 if let Some(region) = self.allocated_regions.remove(&allocation_id) {
302 self.total_allocated -= region.size;
303
304 for i in 0..region.size {
306 self.memory.remove(&(region.start_address + i as u64));
307 }
308
309 Ok(())
310 } else {
311 Err(JitError::RuntimeError(format!(
312 "Allocation ID {} not found",
313 allocation_id
314 )))
315 }
316 }
317
318 pub fn get_allocated_regions(&self) -> Vec<&MemoryRegion> {
320 self.allocated_regions.values().collect()
321 }
322
323 pub fn find_region_containing(&self, address: u64) -> Option<&MemoryRegion> {
331 self.allocated_regions.values().find(|region| {
332 address >= region.start_address && address < region.start_address + region.size as u64
333 })
334 }
335
336 pub fn get_memory_stats(&self) -> MemoryStats {
338 MemoryStats {
339 total_allocated: self.total_allocated,
340 max_memory_usage: self.max_memory_usage,
341 allocated_regions_count: self.allocated_regions.len(),
342 memory_utilization: if self.max_memory_usage > 0 {
343 self.total_allocated as f64 / self.max_memory_usage as f64
344 } else {
345 0.0
346 },
347 }
348 }
349
350 pub fn clear(&mut self) {
352 self.memory.clear();
353 self.allocated_regions.clear();
354 self.total_allocated = 0;
355 }
356
357 pub fn read_u32(&self, address: u64) -> JitResult<u32> {
365 let bytes = self.read_memory(address, 4)?;
366 Ok(u32::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]))
367 }
368
369 pub fn read_u64(&self, address: u64) -> JitResult<u64> {
371 let bytes = self.read_memory(address, 8)?;
372 Ok(u64::from_le_bytes([
373 bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7],
374 ]))
375 }
376
377 pub fn read_f32(&self, address: u64) -> JitResult<f32> {
379 let bytes = self.read_memory(address, 4)?;
380 Ok(f32::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]))
381 }
382
383 pub fn read_f64(&self, address: u64) -> JitResult<f64> {
385 let bytes = self.read_memory(address, 8)?;
386 Ok(f64::from_le_bytes([
387 bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7],
388 ]))
389 }
390
391 pub fn write_u32(&mut self, address: u64, value: u32) -> JitResult<()> {
393 let bytes = value.to_le_bytes();
394 self.write_memory(address, &bytes)
395 }
396
397 pub fn write_u64(&mut self, address: u64, value: u64) -> JitResult<()> {
399 let bytes = value.to_le_bytes();
400 self.write_memory(address, &bytes)
401 }
402
403 pub fn write_f32(&mut self, address: u64, value: f32) -> JitResult<()> {
405 let bytes = value.to_le_bytes();
406 self.write_memory(address, &bytes)
407 }
408
409 pub fn write_f64(&mut self, address: u64, value: f64) -> JitResult<()> {
411 let bytes = value.to_le_bytes();
412 self.write_memory(address, &bytes)
413 }
414
415 pub fn set_memory_limit(&mut self, limit: usize) {
417 self.max_memory_usage = limit;
418 }
419
420 pub fn memory_limit(&self) -> usize {
422 self.max_memory_usage
423 }
424}
425
/// Snapshot of memory accounting produced by `MemoryState::get_memory_stats`.
#[derive(Debug, Clone)]
pub struct MemoryStats {
    /// Total bytes in live regions at the time of the snapshot.
    pub total_allocated: usize,
    /// Configured memory usage limit in bytes.
    pub max_memory_usage: usize,
    /// Number of live allocated regions.
    pub allocated_regions_count: usize,
    /// `total_allocated / max_memory_usage`, or 0.0 when the limit is 0.
    pub memory_utilization: f64,
}
434
/// A default call stack is empty with the standard 1000-frame depth limit.
impl Default for CallStack {
    fn default() -> Self {
        Self::new()
    }
}
440
/// A default memory state is empty with no effective usage limit.
impl Default for MemoryState {
    fn default() -> Self {
        Self::new()
    }
}
446
#[cfg(test)]
mod tests {
    use super::*;

    /// Push/pop round trip plus the depth and current-frame accessors.
    #[test]
    fn test_call_stack_operations() {
        let mut stack = CallStack::new();
        assert!(stack.is_empty());
        assert_eq!(stack.depth(), 0);

        let frame = CallFrame {
            function_name: "test_function".to_string(),
            location: ExecutionLocation::GraphNode(crate::NodeId::new(0)),
            return_location: ExecutionLocation::GraphNode(crate::NodeId::new(1)),
            local_variables: HashMap::new(),
        };

        stack.push(frame.clone()).unwrap();
        assert_eq!(stack.depth(), 1);
        assert!(!stack.is_empty());

        let current = stack.current_frame().unwrap();
        assert_eq!(current.function_name, "test_function");

        // pop yields the popped frame's return location.
        let return_location = stack.pop();
        assert_eq!(stack.depth(), 0);
        assert!(matches!(return_location, ExecutionLocation::GraphNode(_)));
    }

    /// Pushing past the configured limit must fail.
    #[test]
    fn test_call_stack_max_depth() {
        let mut stack = CallStack::with_max_depth(2);

        let frame1 = CallFrame {
            function_name: "func1".to_string(),
            location: ExecutionLocation::GraphNode(crate::NodeId::new(0)),
            return_location: ExecutionLocation::Completed,
            local_variables: HashMap::new(),
        };

        let frame2 = CallFrame {
            function_name: "func2".to_string(),
            location: ExecutionLocation::GraphNode(crate::NodeId::new(1)),
            return_location: ExecutionLocation::Completed,
            local_variables: HashMap::new(),
        };

        let frame3 = CallFrame {
            function_name: "func3".to_string(),
            location: ExecutionLocation::GraphNode(crate::NodeId::new(2)),
            return_location: ExecutionLocation::Completed,
            local_variables: HashMap::new(),
        };

        assert!(stack.push(frame1).is_ok());
        assert!(stack.push(frame2).is_ok());
        // Third push exceeds max_depth = 2.
        assert!(stack.push(frame3).is_err());
    }

    /// Round-trips bytes through write_memory/read_memory, including a
    /// partial read from the middle of a written range.
    #[test]
    fn test_memory_state_basic_operations() {
        let mut memory = MemoryState::new();

        let data = vec![1, 2, 3, 4, 5];
        memory.write_memory(0x1000, &data).unwrap();

        let read_data = memory.read_memory(0x1000, 5).unwrap();
        assert_eq!(read_data, data);

        let partial_data = memory.read_memory(0x1002, 2).unwrap();
        assert_eq!(partial_data, vec![3, 4]);
    }

    /// Allocation, containment lookup, and deallocation bookkeeping.
    #[test]
    fn test_memory_allocation() {
        let mut memory = MemoryState::new();

        let id1 = memory
            .allocate_region(0x1000, 100, Some("test_region".to_string()))
            .unwrap();
        assert_eq!(memory.get_allocated_regions().len(), 1);

        // 0x1050 lies inside [0x1000, 0x1064).
        let region = memory.find_region_containing(0x1050).unwrap();
        assert_eq!(region.id, id1);
        assert_eq!(region.size, 100);

        memory.deallocate_region(id1).unwrap();
        assert_eq!(memory.get_allocated_regions().len(), 0);
    }

    /// Overlapping allocations must be rejected.
    #[test]
    fn test_memory_overlap_detection() {
        let mut memory = MemoryState::new();

        memory.allocate_region(0x1000, 100, None).unwrap();

        // [0x1050, 0x10b4) intersects the existing [0x1000, 0x1064).
        let result = memory.allocate_region(0x1050, 100, None);
        assert!(result.is_err());
    }

    /// Typed little-endian reads must round-trip typed writes.
    #[test]
    fn test_memory_typed_access() {
        let mut memory = MemoryState::new();

        memory.write_u32(0x1000, 0x12345678).unwrap();
        memory.write_f64(0x1004, 3.14159).unwrap();

        let u32_val = memory.read_u32(0x1000).unwrap();
        assert_eq!(u32_val, 0x12345678);

        let f64_val = memory.read_f64(0x1004).unwrap();
        assert!((f64_val - 3.14159).abs() < 1e-10);
    }

    /// Allocations beyond the byte limit must fail.
    #[test]
    fn test_memory_limit() {
        let mut memory = MemoryState::with_limit(50);

        memory.allocate_region(0x1000, 40, None).unwrap();

        // 40 + 20 > 50, so this must be rejected.
        let result = memory.allocate_region(0x2000, 20, None);
        assert!(result.is_err());
    }

    /// find_frames_by_function returns depths from the top of the stack
    /// (0 = innermost), sorted ascending.
    #[test]
    fn test_call_stack_find_functions() {
        let mut stack = CallStack::new();

        let frame1 = CallFrame {
            function_name: "main".to_string(),
            location: ExecutionLocation::GraphNode(crate::NodeId::new(0)),
            return_location: ExecutionLocation::Completed,
            local_variables: HashMap::new(),
        };

        let frame2 = CallFrame {
            function_name: "helper".to_string(),
            location: ExecutionLocation::GraphNode(crate::NodeId::new(1)),
            return_location: ExecutionLocation::Completed,
            local_variables: HashMap::new(),
        };

        let frame3 = CallFrame {
            function_name: "main".to_string(),
            location: ExecutionLocation::GraphNode(crate::NodeId::new(2)),
            return_location: ExecutionLocation::Completed,
            local_variables: HashMap::new(),
        };

        stack.push(frame1).unwrap();
        stack.push(frame2).unwrap();
        stack.push(frame3).unwrap();

        // "main" sits at the top (depth 0) and the bottom (depth 2).
        let main_indices = stack.find_frames_by_function("main");
        assert_eq!(main_indices.len(), 2);
        assert_eq!(main_indices, vec![0, 2]);
        let helper_indices = stack.find_frames_by_function("helper");
        assert_eq!(helper_indices.len(), 1);
        assert_eq!(helper_indices[0], 1);
    }

    /// Aggregate stats must reflect both allocations and the limit.
    #[test]
    fn test_memory_stats() {
        let mut memory = MemoryState::with_limit(1000);

        memory.allocate_region(0x1000, 100, None).unwrap();
        memory.allocate_region(0x2000, 200, None).unwrap();

        let stats = memory.get_memory_stats();
        assert_eq!(stats.total_allocated, 300);
        assert_eq!(stats.max_memory_usage, 1000);
        assert_eq!(stats.allocated_regions_count, 2);
        assert_eq!(stats.memory_utilization, 0.3);
    }
}