gcrecomp_core/runtime/memory.rs
1//! Memory Manager
2//!
3//! This module provides memory management for the GameCube recompiler runtime.
4//! It handles address translation, memory reads/writes, and bulk operations.
5//!
6//! # Memory Map
7//! GameCube uses a flat memory model with the following regions:
8//! - **0x80000000 - 0x817FFFFF**: Main RAM (24MB)
//! - ARAM: 16MB auxiliary (audio) RAM — accessed via DMA, not CPU-addressable
//!   (0xCC000000 is the hardware-register region, not ARAM)
//! - Locked cache: 16KB at 0xE0000000 — NOTE(review): the earlier "8MB at
//!   0x80000000" claim conflicted with the main-RAM mapping above; confirm.
//!   Only main RAM is actually modeled by this manager.
11//!
12//! # Memory Optimizations
13//! - All hot-path functions use `#[inline(always)]` for address translation
14//! - Read/write functions use `#[inline]` for performance
15//! - Explicit type annotations to reduce compiler inference overhead
16//! - Bulk operations use optimized copy_from_slice for non-overlapping ranges
17//!
18//! # Address Translation
19//! GameCube uses physical addresses directly. Main RAM is mapped at 0x80000000,
20//! so we subtract this base address to get the RAM offset.
21
22use anyhow::{Context, Result};
23
/// Memory manager for GameCube memory operations.
///
/// # Memory Layout
/// - `ram`: 24MB byte array (heap allocation required for large size)
///
/// # Address Translation
/// GameCube main RAM is mapped to virtual addresses 0x80000000-0x817FFFFF.
/// Physical offsets are computed by subtracting the base address (0x80000000);
/// only this main-RAM region is backed by the manager.
#[derive(Debug)]
pub struct MemoryManager {
    /// Main RAM backing store (24MB, zero-initialized by `new`).
    ram: Vec<u8>,
}
37
38impl MemoryManager {
39 /// Create a new memory manager with 24MB of RAM.
40 ///
41 /// # Returns
42 /// `MemoryManager` - Initialized memory manager with all bytes set to 0
43 ///
44 /// # Examples
45 /// ```rust
46 /// let mut memory = MemoryManager::new();
47 /// ```
48 #[inline] // Constructor - simple, may be inlined
49 pub fn new() -> Self {
50 // 24MB RAM model
51 const RAM_SIZE: usize = 24usize * 1024usize * 1024usize; // 24MB
52 Self {
53 ram: vec![0u8; RAM_SIZE],
54 }
55 }
56
57 /// Translate a virtual address to a physical RAM offset.
58 ///
59 /// # Algorithm
60 /// GameCube uses a flat memory model with physical addresses.
61 /// Main RAM is at 0x80000000 - 0x817FFFFF (24MB).
62 /// Physical offset = virtual_address - 0x80000000
63 ///
64 /// # Arguments
65 /// * `address` - 32-bit virtual address
66 ///
67 /// # Returns
68 /// `Option<usize>` - Physical RAM offset if address is in main RAM, None otherwise
69 ///
70 /// # Examples
71 /// ```rust
72 /// let offset = memory.translate_address(0x80000000);
73 /// assert_eq!(offset, Some(0));
74 /// ```
75 #[inline(always)] // Hot path - always inline for performance
76 fn translate_address(&self, address: u32) -> Option<usize> {
77 // GameCube uses a flat memory model with physical addresses
78 // Main RAM is at 0x80000000 - 0x817FFFFF
79 if address >= 0x80000000u32 && address < 0x81800000u32 {
80 Some((address.wrapping_sub(0x80000000u32)) as usize)
81 } else {
82 None
83 }
84 }
85
86 /// Read a single byte from memory.
87 ///
88 /// # Arguments
89 /// * `address` - 32-bit virtual address
90 ///
91 /// # Returns
92 /// `Result<u8>` - Byte value at address, or error if invalid/out of bounds
93 ///
94 /// # Errors
95 /// Returns error if address is not in main RAM or out of bounds
96 ///
97 /// # Examples
98 /// ```rust
99 /// let value = memory.read_u8(0x80000000)?;
100 /// ```
101 #[inline] // Hot path - may be inlined
102 pub fn read_u8(&self, address: u32) -> Result<u8> {
103 let offset: usize = self
104 .translate_address(address)
105 .context("Invalid memory address")?;
106 Ok(self.ram[offset])
107 }
108
109 /// Read a 16-bit word (big-endian) from memory.
110 ///
111 /// # Arguments
112 /// * `address` - 32-bit virtual address (must be aligned, but we don't enforce it)
113 ///
114 /// # Returns
115 /// `Result<u16>` - 16-bit value at address, or error if invalid/out of bounds
116 ///
117 /// # Errors
118 /// Returns error if address+1 is out of bounds
119 ///
120 /// # Examples
121 /// ```rust
122 /// let value = memory.read_u16(0x80000000)?;
123 /// ```
124 #[inline] // Hot path - may be inlined
125 pub fn read_u16(&self, address: u32) -> Result<u16> {
126 let offset: usize = self
127 .translate_address(address)
128 .context("Invalid memory address")?;
129 if offset.wrapping_add(1usize) >= self.ram.len() {
130 anyhow::bail!("Memory read out of bounds");
131 }
132 let bytes: [u8; 2] = [self.ram[offset], self.ram[offset.wrapping_add(1usize)]];
133 Ok(u16::from_be_bytes(bytes))
134 }
135
136 /// Read a 32-bit word (big-endian) from memory.
137 ///
138 /// # Arguments
139 /// * `address` - 32-bit virtual address (must be aligned, but we don't enforce it)
140 ///
141 /// # Returns
142 /// `Result<u32>` - 32-bit value at address, or error if invalid/out of bounds
143 ///
144 /// # Errors
145 /// Returns error if address+3 is out of bounds
146 ///
147 /// # Examples
148 /// ```rust
149 /// let value = memory.read_u32(0x80000000)?;
150 /// ```
151 #[inline] // Hot path - may be inlined
152 pub fn read_u32(&self, address: u32) -> Result<u32> {
153 let offset: usize = self
154 .translate_address(address)
155 .context("Invalid memory address")?;
156 if offset.wrapping_add(3usize) >= self.ram.len() {
157 anyhow::bail!("Memory read out of bounds");
158 }
159 let bytes: [u8; 4] = [
160 self.ram[offset],
161 self.ram[offset.wrapping_add(1usize)],
162 self.ram[offset.wrapping_add(2usize)],
163 self.ram[offset.wrapping_add(3usize)],
164 ];
165 Ok(u32::from_be_bytes(bytes))
166 }
167
168 /// Read a 64-bit word (big-endian) from memory.
169 ///
170 /// # Arguments
171 /// * `address` - 32-bit virtual address (must be aligned, but we don't enforce it)
172 ///
173 /// # Returns
174 /// `Result<u64>` - 64-bit value at address, or error if invalid/out of bounds
175 ///
176 /// # Errors
177 /// Returns error if address+7 is out of bounds
178 ///
179 /// # Examples
180 /// ```rust
181 /// let value = memory.read_u64(0x80000000)?;
182 /// ```
183 #[inline] // Hot path - may be inlined
184 pub fn read_u64(&self, address: u32) -> Result<u64> {
185 let offset: usize = self
186 .translate_address(address)
187 .context("Invalid memory address")?;
188 if offset.wrapping_add(7usize) >= self.ram.len() {
189 anyhow::bail!("Memory read out of bounds");
190 }
191 let bytes: [u8; 8] = [
192 self.ram[offset],
193 self.ram[offset.wrapping_add(1usize)],
194 self.ram[offset.wrapping_add(2usize)],
195 self.ram[offset.wrapping_add(3usize)],
196 self.ram[offset.wrapping_add(4usize)],
197 self.ram[offset.wrapping_add(5usize)],
198 self.ram[offset.wrapping_add(6usize)],
199 self.ram[offset.wrapping_add(7usize)],
200 ];
201 Ok(u64::from_be_bytes(bytes))
202 }
203
204 /// Write a single byte to memory.
205 ///
206 /// # Arguments
207 /// * `address` - 32-bit virtual address
208 /// * `value` - Byte value to write
209 ///
210 /// # Returns
211 /// `Result<()>` - Success, or error if invalid/out of bounds
212 ///
213 /// # Errors
214 /// Returns error if address is not in main RAM or out of bounds
215 ///
216 /// # Examples
217 /// ```rust
218 /// memory.write_u8(0x80000000, 0x42)?;
219 /// ```
220 #[inline] // Hot path - may be inlined
221 pub fn write_u8(&mut self, address: u32, value: u8) -> Result<()> {
222 let offset: usize = self
223 .translate_address(address)
224 .context("Invalid memory address")?;
225 self.ram[offset] = value;
226 Ok(())
227 }
228
229 /// Write a 16-bit word (big-endian) to memory.
230 ///
231 /// # Arguments
232 /// * `address` - 32-bit virtual address (must be aligned, but we don't enforce it)
233 /// * `value` - 16-bit value to write
234 ///
235 /// # Returns
236 /// `Result<()>` - Success, or error if invalid/out of bounds
237 ///
238 /// # Errors
239 /// Returns error if address+1 is out of bounds
240 ///
241 /// # Examples
242 /// ```rust
243 /// memory.write_u16(0x80000000, 0x1234)?;
244 /// ```
245 #[inline] // Hot path - may be inlined
246 pub fn write_u16(&mut self, address: u32, value: u16) -> Result<()> {
247 let offset: usize = self
248 .translate_address(address)
249 .context("Invalid memory address")?;
250 if offset.wrapping_add(1usize) >= self.ram.len() {
251 anyhow::bail!("Memory write out of bounds");
252 }
253 let bytes: [u8; 2] = value.to_be_bytes();
254 self.ram[offset] = bytes[0];
255 self.ram[offset.wrapping_add(1usize)] = bytes[1];
256 Ok(())
257 }
258
259 /// Write a 32-bit word (big-endian) to memory.
260 ///
261 /// # Arguments
262 /// * `address` - 32-bit virtual address (must be aligned, but we don't enforce it)
263 /// * `value` - 32-bit value to write
264 ///
265 /// # Returns
266 /// `Result<()>` - Success, or error if invalid/out of bounds
267 ///
268 /// # Errors
269 /// Returns error if address+3 is out of bounds
270 ///
271 /// # Examples
272 /// ```rust
273 /// memory.write_u32(0x80000000, 0x12345678)?;
274 /// ```
275 #[inline] // Hot path - may be inlined
276 pub fn write_u32(&mut self, address: u32, value: u32) -> Result<()> {
277 let offset: usize = self
278 .translate_address(address)
279 .context("Invalid memory address")?;
280 if offset.wrapping_add(3usize) >= self.ram.len() {
281 anyhow::bail!("Memory write out of bounds");
282 }
283 let bytes: [u8; 4] = value.to_be_bytes();
284 self.ram[offset] = bytes[0];
285 self.ram[offset.wrapping_add(1usize)] = bytes[1];
286 self.ram[offset.wrapping_add(2usize)] = bytes[2];
287 self.ram[offset.wrapping_add(3usize)] = bytes[3];
288 Ok(())
289 }
290
291 /// Write a 64-bit word (big-endian) to memory.
292 ///
293 /// # Arguments
294 /// * `address` - 32-bit virtual address (must be aligned, but we don't enforce it)
295 /// * `value` - 64-bit value to write
296 ///
297 /// # Returns
298 /// `Result<()>` - Success, or error if invalid/out of bounds
299 ///
300 /// # Errors
301 /// Returns error if address+7 is out of bounds
302 ///
303 /// # Examples
304 /// ```rust
305 /// memory.write_u64(0x80000000, 0x1234567890ABCDEF)?;
306 /// ```
307 #[inline] // Hot path - may be inlined
308 pub fn write_u64(&mut self, address: u32, value: u64) -> Result<()> {
309 let offset: usize = self
310 .translate_address(address)
311 .context("Invalid memory address")?;
312 if offset.wrapping_add(7usize) >= self.ram.len() {
313 anyhow::bail!("Memory write out of bounds");
314 }
315 let bytes: [u8; 8] = value.to_be_bytes();
316 for (i, byte) in bytes.iter().enumerate() {
317 self.ram[offset.wrapping_add(i)] = *byte;
318 }
319 Ok(())
320 }
321
322 /// Read multiple bytes from memory.
323 ///
324 /// # Arguments
325 /// * `address` - 32-bit virtual address
326 /// * `len` - Number of bytes to read
327 ///
328 /// # Returns
329 /// `Result<Vec<u8>>` - Byte vector, or error if invalid/out of bounds
330 ///
331 /// # Errors
332 /// Returns error if address+len is out of bounds
333 ///
334 /// # Examples
335 /// ```rust
336 /// let data = memory.read_bytes(0x80000000, 1024)?;
337 /// ```
338 #[inline] // May be inlined for small lengths
339 pub fn read_bytes(&self, address: u32, len: usize) -> Result<Vec<u8>> {
340 let offset: usize = self
341 .translate_address(address)
342 .context("Invalid memory address")?;
343 if offset.wrapping_add(len) > self.ram.len() {
344 anyhow::bail!("Memory read out of bounds");
345 }
346 Ok(self.ram[offset..offset.wrapping_add(len)].to_vec())
347 }
348
349 /// Write multiple bytes to memory.
350 ///
351 /// # Arguments
352 /// * `address` - 32-bit virtual address
353 /// * `data` - Byte slice to write
354 ///
355 /// # Returns
356 /// `Result<()>` - Success, or error if invalid/out of bounds
357 ///
358 /// # Errors
359 /// Returns error if address+data.len() is out of bounds
360 ///
361 /// # Examples
362 /// ```rust
363 /// memory.write_bytes(0x80000000, &[0x42, 0x43, 0x44])?;
364 /// ```
365 #[inline] // May be inlined for small lengths
366 pub fn write_bytes(&mut self, address: u32, data: &[u8]) -> Result<()> {
367 let offset: usize = self
368 .translate_address(address)
369 .context("Invalid memory address")?;
370 if offset.wrapping_add(data.len()) > self.ram.len() {
371 anyhow::bail!("Memory write out of bounds");
372 }
373 self.ram[offset..offset.wrapping_add(data.len())].copy_from_slice(data);
374 Ok(())
375 }
376
377 /// Load a section of data into memory (convenience wrapper for write_bytes).
378 ///
379 /// # Arguments
380 /// * `address` - 32-bit virtual address
381 /// * `data` - Byte slice to write
382 ///
383 /// # Returns
384 /// `Result<()>` - Success, or error if invalid/out of bounds
385 ///
386 /// # Examples
387 /// ```rust
388 /// memory.load_section(0x80000000, §ion_data)?;
389 /// ```
390 #[inline] // Simple wrapper - may be inlined
391 pub fn load_section(&mut self, address: u32, data: &[u8]) -> Result<()> {
392 self.write_bytes(address, data)
393 }
394
395 /// Optimized bulk memory copy.
396 ///
397 /// # Algorithm
398 /// Copies `len` bytes from `src` to `dest`. Uses optimized `copy_from_slice`
399 /// for non-overlapping ranges. For overlapping ranges, uses temporary buffer
400 /// to ensure correct copy semantics.
401 ///
402 /// # Arguments
403 /// * `dest` - Destination address
404 /// * `src` - Source address
405 /// * `len` - Number of bytes to copy
406 ///
407 /// # Returns
408 /// `Result<()>` - Success, or error if invalid/out of bounds
409 ///
410 /// # Errors
411 /// Returns error if either address is invalid or copy would go out of bounds
412 ///
413 /// # Examples
414 /// ```rust
415 /// memory.bulk_copy(0x80001000, 0x80000000, 1024)?;
416 /// ```
417 #[inline] // May be inlined for small lengths
418 pub fn bulk_copy(&mut self, dest: u32, src: u32, len: usize) -> Result<()> {
419 let dest_offset: usize = self.translate_address(dest)
420 .context("Invalid destination address")?;
421 let src_offset: usize = self.translate_address(src)
422 .context("Invalid source address")?;
423
424 if dest_offset.wrapping_add(len) > self.ram.len() || src_offset.wrapping_add(len) > self.ram.len() {
425 anyhow::bail!("Bulk copy out of bounds");
426 }
427
428 // Always use temporary buffer to avoid borrow checker issues with overlapping slices
429 let temp: Vec<u8> = self.ram[src_offset..src_offset.wrapping_add(len)].to_vec();
430 self.ram[dest_offset..dest_offset.wrapping_add(len)].copy_from_slice(&temp);
431
432 Ok(())
433 }
434
435 /// Get a read-only slice of memory.
436 ///
437 /// # Safety
438 /// This function is safe but returns a reference to internal memory.
439 /// The caller must ensure the slice is not used after the MemoryManager is dropped.
440 ///
441 /// # Arguments
442 /// * `address` - 32-bit virtual address
443 /// * `len` - Length of slice
444 ///
445 /// # Returns
446 /// `Result<&[u8]>` - Byte slice, or error if invalid/out of bounds
447 ///
448 /// # Errors
449 /// Returns error if address+len is out of bounds
450 ///
451 /// # Examples
452 /// ```rust
453 /// let slice = memory.get_slice(0x80000000, 1024)?;
454 /// ```
455 #[inline] // May be inlined for small lengths
456 pub fn get_slice(&self, address: u32, len: usize) -> Result<&[u8]> {
457 let offset: usize = self.translate_address(address)
458 .context("Invalid memory address")?;
459 if offset.wrapping_add(len) > self.ram.len() {
460 anyhow::bail!("Memory slice out of bounds");
461 }
462 Ok(&self.ram[offset..offset.wrapping_add(len)])
463 }
464}
465
466impl Default for MemoryManager {
467 #[inline] // Simple default implementation
468 fn default() -> Self {
469 Self::new()
470 }
471}