swamp_vm_isa/
lib.rs

1pub mod aligner;
2pub mod opcode;
3pub mod prelude;
4
5use crate::aligner::align;
6use hashmap_mem::MapHeader;
7use std::fmt::{Alignment, Display, Formatter};
8use std::ops::{Add, Div, Sub};
9
/// An instruction is always 9 bytes: a one-octet opcode followed by an
/// 8-octet operand area whose interpretation depends on the opcode.
///
/// `#[repr(C)]` pins the field order so the in-memory image is stable.
#[repr(C)]
#[derive(Clone)]
pub struct BinaryInstruction {
    /// Operation code (values presumably defined in the `opcode` module — confirm).
    pub opcode: u8,
    /// Raw operand bytes, decoded per-opcode.
    pub operands: [u8; 8], // Do not increase the size
}
17
/// Index of a VM register, displayed as `r<N>` (e.g. `r0`, `r15`).
#[derive(Clone, Debug)]
pub struct RegIndex(pub u8);

impl Display for RegIndex {
    fn fmt(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result {
        let Self(index) = self;
        write!(formatter, "r{index}")
    }
}
26
27impl From<MemoryAlignment> for MemoryOffset {
28    fn from(val: MemoryAlignment) -> Self {
29        let octets: usize = val.into();
30        Self(octets as u32)
31    }
32}
33
/// A byte offset relative to the start of some memory region.
#[derive(Debug, Copy, Eq, PartialEq, Hash, Clone, Ord, PartialOrd)]
pub struct MemoryOffset(pub u32);

impl MemoryOffset {
    /// Reinterprets the offset as a `MemorySize` with the same octet count.
    ///
    /// NOTE(review): identical to `as_size` below — consider keeping only one.
    #[must_use]
    pub const fn to_size(&self) -> MemorySize {
        MemorySize(self.0)
    }
}
43
44impl Display for MemoryOffset {
45    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
46        write!(f, "+{:X}", self.0)
47    }
48}
49
50impl MemoryOffset {
51    pub fn space(&mut self, memory_size: MemorySize, alignment: MemoryAlignment) -> Self {
52        let start = align(self.0 as usize, alignment.into()) as u32;
53        self.0 = start + memory_size.0;
54        Self(start)
55    }
56}
57
58impl Add<MemorySize> for MemoryOffset {
59    type Output = Self;
60
61    fn add(self, rhs: MemorySize) -> Self {
62        Self(self.0 + rhs.0)
63    }
64}
65
66impl Add<Self> for MemoryOffset {
67    type Output = Self;
68
69    fn add(self, rhs: Self) -> Self {
70        Self(self.0 + rhs.0)
71    }
72}
73
74impl Sub<Self> for MemoryOffset {
75    type Output = Self;
76
77    fn sub(self, rhs: Self) -> Self {
78        assert!(rhs.0 <= self.0);
79        Self(self.0 - rhs.0)
80    }
81}
82
83impl MemoryOffset {
84    #[must_use]
85    pub const fn as_size(&self) -> MemorySize {
86        MemorySize(self.0)
87    }
88}
89
impl MemoryOffset {
    /// Returns the offset just past a field of `size` octets placed at the
    /// next `alignment`-aligned position at or after `self`.
    ///
    /// Unlike [`Self::space`], this does not mutate `self`, and it returns the
    /// *end* of the reserved span rather than its aligned start.
    #[must_use]
    pub fn add(&self, size: MemorySize, alignment: MemoryAlignment) -> Self {
        let new_start = align(self.0 as usize, alignment.into()) as u32;
        Self(new_start + size.0)
    }
}
97
/// An absolute address in VM memory.
#[derive(Copy, Clone, Debug)]
pub struct MemoryAddress(pub u32);

impl MemoryAddress {
    /// Returns the address advanced by `memory_size` octets.
    ///
    /// NOTE(review): `_alignment` here is `std::fmt::Alignment` (a text
    /// formatting type) and is ignored — no alignment is applied, unlike
    /// `MemoryOffset::space`. This looks like it was meant to take
    /// `MemoryAlignment`; confirm with callers before changing the signature.
    #[must_use]
    pub const fn space(&self, memory_size: MemorySize, _alignment: Alignment) -> Self {
        Self(self.0 + memory_size.0)
    }
}
107
/// An address in the VM heap, printed as `$XXXXXXXX` (zero-padded hex).
#[derive(Debug, Copy, Clone)]
pub struct HeapMemoryAddress(pub u32);

impl Display for HeapMemoryAddress {
    fn fmt(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result {
        let Self(address) = self;
        write!(formatter, "${address:08X}")
    }
}
115
116#[derive(Debug, Copy, Clone)]
117pub struct HeapMemoryRegion {
118    pub addr: HeapMemoryAddress,
119    pub size: MemorySize,
120}
121
122impl Display for HeapMemoryRegion {
123    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
124        write!(f, "{}:{}", self.addr, self.size)
125    }
126}
127
/// A size in octets, displayed human-readably (B / KiB / MiB / GiB).
#[derive(Debug, Copy, Clone, PartialOrd, Ord, Eq, PartialEq)]
pub struct MemorySize(pub u32);

impl Display for MemorySize {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        const KIB: f64 = 1024.0;
        const MIB: f64 = KIB * 1024.0;
        const GIB: f64 = MIB * 1024.0;

        let octets = f64::from(self.0);
        if octets < KIB {
            write!(f, "{octets} B")
        } else if octets < MIB {
            write!(f, "{:.2} KiB", octets / KIB)
        } else if octets < GIB {
            write!(f, "{:.2} MiB", octets / MIB)
        } else {
            write!(f, "{:.2} GiB", octets / GIB)
        }
    }
}

/// A plain count of items (e.g. how many elements fit in a span).
#[derive(Copy, Clone)]
pub struct CountU32(pub u32);

impl From<MemorySize> for usize {
    fn from(val: MemorySize) -> Self {
        let MemorySize(octets) = val;
        octets as Self
    }
}

impl Div<Self> for MemorySize {
    type Output = CountU32;

    /// How many `rhs`-sized chunks fit in `self`.
    ///
    /// Panics unless both sizes are non-zero and the division is exact.
    fn div(self, rhs: Self) -> Self::Output {
        assert!(rhs.0 > 0, "Division by zero in MemorySize");
        assert!(
            self.0 > 0,
            "Numerator must be positive in MemorySize division"
        );
        assert_eq!(
            self.0 % rhs.0,
            0,
            "MemorySize division must be exact and positive"
        );

        let count = self.0 / rhs.0;
        CountU32(count)
    }
}
174
/// Position of an instruction in the instruction stream (a program-counter value).
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct InstructionPosition(pub u32);

impl Add<ProgramCounterDelta> for InstructionPosition {
    type Output = Self;

    /// Applies a signed jump delta to a program counter.
    ///
    /// Panics if the resulting position would be negative (the previous
    /// `as u32` cast silently wrapped a negative target to a huge value).
    fn add(self, rhs: ProgramCounterDelta) -> Self::Output {
        let target = i64::from(self.0) + i64::from(rhs.0);
        Self(u32::try_from(target).expect("jump target must not be negative"))
    }
}

impl Sub<Self> for InstructionPosition {
    type Output = ProgramCounterDelta;

    /// Signed distance from `rhs` to `self`, in instructions.
    ///
    /// Panics if `rhs` is ahead of `self`, or if the distance does not fit in
    /// an `i16` (the previous `as i16` cast silently truncated large distances).
    fn sub(self, rhs: Self) -> Self::Output {
        assert!(self.0 >= rhs.0);

        let distance = self.0 - rhs.0;
        ProgramCounterDelta(i16::try_from(distance).expect("pc delta must fit in i16"))
    }
}

/// Signed instruction-count distance between two program-counter positions.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct ProgramCounterDelta(pub i16);
198
/// Alignment requirement, expressed as the widest primitive that must be
/// naturally aligned (1, 2, 4 or 8 octets).
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum MemoryAlignment {
    // Do not change the order.
    U8,
    U16,
    U32,
    U64,
}

impl MemoryAlignment {
    /// Strict ordering rank (1 = weakest alignment).
    #[must_use]
    const fn rank(&self) -> usize {
        match self {
            Self::U8 => 1,
            Self::U16 => 2,
            Self::U32 => 3,
            Self::U64 => 4,
        }
    }

    /// `true` if `self` imposes a stricter alignment than `other`.
    #[must_use]
    pub const fn greater_than(&self, other: Self) -> bool {
        self.rank() > other.rank()
    }
}

impl From<MemoryAlignment> for u8 {
    /// Alignment in octets (1, 2, 4 or 8).
    fn from(val: MemoryAlignment) -> Self {
        match val {
            MemoryAlignment::U8 => 1,
            MemoryAlignment::U16 => 2,
            MemoryAlignment::U32 => 4,
            MemoryAlignment::U64 => 8,
        }
    }
}

impl From<MemoryAlignment> for usize {
    /// Alignment in octets; delegates to the `u8` conversion so the two
    /// tables cannot drift apart.
    fn from(val: MemoryAlignment) -> Self {
        Self::from(u8::from(val))
    }
}

impl TryFrom<usize> for MemoryAlignment {
    type Error = ();

    /// Inverse of the octet conversion; only 1, 2, 4 and 8 are valid.
    ///
    /// Implemented as `TryFrom` (previously a hand-written `TryInto`) — the
    /// idiomatic direction. Existing `usize::try_into` callers keep working
    /// through the standard blanket impl.
    fn try_from(value: usize) -> Result<Self, Self::Error> {
        let converted = match value {
            1 => Self::U8,
            2 => Self::U16,
            4 => Self::U32,
            8 => Self::U64,

            _ => return Err(()),
        };
        Ok(converted)
    }
}
261
/// Total size of a call frame, in octets.
#[derive(Copy, Clone, Debug)]
pub struct FrameMemorySize(pub u32);

impl Display for FrameMemorySize {
    /// Renders as `frame size: <hex>`, zero-padded to at least four digits.
    fn fmt(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result {
        let Self(size) = self;
        write!(formatter, "frame size: {size:04X}")
    }
}
270
271impl FrameMemorySize {
272    #[must_use]
273    pub const fn add(&self, inc: MemorySize) -> Self {
274        Self(self.0 + inc.0)
275    }
276}
277
/// Octet size of a VM integer value.
pub const INT_SIZE: u16 = 4;
/// Octet size of a VM float value.
pub const FLOAT_SIZE: u16 = 4;
/// Octet size of a VM boolean value.
pub const BOOL_SIZE: u16 = 1;

/// Size of a pointer inside VM memory.
pub const PTR_SIZE: MemorySize = MemorySize(4);
pub const PTR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

/// Size of a heap pointer when stored in a stack frame.
pub const HEAP_PTR_ON_FRAME_SIZE: MemorySize = MemorySize(4);
pub const HEAP_PTR_ON_FRAME_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

/// Size of a register value when spilled to a stack frame.
pub const REG_ON_FRAME_SIZE: MemorySize = MemorySize(4);
pub const REG_ON_FRAME_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

/// Shared layout rule for all collection headers (see `VecHeader` / `GridHeader`).
pub const COLLECTION_CAPACITY_OFFSET: MemoryOffset = MemoryOffset(0); // Capacity should always be first
pub const COLLECTION_ELEMENT_COUNT_OFFSET: MemoryOffset = MemoryOffset(2); // Element count should always be second
293
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct VecHeader {
    /// Do not change the order of the fields!
    ///
    /// Keep the capacity field at the start of the header for consistency across all
    /// container types. Placing it first simplifies copy operations: we can verify
    /// and preserve capacity before copying the remainder of the header in one contiguous operation.
    pub capacity: u16,

    /// Number of live (active) elements currently stored in the collection.
    ///
    /// Always located at offset 2, enabling:
    /// - **Logical size**: Represents the number of valid elements in use.
    /// - **Bounds checking**: Index and assignment checks (`0 <= idx < element_count`)
    ///   can load this field in a single instruction.
    /// - **Iteration**: Iterators read this field to determine the end of the collection.
    /// - **ABI stability**: External tools, debuggers, and serializers can consistently locate
    ///   `capacity` and `element_count` across all container types.
    pub element_count: u16,

    /// Size of a single element slot (presumably in octets — confirm with writers).
    pub element_size: u32,

    /// Explicit trailing padding to keep the layout fully spelled out.
    pub padding: u32,
}

/// Total octet size of [`VecHeader`] (including the explicit `padding` field).
pub const VEC_HEADER_SIZE: MemorySize = MemorySize(size_of::<VecHeader>() as u32);

/// Matches the `repr(C)` offset of `element_count` (`capacity: u16` at 0, then offset 2).
pub const VEC_HEADER_ELEMENT_COUNT_OFFSET: MemoryOffset = MemoryOffset(2);
/// Elements begin immediately after the header.
pub const VEC_HEADER_PAYLOAD_OFFSET: MemoryOffset = MemoryOffset(size_of::<VecHeader>() as u32);
/// NOTE(review): declared alignment is `U16`, but `VecHeader` contains `u32`
/// fields, so its natural `repr(C)` alignment is 4 — confirm this is intended.
pub const VEC_HEADER_ALIGNMENT: MemoryAlignment = MemoryAlignment::U16;
/// Magic value ("cool code"); presumably used to sanity-check vec headers — confirm with VM code.
pub const VEC_HEADER_MAGIC_CODE: u32 = 0xC001C0DE;

/// A vec value on the frame is stored as a heap pointer.
pub const VEC_PTR_SIZE: MemorySize = HEAP_PTR_ON_FRAME_SIZE;
pub const VEC_PTR_ALIGNMENT: MemoryAlignment = HEAP_PTR_ON_FRAME_ALIGNMENT;

/// Iterator state for walking a vec: the header's heap address plus the
/// current element index.
#[repr(C)]
pub struct VecIterator {
    pub vec_header_heap_ptr: u32,
    pub index: u16,
}

/// Octet size of [`VecIterator`] (`repr(C)` rounds 4 + 2 up to 8 for alignment).
pub const VEC_ITERATOR_SIZE: MemorySize = MemorySize(size_of::<VecIterator>() as u32);
pub const VEC_ITERATOR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;
336
/// Header for a type-erased (`Any`) value: data pointer, data size and type hash.
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct AnyHeader {
    /// VM heap pointer to the actual data
    pub data_ptr: u32,

    /// Size in bytes of the pointed-to data
    pub size: u32,

    /// Universal hash of the type
    pub type_hash: u32,
}

/// Octet size of [`AnyHeader`] (three `u32`s = 12).
pub const ANY_HEADER_SIZE: MemorySize = MemorySize(size_of::<AnyHeader>() as u32);

/// Matches the `repr(C)` offset of `data_ptr`.
pub const ANY_HEADER_PTR_OFFSET: MemoryOffset = MemoryOffset(0);
/// Matches the `repr(C)` offset of `size`.
pub const ANY_HEADER_SIZE_OFFSET: MemoryOffset = MemoryOffset(4);
/// Matches the `repr(C)` offset of `type_hash`.
pub const ANY_HEADER_HASH_OFFSET: MemoryOffset = MemoryOffset(8);
pub const ANY_HEADER_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;
356
/// Iterator state for walking a string: heap address of the string, current
/// byte position, and how many items have been produced so far.
#[repr(C)]
pub struct StringIterator {
    pub string_heap_ptr: u32,
    /// Current position in the underlying bytes (UTF-8 assumed — TODO confirm).
    pub byte_index: u16,
    pub index: u32, // how many times we iterated
}

/// Octet size of [`StringIterator`]; `repr(C)` places `index` at offset 8
/// (two padding bytes after `byte_index`), giving a total of 12.
pub const STRING_ITERATOR_SIZE: MemorySize = MemorySize(size_of::<StringIterator>() as u32);
pub const STRING_ITERATOR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

/// Iterator state for a sparse collection: header heap address plus current slot index.
#[repr(C)]
pub struct SparseIterator {
    pub sparse_header_heap_ptr: u32,
    pub index: u16,
}

pub const SPARSE_ITERATOR_SIZE: MemorySize = MemorySize(size_of::<SparseIterator>() as u32);
pub const SPARSE_ITERATOR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;
375
/// Loop state for iterating an integer range.
#[repr(C)]
#[derive(Debug)]
pub struct RangeIterator {
    /// Current value.
    pub index: i32,
    /// Stop value.
    pub end: i32,
    /// Step applied each iteration (presumably +1 or -1 — confirm with VM loop code).
    pub direction: i32,
}

pub const RANGE_ITERATOR_SIZE: MemorySize = MemorySize(size_of::<RangeIterator>() as u32);
pub const RANGE_ITERATOR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

/// In-memory representation of a range value (`min..max` / `min..=max`).
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct RangeHeader {
    // Do not change! These must match the structure in Swamp core exactly
    pub min: i32,
    pub max: i32,
    /// Whether `max` itself is part of the range.
    pub inclusive: bool,
}
/// Octet size of [`RangeHeader`]; `repr(C)` pads the trailing `bool` so the
/// total is 12.
pub const RANGE_HEADER_SIZE: MemorySize = MemorySize(size_of::<RangeHeader>() as u32);
pub const RANGE_HEADER_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;
397
/// Header for a 2D grid container: a vec-style header plus `width`/`height`.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GridHeader {
    /// Do not change the order of the fields!
    ///
    /// Keep the capacity field at the start of the header for consistency across all
    /// container types. Placing it first simplifies copy operations: we can verify
    /// and preserve capacity before copying the remainder of the header in one contiguous operation.
    pub capacity: u16,

    /// Number of live (active) elements currently stored in the collection.
    ///
    /// Always located at offset 2, enabling:
    /// - **Logical size**: Represents the number of valid elements in use.
    /// - **Bounds checking**: Index and assignment checks (`0 <= idx < element_count`)
    ///   can load this field in a single instruction.
    /// - **Iteration**: Iterators read this field to determine the end of the collection.
    /// - **ABI stability**: External tools, debuggers, and serializers can consistently locate
    ///   `capacity` and `element_count` across all container types.
    pub element_count: u16, // Always same as capacity

    /// Size of a single element slot (presumably in octets — confirm with writers).
    pub element_size: u32,

    /// Grid width in elements (`repr(C)` offset 8).
    pub width: u16,
    /// Grid height in elements (`repr(C)` offset 10).
    pub height: u16,
    /// Explicit trailing padding to keep the layout fully spelled out.
    pub padding: u32,
}

pub const GRID_HEADER_SIZE: MemorySize = MemorySize(size_of::<GridHeader>() as u32);
pub const GRID_HEADER_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;
/// Matches the `repr(C)` offset of `GridHeader::width`.
pub const GRID_HEADER_WIDTH_OFFSET: MemoryOffset = MemoryOffset(8);
/// Matches the `repr(C)` offset of `GridHeader::height`.
pub const GRID_HEADER_HEIGHT_OFFSET: MemoryOffset = MemoryOffset(10);
/// Grid elements start immediately after the header.
pub const GRID_HEADER_PAYLOAD_OFFSET: MemoryOffset = MemoryOffset(size_of::<GridHeader>() as u32);

/// Sentinel value; presumably written into grid storage for validation — confirm with VM code.
pub const GRID_SECRET_CODE: u32 = 0x00_C0FFEE;
433
// NOTE: Must align to U32, therefore the padding at the end

/// Octet size of `hashmap_mem::MapHeader`.
pub const MAP_HEADER_SIZE: MemorySize = MemorySize(size_of::<MapHeader>() as u32);
pub const MAP_HEADER_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;
/// NOTE(review): the offsets below must mirror the field layout of
/// `hashmap_mem::MapHeader` — verify against that crate whenever it changes.
pub const MAP_HEADER_KEY_SIZE_OFFSET: MemoryOffset = MemoryOffset(4);
pub const MAP_HEADER_TUPLE_SIZE_OFFSET: MemoryOffset = MemoryOffset(6);
pub const MAP_HEADER_LOGICAL_LIMIT_OFFSET: MemoryOffset = MemoryOffset(8);
/// Bucket storage starts immediately after the header.
pub const MAP_BUCKETS_OFFSET: MemoryOffset = MemoryOffset(MAP_HEADER_SIZE.0);

/// Iterator state for walking a map's buckets.
#[repr(C)]
pub struct MapIterator {
    /// NOTE(review): named as a *frame offset*, unlike the heap pointers used
    /// by the other iterators — confirm which memory space this refers to.
    pub map_header_frame_offset: u32,
    pub index: u32,
}

pub const MAP_ITERATOR_SIZE: MemorySize = MemorySize(size_of::<MapIterator>() as u32);
pub const MAP_ITERATOR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

/// Maximum allowed string length in octets (16 KiB).
pub const MAX_STRING_LEN: u16 = 16 * 1024;

/// A string value on the frame is stored as a heap pointer.
pub const STRING_PTR_SIZE: MemorySize = HEAP_PTR_ON_FRAME_SIZE;
pub const STRING_PTR_ALIGNMENT: MemoryAlignment = HEAP_PTR_ON_FRAME_ALIGNMENT;