swamp_vm_types/
lib.rs

/*
 * Copyright (c) Peter Bjorklund. All rights reserved. https://github.com/swamp/swamp
 * Licensed under the MIT License. See LICENSE in the project root for license information.
 */
use crate::aligner::align;
use crate::types::{BasicTypeRef, TypedRegister, VmType};
use hashmap_mem::MapHeader;
use source_map_node::Node;
use std::fmt::{Debug, Display, Formatter};
use std::ops::{Add, Div, Sub};

pub mod aligner;
pub mod opcode;
pub mod prelude;
pub mod types;

/// An instruction is always 9 bytes.
#[repr(C)]
#[derive(Clone)]
pub struct BinaryInstruction {
    pub opcode: u8,
    pub operands: [u8; 8], // Do not increase the size.
}

#[derive(Clone, Debug)]
pub struct RegIndex(pub u8);

impl Display for RegIndex {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "r{}", self.0)
    }
}

#[derive(Copy, Clone, Debug)]
pub struct MemoryAddress(pub u32);

#[derive(Copy, Clone)]
pub struct StackMemoryAddress(pub u32);

impl Add<MemorySize> for StackMemoryAddress {
    type Output = Self;

    fn add(self, rhs: MemorySize) -> Self::Output {
        Self(self.0 + rhs.0)
    }
}

impl Add<MemoryOffset> for StackMemoryAddress {
    type Output = Self;

    fn add(self, rhs: MemoryOffset) -> Self::Output {
        Self(self.0 + rhs.0)
    }
}

#[derive(Debug, Copy, Clone)]
pub struct CountU16(pub u16);

impl Display for CountU16 {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

#[derive(Copy, Clone)]
pub struct CountU32(pub u32);

impl StackMemoryAddress {
    #[must_use]
    pub const fn add(&self, memory_size: MemorySize) -> Self {
        Self(self.0 + memory_size.0)
    }
}

#[derive(Debug, Copy, Clone)]
pub struct HeapMemoryAddress(pub u32);

impl Display for HeapMemoryAddress {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "${:08X}", self.0)
    }
}

#[derive(Debug, Copy, Clone)]
pub struct HeapMemoryRegion {
    pub addr: HeapMemoryAddress,
    pub size: MemorySize,
}

impl Display for HeapMemoryRegion {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}:{}", self.addr, self.size)
    }
}

#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct FrameMemoryAddress(pub u32);

impl Display for FrameMemoryAddress {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "${:04X}", self.0)
    }
}

impl Add<MemoryOffset> for FrameMemoryAddress {
    type Output = Self;

    fn add(self, rhs: MemoryOffset) -> Self::Output {
        Self(self.0 + rhs.0)
    }
}

#[derive(Debug, Copy, Clone)]
pub struct FrameMemoryRegion {
    pub addr: FrameMemoryAddress,
    pub size: MemorySize,
}

impl Display for FrameMemoryRegion {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}:{}", self.addr, self.size)
    }
}

impl Default for FrameMemoryRegion {
    fn default() -> Self {
        Self {
            addr: FrameMemoryAddress(0),
            size: MemorySize(0),
        }
    }
}

impl FrameMemoryRegion {
    #[must_use]
    pub const fn new(frame_addr: FrameMemoryAddress, size: MemorySize) -> Self {
        Self {
            addr: frame_addr,
            size,
        }
    }

    /// One past the last byte of the region, i.e. `addr + size`.
    #[must_use]
    pub fn last_valid_end_addr(&self) -> FrameMemoryAddress {
        self.addr.add_offset(MemoryOffset(self.size.0))
    }

    #[must_use]
    pub const fn addr(&self) -> FrameMemoryAddress {
        self.addr
    }
}

#[derive(Debug, Copy, Clone)]
pub struct FrameMemoryAddressIndirectPointer(pub FrameMemoryAddress);

#[derive(Debug, Copy, Clone)]
pub struct TempFrameMemoryAddress(pub FrameMemoryAddress);

impl TempFrameMemoryAddress {
    #[must_use]
    pub const fn to_addr(&self) -> FrameMemoryAddress {
        self.0
    }
}

/// Addresses are relative to the frame pointer.
impl FrameMemoryAddress {
    #[must_use]
    pub const fn advance(&self, memory_offset: MemoryOffset) -> Self {
        Self(self.0 + memory_offset.0)
    }

    #[must_use]
    pub const fn add(&self, memory_size: MemorySize) -> Self {
        Self(self.0 + memory_size.0)
    }

    #[must_use]
    pub const fn add_offset(&self, memory_offset: MemoryOffset) -> Self {
        Self(self.0 + memory_offset.0)
    }

    #[must_use]
    pub const fn as_size(&self) -> FrameMemorySize {
        FrameMemorySize(self.0)
    }
}
#[must_use]
pub fn align_to(addr: MemoryOffset, alignment: MemoryAlignment) -> MemoryOffset {
    MemoryOffset(align(addr.0 as usize, alignment.into()) as u32)
}

/// # Arguments
/// * `unaligned_size` - The size up to the end of the last field (end of layout).
/// * `max_alignment` - The maximum alignment required by any field.
///
/// # Returns
/// The total size, rounded up to a multiple of `max_alignment`.
///
/// # Notes
/// The total size of a struct is always rounded up to a multiple of its alignment.
/// This may look wasteful, since a parent struct could otherwise place fields of
/// lower alignment in the tail padding, but it keeps code generation simple: a
/// struct always has the same size regardless of where it is contained. It also
/// ensures that arrays of the struct are correctly aligned according to the ABI,
/// matching the behavior of C, C++, and Rust. Tail padding at the end of a struct
/// is never reused for subsequent fields in a parent struct; this is required for
/// a safe and predictable layout.
#[must_use]
pub fn adjust_size_to_alignment(
    unaligned_size: MemorySize,
    max_alignment: MemoryAlignment,
) -> MemorySize {
    align_to(MemoryOffset(unaligned_size.0), max_alignment).to_size()
}

impl MemoryAddress {
    #[must_use]
    pub const fn space(&self, memory_size: MemorySize, _alignment: MemoryAlignment) -> Self {
        Self(self.0 + memory_size.0)
    }
}

#[derive(Debug, Copy, Eq, PartialEq, Hash, Clone, Ord, PartialOrd)]
pub struct HeapMemoryOffset(pub u32);

impl HeapMemoryOffset {
    #[must_use]
    pub const fn to_size(&self) -> HeapMemorySize {
        HeapMemorySize(self.0)
    }
}

impl Display for HeapMemoryOffset {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "+{:08X}", self.0)
    }
}

impl Add<HeapMemorySize> for HeapMemoryOffset {
    type Output = Self;

    fn add(self, rhs: HeapMemorySize) -> Self {
        Self(self.0 + rhs.0)
    }
}

impl Sub<Self> for HeapMemoryOffset {
    type Output = Self;

    fn sub(self, rhs: Self) -> Self {
        assert!(rhs.0 <= self.0);
        Self(self.0 - rhs.0)
    }
}

impl HeapMemoryOffset {
    #[must_use]
    pub const fn as_size(&self) -> HeapMemorySize {
        HeapMemorySize(self.0)
    }

    /// Aligns the offset up, then advances past `size`.
    #[must_use]
    pub fn add(&self, size: HeapMemorySize, alignment: MemoryAlignment) -> Self {
        let new_start = align(self.0 as usize, alignment.into());
        Self(new_start as u32 + size.0)
    }
}

#[derive(Clone)]
pub struct PointerLocation {
    pub ptr_reg: TypedRegister,
}

impl PointerLocation {
    #[must_use]
    pub const fn new(ptr_reg: TypedRegister) -> Self {
        Self { ptr_reg }
    }

    #[must_use]
    pub const fn addressing(&self) -> u8 {
        self.ptr_reg.addressing()
    }

    #[must_use]
    pub fn memory_location(&self) -> MemoryLocation {
        MemoryLocation {
            base_ptr_reg: self.ptr_reg.clone(),
            offset: MemoryOffset(0),
            ty: self.ptr_reg.ty.clone(),
        }
    }
}

#[derive(Clone)]
pub struct MemoryLocation {
    pub base_ptr_reg: TypedRegister,
    pub offset: MemoryOffset,
    pub ty: VmType,
}

impl Display for MemoryLocation {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "[{}+{} ({})]", self.base_ptr_reg, self.offset, self.ty)
    }
}

impl MemoryLocation {
    #[must_use]
    pub const fn vm_type(&self) -> &VmType {
        &self.ty
    }

    #[must_use]
    pub fn unsafe_add_offset(&self, offset: MemoryOffset) -> Self {
        Self {
            base_ptr_reg: self.base_ptr_reg.clone(),
            offset: self.offset + offset,
            ty: self.ty.clone(),
        }
    }

    #[must_use]
    pub fn new_copy_over_whole_type_with_zero_offset(base_ptr_reg: TypedRegister) -> Self {
        Self {
            ty: base_ptr_reg.ty.clone(),
            base_ptr_reg,
            offset: MemoryOffset(0),
        }
    }
}

impl Debug for MemoryLocation {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "MemLoc[{}+{:04X}] ({})",
            self.base_ptr_reg, self.offset.0, self.ty
        )
    }
}

impl MemoryLocation {
    #[must_use]
    pub fn pointer_location(&self) -> Option<PointerLocation> {
        if self.offset.0 == 0 {
            Some(PointerLocation {
                ptr_reg: self.base_ptr_reg.clone(),
            })
        } else {
            None
        }
    }

    #[must_use]
    pub const fn reg(&self) -> &TypedRegister {
        &self.base_ptr_reg
    }

    #[must_use]
    pub const fn as_direct_register(&self) -> Option<&TypedRegister> {
        if self.offset.0 == 0 {
            Some(&self.base_ptr_reg)
        } else {
            None
        }
    }
}

#[derive(Clone)]
pub struct ScalarMemoryLocation {
    pub location: MemoryLocation,
}

#[derive(Clone)]
pub struct AggregateMemoryLocation {
    pub location: MemoryLocation,
}

impl AggregateMemoryLocation {
    #[must_use]
    pub const fn new(location: MemoryLocation) -> Self {
        Self { location }
    }

    #[must_use]
    pub fn offset(&self, memory_offset: MemoryOffset, new_type: BasicTypeRef) -> Self {
        let new_location = MemoryLocation {
            base_ptr_reg: self.location.base_ptr_reg.clone(),
            offset: self.location.offset + memory_offset,
            ty: VmType::new_unknown_placement(new_type),
        };
        Self {
            location: new_location,
        }
    }
}

impl Display for AggregateMemoryLocation {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.location)
    }
}

#[derive(Debug, Copy, Eq, PartialEq, Hash, Clone, Ord, PartialOrd)]
pub struct MemoryOffset(pub u32);

impl MemoryOffset {
    #[must_use]
    pub const fn to_size(&self) -> MemorySize {
        MemorySize(self.0)
    }

    /// Aligns the current offset, reserves `memory_size` bytes, and returns
    /// the aligned start offset of the reservation.
    pub fn space(&mut self, memory_size: MemorySize, alignment: MemoryAlignment) -> Self {
        let start = align(self.0 as usize, alignment.into()) as u32;
        self.0 = start + memory_size.0;
        Self(start)
    }
}

impl Display for MemoryOffset {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "+{:X}", self.0)
    }
}

impl Add<MemorySize> for MemoryOffset {
    type Output = Self;

    fn add(self, rhs: MemorySize) -> Self {
        Self(self.0 + rhs.0)
    }
}

impl Add<Self> for MemoryOffset {
    type Output = Self;

    fn add(self, rhs: Self) -> Self {
        Self(self.0 + rhs.0)
    }
}

impl Sub<Self> for MemoryOffset {
    type Output = Self;

    fn sub(self, rhs: Self) -> Self {
        assert!(rhs.0 <= self.0);
        Self(self.0 - rhs.0)
    }
}

impl MemoryOffset {
    #[must_use]
    pub const fn as_size(&self) -> MemorySize {
        MemorySize(self.0)
    }

    /// Aligns the offset up, then advances past `size`.
    #[must_use]
    pub fn add(&self, size: MemorySize, alignment: MemoryAlignment) -> Self {
        let new_start = align(self.0 as usize, alignment.into()) as u32;
        Self(new_start + size.0)
    }
}

pub enum ZFlagPolarity {
    TrueWhenSet,
    TrueWhenClear,
}

#[derive(Debug, Copy, Clone, PartialOrd, Ord, Eq, PartialEq)]
pub struct HeapMemorySize(pub u32);

impl Div<Self> for HeapMemorySize {
    type Output = CountU32;

    fn div(self, rhs: Self) -> Self::Output {
        assert!(rhs.0 > 0, "Division by zero in HeapMemorySize");
        assert!(
            self.0 > 0,
            "Numerator must be positive in HeapMemorySize division"
        );
        assert_eq!(
            self.0 % rhs.0,
            0,
            "HeapMemorySize division must be exact"
        );

        CountU32(self.0 / rhs.0)
    }
}

#[derive(Debug, Copy, Clone, PartialOrd, Ord, Eq, PartialEq)]
pub struct MemorySize(pub u32);

impl Display for MemorySize {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let bytes = f64::from(self.0);

        if bytes < 1024.0 {
            write!(f, "{bytes} B")
        } else if bytes < 1024.0 * 1024.0 {
            write!(f, "{:.2} KiB", bytes / 1024.0)
        } else if bytes < 1024.0 * 1024.0 * 1024.0 {
            write!(f, "{:.2} MiB", bytes / (1024.0 * 1024.0))
        } else {
            write!(f, "{:.2} GiB", bytes / (1024.0 * 1024.0 * 1024.0))
        }
    }
}

impl From<MemorySize> for usize {
    fn from(val: MemorySize) -> Self {
        val.0 as Self
    }
}

impl Div<Self> for MemorySize {
    type Output = CountU32;

    fn div(self, rhs: Self) -> Self::Output {
        assert!(rhs.0 > 0, "Division by zero in MemorySize");
        assert!(
            self.0 > 0,
            "Numerator must be positive in MemorySize division"
        );
        assert_eq!(
            self.0 % rhs.0,
            0,
            "MemorySize division must be exact"
        );

        CountU32(self.0 / rhs.0)
    }
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum MemoryAlignment {
    // Do not change the order: the derived `Ord` relies on declaration order.
    U8,
    U16,
    U32,
    U64,
}

impl MemoryAlignment {
    #[must_use]
    const fn rank(&self) -> usize {
        match self {
            Self::U8 => 1,
            Self::U16 => 2,
            Self::U32 => 3,
            Self::U64 => 4,
        }
    }

    #[must_use]
    pub const fn greater_than(&self, other: Self) -> bool {
        self.rank() > other.rank()
    }
}

impl From<MemoryAlignment> for usize {
    fn from(val: MemoryAlignment) -> Self {
        match val {
            MemoryAlignment::U8 => 1,
            MemoryAlignment::U16 => 2,
            MemoryAlignment::U32 => 4,
            MemoryAlignment::U64 => 8,
        }
    }
}

impl From<MemoryAlignment> for u8 {
    fn from(val: MemoryAlignment) -> Self {
        match val {
            MemoryAlignment::U8 => 1,
            MemoryAlignment::U16 => 2,
            MemoryAlignment::U32 => 4,
            MemoryAlignment::U64 => 8,
        }
    }
}

impl TryInto<MemoryAlignment> for usize {
    type Error = ();

    fn try_into(self) -> Result<MemoryAlignment, Self::Error> {
        let converted = match self {
            1 => MemoryAlignment::U8,
            2 => MemoryAlignment::U16,
            4 => MemoryAlignment::U32,
            8 => MemoryAlignment::U64,
            _ => return Err(()),
        };
        Ok(converted)
    }
}

impl From<MemoryAlignment> for MemoryOffset {
    fn from(val: MemoryAlignment) -> Self {
        let octets: usize = val.into();
        Self(octets as u32)
    }
}

#[must_use]
pub fn align_frame_addr(
    memory_address: FrameMemoryAddress,
    alignment: MemoryAlignment,
) -> FrameMemoryAddress {
    let raw_addr = align(memory_address.0 as usize, alignment.into());

    FrameMemoryAddress(raw_addr as u32)
}

#[must_use]
pub fn align_offset(memory_address: MemoryOffset, alignment: MemoryAlignment) -> MemoryOffset {
    let raw_addr = align(memory_address.0 as usize, alignment.into());

    MemoryOffset(raw_addr as u32)
}

#[derive(Copy, Clone, Debug)]
pub struct FrameMemorySize(pub u32);

impl Display for FrameMemorySize {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "frame size: {:04X}", self.0)
    }
}

impl FrameMemorySize {
    #[must_use]
    pub const fn add(&self, inc: MemorySize) -> Self {
        Self(self.0 + inc.0)
    }
}

#[derive(Clone)]
pub struct Meta {
    pub comment: String,
    pub node: Node,
}

#[derive(Debug)]
pub struct PatchPosition(pub InstructionPosition);

#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct InstructionPosition(pub u32);

impl Add<ProgramCounterDelta> for InstructionPosition {
    type Output = Self;

    fn add(self, rhs: ProgramCounterDelta) -> Self::Output {
        Self(((self.0 as i32) + i32::from(rhs.0)) as u32)
    }
}

impl Sub<Self> for InstructionPosition {
    type Output = ProgramCounterDelta;

    fn sub(self, rhs: Self) -> Self::Output {
        assert!(self.0 >= rhs.0);

        ProgramCounterDelta(((self.0 as i32) - (rhs.0 as i32)) as i16)
    }
}

#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct ProgramCounterDelta(pub i16);

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct InstructionPositionOffset(pub u32);

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct InstructionRange {
    pub start: InstructionPosition,
    pub count: InstructionPositionOffset,
}

pub const INT_SIZE: u16 = 4;
pub const FLOAT_SIZE: u16 = 4;
pub const BOOL_SIZE: u16 = 1;

pub const PTR_SIZE: MemorySize = MemorySize(4);
pub const PTR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

pub const HEAP_PTR_ON_FRAME_SIZE: MemorySize = MemorySize(4);
pub const HEAP_PTR_ON_FRAME_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

pub const REG_ON_FRAME_SIZE: MemorySize = MemorySize(4);
pub const REG_ON_FRAME_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

pub const COLLECTION_CAPACITY_OFFSET: MemoryOffset = MemoryOffset(0); // Capacity should always be first.
pub const COLLECTION_ELEMENT_COUNT_OFFSET: MemoryOffset = MemoryOffset(2); // Element count should always be second.

#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct VecHeader {
    /// Do not change the order of the fields!
    ///
    /// Keep the capacity field at the start of the header for consistency across all
    /// container types. Placing it first simplifies copy operations: we can verify
    /// and preserve capacity before copying the remainder of the header in one contiguous operation.
    pub capacity: u16,

    /// Number of live (active) elements currently stored in the collection.
    ///
    /// Always located at offset 2, enabling:
    /// - **Logical size**: Represents the number of valid elements in use.
    /// - **Bounds checking**: Index and assignment checks (`0 <= idx < element_count`)
    ///   can load this field in a single instruction.
    /// - **Iteration**: Iterators read this field to determine the end of the collection.
    /// - **ABI stability**: External tools, debuggers, and serializers can consistently locate
    ///   `capacity` and `element_count` across all container types.
    pub element_count: u16,
    pub element_size: u32,
    pub padding: u32,
}

pub const VEC_HEADER_SIZE: MemorySize = MemorySize(size_of::<VecHeader>() as u32);

pub const VEC_HEADER_ELEMENT_COUNT_OFFSET: MemoryOffset = MemoryOffset(2);
pub const VEC_HEADER_PAYLOAD_OFFSET: MemoryOffset = MemoryOffset(size_of::<VecHeader>() as u32);
pub const VEC_HEADER_ALIGNMENT: MemoryAlignment = MemoryAlignment::U16;
pub const VEC_HEADER_MAGIC_CODE: u32 = 0xC001C0DE;

pub const VEC_PTR_SIZE: MemorySize = HEAP_PTR_ON_FRAME_SIZE;
pub const VEC_PTR_ALIGNMENT: MemoryAlignment = HEAP_PTR_ON_FRAME_ALIGNMENT;

#[repr(C)]
pub struct VecIterator {
    pub vec_header_heap_ptr: u32,
    pub index: u16,
}

pub const VEC_ITERATOR_SIZE: MemorySize = MemorySize(size_of::<VecIterator>() as u32);
pub const VEC_ITERATOR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

#[repr(C)]
pub struct StringIterator {
    pub string_heap_ptr: u32,
    pub byte_index: u16,
    pub index: u32, // How many iterations have been performed.
}

pub const STRING_ITERATOR_SIZE: MemorySize = MemorySize(size_of::<StringIterator>() as u32);
pub const STRING_ITERATOR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

#[repr(C)]
pub struct SparseIterator {
    pub sparse_header_heap_ptr: u32,
    pub index: u16,
}

pub const SPARSE_ITERATOR_SIZE: MemorySize = MemorySize(size_of::<SparseIterator>() as u32);
pub const SPARSE_ITERATOR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

#[repr(C)]
#[derive(Debug)]
pub struct RangeIterator {
    pub index: i32,
    pub end: i32,
    pub direction: i32,
}

pub const RANGE_ITERATOR_SIZE: MemorySize = MemorySize(size_of::<RangeIterator>() as u32);
pub const RANGE_ITERATOR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct RangeHeader {
    // Do not change! These fields must match the Range structure in Swamp core exactly.
    pub min: i32,
    pub max: i32,
    pub inclusive: bool,
}

pub const RANGE_HEADER_SIZE: MemorySize = MemorySize(size_of::<RangeHeader>() as u32);
pub const RANGE_HEADER_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GridHeader {
    /// Do not change the order of the fields!
    ///
    /// Keep the capacity field at the start of the header for consistency across all
    /// container types. Placing it first simplifies copy operations: we can verify
    /// and preserve capacity before copying the remainder of the header in one contiguous operation.
    pub capacity: u16,

    /// Number of live (active) elements currently stored in the collection.
    ///
    /// Always located at offset 2, enabling:
    /// - **Logical size**: Represents the number of valid elements in use.
    /// - **Bounds checking**: Index and assignment checks (`0 <= idx < element_count`)
    ///   can load this field in a single instruction.
    /// - **Iteration**: Iterators read this field to determine the end of the collection.
    /// - **ABI stability**: External tools, debuggers, and serializers can consistently locate
    ///   `capacity` and `element_count` across all container types.
    pub element_count: u16, // Always the same as capacity.

    pub element_size: u32,

    pub width: u16,
    pub height: u16,
    pub padding: u32,
}

pub const GRID_HEADER_SIZE: MemorySize = MemorySize(size_of::<GridHeader>() as u32);
pub const GRID_HEADER_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;
pub const GRID_HEADER_WIDTH_OFFSET: MemoryOffset = MemoryOffset(8);
pub const GRID_HEADER_HEIGHT_OFFSET: MemoryOffset = MemoryOffset(10);
pub const GRID_HEADER_PAYLOAD_OFFSET: MemoryOffset = MemoryOffset(size_of::<GridHeader>() as u32);
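
// A similar compile-time sketch for the grid header: the documented width and
// height offsets (8 and 10) must match the actual `#[repr(C)]` layout.
const _: () = {
    assert!(core::mem::offset_of!(GridHeader, width) == GRID_HEADER_WIDTH_OFFSET.0 as usize);
    assert!(core::mem::offset_of!(GridHeader, height) == GRID_HEADER_HEIGHT_OFFSET.0 as usize);
};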

pub const GRID_SECRET_CODE: u32 = 0x00_C0FFEE;

// NOTE: Must align to U32, therefore the padding at the end.

pub const MAP_HEADER_SIZE: MemorySize = MemorySize(size_of::<MapHeader>() as u32);
pub const MAP_HEADER_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;
pub const MAP_HEADER_KEY_SIZE_OFFSET: MemoryOffset = MemoryOffset(4);
pub const MAP_HEADER_TUPLE_SIZE_OFFSET: MemoryOffset = MemoryOffset(6);
pub const MAP_HEADER_LOGICAL_LIMIT_OFFSET: MemoryOffset = MemoryOffset(8);
pub const MAP_BUCKETS_OFFSET: MemoryOffset = MemoryOffset(MAP_HEADER_SIZE.0);

#[repr(C)]
pub struct MapIterator {
    pub map_header_frame_offset: u32,
    pub index: u32,
}

pub const MAP_ITERATOR_SIZE: MemorySize = MemorySize(size_of::<MapIterator>() as u32);
pub const MAP_ITERATOR_ALIGNMENT: MemoryAlignment = MemoryAlignment::U32;

pub const MAX_STRING_LEN: u16 = 16 * 1024;

pub const STRING_PTR_SIZE: MemorySize = HEAP_PTR_ON_FRAME_SIZE;
pub const STRING_PTR_ALIGNMENT: MemoryAlignment = HEAP_PTR_ON_FRAME_ALIGNMENT;