// sparse_mem/lib.rs

1/// Memory layout for a fixed-capacity, generation-tracked sparse array in memory. Designed for the Swamp VM.
2///
3/// Follows Swamp VM Collection standard with capacity and element count first, 2-bytes each.
/// Aligns the values region to an 8-byte boundary (see `layout_size` / `alignment`), even though it is mainly for a 32-bit VM:
5/// ```text
6/// offset 0:   capacity                (u16)
7/// offset 2:   element_count           (u16)
8/// offset 4:   element_size            (u32)
9/// offset 8:   slot_to_id              (u16[capacity])             // reverse lookup
10/// offset 8+2*capacity: id_to_slot     (u16[capacity])             // forward lookup
11/// offset 8+4*capacity: generation     (u16[capacity])             // bumped on alloc/remove
/// offset 8+6*capacity:     pad (u8[pad])                          // pad to 8-byte boundary
13/// offset 8+6*capacity+pad: values (raw bytes)
14/// ```
15use std::ptr;
16
17/// Compute total bytes needed in memory for a sparse array. Used for code generator to know
18/// how much space to reserve.
19#[must_use]
20pub const fn layout_size(capacity: u16, element_size: u32) -> usize {
21    let cap = capacity as usize;
22    // slot_to_id + id_to_slot: each u16[capacity]
23    let lookup = 2 * cap * size_of::<u16>();
24    // generation: u16[capacity]
25    let generation_size = cap * size_of::<u16>();
26    // bytes before values
27    let before_vals = HEADER_SIZE + lookup + generation_size;
28    // pad to 8-byte alignment
29    let pad = (8 - (before_vals % 8)) % 8;
30    // values: raw bytes per element
31    let vals = cap * element_size as usize;
32    before_vals + pad + vals
33}
34
/// Alignment requirement for the sparse array.
/// Reports 8, matching the 8-byte padding applied before the values region
/// in `layout_size` (the previous doc said 4, which contradicted the code).
#[must_use]
pub const fn alignment() -> usize {
    8
}
41
// Byte offset of the slot_to_id table; it starts immediately after the header.
const SLOT_OFFSET: usize = HEADER_SIZE;
// Header layout: capacity (u16) + element_count (u16) + element_size (u32) = 8 bytes.
const HEADER_SIZE: usize = 8;
44
45/// Initialize the sparse array to memory specified by the raw memory pointer.
46/// `base` must point to a region of at least `layout_size(capacity, element_size)` bytes.
47pub unsafe fn init(base: *mut u8, capacity: u16, element_size: u32) {
48    unsafe {
49        ptr::write(base.cast::<u16>(), capacity);
50        ptr::write(base.add(2).cast::<u16>(), 0);
51        ptr::write(base.add(4).cast::<u32>(), element_size);
52        let cap = capacity as usize;
53        let id_offset = SLOT_OFFSET + cap * size_of::<u16>();
54        let generation_offset = id_offset + cap * size_of::<u16>();
55        for i in 0..cap {
56            ptr::write(
57                base.add(SLOT_OFFSET).cast::<u16>().add(i),
58                (cap - 1 - i) as u16,
59            );
60        }
61        for i in 0..cap {
62            ptr::write(base.add(id_offset).cast::<u16>().add(i), u16::MAX);
63        }
64        for i in 0..cap {
65            ptr::write(base.add(generation_offset).cast::<u16>().add(i), 0);
66        }
67    }
68}
69
70/// Allocate a new ID and generation.
71pub unsafe fn allocate(base: *mut u8) -> Option<(u16, u16)> {
72    unsafe {
73        let capacity = *base.cast::<u16>() as usize;
74        let count_ptr = base.add(2).cast::<u16>();
75        let count = *count_ptr as usize;
76        if count >= capacity {
77            return None;
78        }
79
80        let id_offset = SLOT_OFFSET + capacity * size_of::<u16>();
81        let generation_offset = id_offset + capacity * size_of::<u16>();
82
83        let id = *base.add(SLOT_OFFSET).cast::<u16>().add(count);
84        ptr::write(count_ptr, (count as u16).wrapping_add(1));
85        ptr::write(
86            base.add(id_offset).cast::<u16>().add(id as usize),
87            count as u16,
88        );
89
90        let generation_ptr = base.add(generation_offset).cast::<u16>();
91        let new_gen = generation_ptr.add(id as usize).read().wrapping_add(1);
92
93        ptr::write(generation_ptr.add(id as usize), new_gen);
94
95        Some((id, new_gen))
96    }
97}
98
/// Compute offset of values region (values are aligned to 8 bytes).
/// Reads `capacity` from the header, skips the three `u16[capacity]` tables,
/// then rounds up to the next 8-byte boundary (same padding rule as `layout_size`).
// NOTE(review): this is a *safe* `const fn` that dereferences a caller-supplied
// raw pointer inside an internal `unsafe` block, so safe code can trigger UB
// with a bad pointer. Consider marking it `unsafe fn` — TODO confirm callers.
#[must_use]
pub const fn values_offset(base: *const u8) -> usize {
    let capacity = unsafe { *base.cast::<u16>() } as usize;
    let id_offset = SLOT_OFFSET + capacity * size_of::<u16>();
    let generation_offset = id_offset + capacity * size_of::<u16>();
    let before_values = generation_offset + capacity * size_of::<u16>();
    let padding = (8 - (before_values % 8)) % 8;
    before_values + padding
}
109
110/// Insert raw bytes at handle id
111/// # Safety
112///
113#[inline]
114pub unsafe fn insert(base: *mut u8, id: u16, src: *const u8) {
115    unsafe {
116        let element_size = *base.add(4).cast::<u32>() as usize;
117        let off = values_offset(base) + id as usize * element_size;
118        ptr::copy_nonoverlapping(src, base.add(off), element_size);
119    }
120}
121
/// Remove by handle; frees slot and bumps generation. Returns false on mismatching generations.
/// Uses swap-remove on the lookup tables only; the value bytes for `id` are
/// left in place (values are indexed by id, so nothing needs moving).
/// # Safety
/// `base` must point to memory initialized by `init`; `id` must be within capacity.
pub unsafe fn remove(base: *mut u8, id: u16, generation: u16) -> bool {
    unsafe {
        // Rejects dead slots and stale generations in one check.
        if !is_alive(base, id, generation) {
            return false;
        }

        let capacity = *base.cast::<u16>() as usize;
        let id_offset = SLOT_OFFSET + capacity * size_of::<u16>();
        let generation_offset = id_offset + capacity * size_of::<u16>();
        let count_ptr = base.add(2).cast::<u16>();
        let count = (*count_ptr) as usize;
        // count >= 1 here because is_alive required slot < count.
        let last = count - 1;
        ptr::write(count_ptr, last as u16);

        let slot_ptr = base.add(SLOT_OFFSET).cast::<u16>();
        // Slot currently occupied by the id being removed.
        let slot_idx = *base.add(id_offset).cast::<u16>().add(id as usize) as usize;
        // Id stored in the last live slot; it moves into the vacated slot.
        let last_id = *slot_ptr.add(last);
        ptr::write(slot_ptr.add(slot_idx), last_id);
        ptr::write(
            base.add(id_offset).cast::<u16>().add(last_id as usize),
            slot_idx as u16,
        );
        // Return the removed id to the free list: slot `last` is now free.
        ptr::write(slot_ptr.add(last), id);
        // Mark the id unmapped. Note the ordering: when id == last_id this
        // intentionally overwrites the forward mapping written above.
        ptr::write(base.add(id_offset).cast::<u16>().add(id as usize), u16::MAX);
        let gen_ptr = base.add(generation_offset).cast::<u16>();
        // Bump generation so outstanding handles with the old generation go stale.
        ptr::write(gen_ptr.add(id as usize), generation.wrapping_add(1));

        true
    }
}
155
156/// Check handle validity
157/// # Safety
158///
159pub unsafe fn is_alive(base: *mut u8, id: u16, generation: u16) -> bool {
160    unsafe {
161        let capacity = *base.cast::<u16>() as usize;
162        let count = *base.add(2).cast::<u16>() as usize;
163        let id_offset = SLOT_OFFSET + capacity * size_of::<u16>();
164        let generation_offset = id_offset + capacity * size_of::<u16>();
165        // Not only check generation, but also check that it slot is among the live ones.
166        // Also makes extra check to look in the other direction as well.
167        let slot = *base.add(id_offset).cast::<u16>().add(id as usize) as usize;
168        let current_generation = *base.add(generation_offset).cast::<u16>().add(id as usize);
169        slot < count
170            && current_generation == generation
171            && *base.add(SLOT_OFFSET).cast::<u16>().add(slot) == id
172    }
173}
174
175/// Get a pointer to the `slot_to_id` array (reverse lookup)
176/// # Safety
177///
178pub const unsafe fn slot_to_id_ptr(base: *mut u8) -> *mut u16 {
179    unsafe { base.add(SLOT_OFFSET).cast::<u16>() }
180}
181
/// Get a read-only pointer to the `generation` array (one u16 per id).
/// # Safety
/// `base` must point to memory initialized by `init`.
#[must_use]
pub const unsafe fn generation_ptr_const(base: *const u8) -> *const u16 {
    unsafe {
        // The generation table follows the two u16[capacity] lookup tables.
        let capacity = *base.cast::<u16>() as usize;
        let id_offset = SLOT_OFFSET + capacity * size_of::<u16>();
        let generation_offset = id_offset + capacity * size_of::<u16>();

        base.add(generation_offset).cast::<u16>()
    }
}
192
193/// Get a pointer to the `slot_to_id` array (reverse lookup)
194/// # Safety
195///
196#[must_use]
197pub const unsafe fn slot_to_id_ptr_const(base: *const u8) -> *const u16 {
198    unsafe { base.add(SLOT_OFFSET).cast::<u16>() }
199}
200
201/// Get a pointer to the `id_to_slot` array (forward lookup)
202/// # Safety
203///
204pub unsafe fn id_to_slot_ptr(base: *mut u8) -> *mut u16 {
205    unsafe {
206        let capacity = *base.cast::<u16>() as usize;
207        base.add(SLOT_OFFSET + capacity * size_of::<u16>())
208            .cast::<u16>()
209    }
210}
211
/// Get a read-only pointer to the `id_to_slot` array (forward lookup).
/// # Safety
/// `base` must point to memory initialized by `init`.
#[must_use]
pub const unsafe fn id_to_slot_ptr_const(base: *const u8) -> *const u16 {
    unsafe {
        // id_to_slot starts right after the u16[capacity] slot_to_id table.
        let capacity = *base.cast::<u16>() as usize;
        base.add(SLOT_OFFSET + capacity * size_of::<u16>())
            .cast::<u16>()
    }
}
220
/// Get current element count.
/// # Safety
/// `base` must point to a readable, 2-byte-aligned header of at least 4 bytes.
#[must_use]
pub const unsafe fn element_count(base: *const u8) -> u16 {
    // element_count lives at byte offset 2 in the header.
    unsafe { base.add(2).cast::<u16>().read() }
}
228
/// Get current element size.
/// # Safety
/// `base` must point to a readable, 4-byte-aligned header of at least 8 bytes.
#[must_use]
pub const unsafe fn element_size(base: *const u8) -> u32 {
    // element_size lives at byte offset 4 in the header.
    unsafe { base.add(4).cast::<u32>().read() }
}