1use super::vertex::{VertexId, VertexKind};
2use crate::SheetId;
3use formualizer_common::Coord as AbsCoord;
4use std::sync::atomic::{AtomicU8, Ordering};
5
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_vertex_store_allocation() {
        let mut store = VertexStore::new();
        let v = store.allocate(AbsCoord::new(10, 20), 1, 0x01);

        assert_eq!(store.coord(v), AbsCoord::new(10, 20));
        assert_eq!(store.sheet_id(v), 1);
        assert_eq!(store.flags(v), 0x01);
    }

    #[test]
    fn test_vertex_store_grow() {
        // Start well below the final size so every column is forced to reallocate.
        let mut store = VertexStore::with_capacity(1000);
        for i in 0..10_000 {
            store.allocate(AbsCoord::new(i, i), 0, 0);
        }
        assert_eq!(store.len(), 10_000);
    }

    #[test]
    fn test_vertex_store_capacity() {
        let store = VertexStore::with_capacity(100);
        // Every column must honor the requested capacity.
        assert!(store.coords.capacity() >= 100);
        assert!(store.sheet_kind.capacity() >= 100);
        assert!(store.flags.capacity() >= 100);
        assert!(store.value_ref.capacity() >= 100);
        assert!(store.edge_offset.capacity() >= 100);
    }

    #[test]
    fn test_vertex_store_accessors() {
        let mut store = VertexStore::new();
        let v = store.allocate(AbsCoord::new(5, 10), 3, 0x03);

        let c = store.coord(v);
        assert_eq!((c.row(), c.col()), (5, 10));
        assert_eq!(store.sheet_id(v), 3);

        // 0x03 sets both the dirty (0x01) and volatile (0x02) bits.
        assert_eq!(store.flags(v), 0x03);
        assert!(store.is_dirty(v));
        assert!(store.is_volatile(v));

        store.set_kind(v, VertexKind::Cell);
        assert_eq!(store.kind(v), VertexKind::Cell);
    }

    #[test]
    fn test_reserved_vertex_range() {
        let mut store = VertexStore::new();
        let v = store.allocate(AbsCoord::new(0, 0), 0, 0);
        // Allocated ids must never fall into the reserved range below 1024.
        assert!(v.0 >= FIRST_NORMAL_VERTEX);
    }

    #[test]
    fn test_atomic_flag_operations() {
        let mut store = VertexStore::new();
        let v = store.allocate(AbsCoord::new(0, 0), 0, 0);

        // Dirty bit toggles on and off.
        store.set_dirty(v, true);
        assert!(store.is_dirty(v));
        store.set_dirty(v, false);
        assert!(!store.is_dirty(v));

        // Volatile bit is independent of the dirty bit.
        store.set_volatile(v, true);
        assert!(store.is_volatile(v));
    }

    #[test]
    fn test_vertex_store_set_coord() {
        let mut store = VertexStore::new();
        let v = store.allocate(AbsCoord::new(1, 1), 0, 0);

        store.set_coord(v, AbsCoord::new(5, 10));
        assert_eq!(store.coord(v), AbsCoord::new(5, 10));
    }

    #[test]
    fn test_vertex_store_atomic_flags() {
        let mut store = VertexStore::new();
        let v = store.allocate(AbsCoord::new(0, 0), 0, 0);

        store.set_dirty(v, true);
        assert!(store.is_dirty(v));

        store.set_volatile(v, true);
        assert!(store.is_volatile(v));

        store.mark_deleted(v, true);
        assert!(store.is_deleted(v));
    }

    #[test]
    fn test_reserved_id_range_preserved() {
        let mut store = VertexStore::new();
        let v = store.allocate(AbsCoord::new(0, 0), 0, 0);
        assert!(v.0 >= FIRST_NORMAL_VERTEX);

        // Deletion is a soft flag: the slot still exists afterwards.
        store.mark_deleted(v, true);
        assert!(store.vertex_exists(v));
        assert!(store.is_deleted(v));
    }
}
128
/// First id handed out by `VertexStore::allocate`; all ids below this value
/// are reserved for special vertex classes.
pub const FIRST_NORMAL_VERTEX: u32 = 1024;
/// Start of a reserved id sub-range (presumably for range vertices, per the
/// name — not referenced elsewhere in this file; confirm against callers).
pub const RANGE_VERTEX_START: u32 = 0;
/// Start of a reserved id sub-range (presumably for external-reference
/// vertices, per the name — not referenced elsewhere in this file).
pub const EXTERNAL_VERTEX_START: u32 = 256;
133
134#[repr(C, align(64))]
141#[derive(Debug)]
142pub struct VertexStore {
143 coords: Vec<AbsCoord>, sheet_kind: Vec<u32>, flags: Vec<AtomicU8>, value_ref: Vec<u32>, edge_offset: Vec<u32>, len: usize,
152}
153
154impl Default for VertexStore {
155 fn default() -> Self {
156 Self::new()
157 }
158}
159
160impl VertexStore {
161 pub fn new() -> Self {
162 Self {
163 coords: Vec::new(),
164 sheet_kind: Vec::new(),
165 flags: Vec::new(),
166 value_ref: Vec::new(),
167 edge_offset: Vec::new(),
168 len: 0,
169 }
170 }
171
172 pub fn with_capacity(capacity: usize) -> Self {
173 Self {
174 coords: Vec::with_capacity(capacity),
175 sheet_kind: Vec::with_capacity(capacity),
176 flags: Vec::with_capacity(capacity),
177 value_ref: Vec::with_capacity(capacity),
178 edge_offset: Vec::with_capacity(capacity),
179 len: 0,
180 }
181 }
182
183 pub fn reserve(&mut self, additional: usize) {
185 if additional == 0 {
186 return;
187 }
188 let target = self.len + additional;
190 if self.coords.capacity() < target {
191 self.coords.reserve(additional);
192 }
193 if self.sheet_kind.capacity() < target {
194 self.sheet_kind.reserve(additional);
195 }
196 if self.flags.capacity() < target {
197 self.flags.reserve(additional);
198 }
199 if self.value_ref.capacity() < target {
200 self.value_ref.reserve(additional);
201 }
202 if self.edge_offset.capacity() < target {
203 self.edge_offset.reserve(additional);
204 }
205 }
206
207 pub fn allocate(&mut self, coord: AbsCoord, sheet: SheetId, flags: u8) -> VertexId {
210 let id = VertexId(self.len as u32 + FIRST_NORMAL_VERTEX);
211 debug_assert!(id.0 >= FIRST_NORMAL_VERTEX);
212
213 self.coords.push(coord);
214 self.sheet_kind.push((sheet as u32) << 16);
215 self.flags.push(AtomicU8::new(flags));
216 self.value_ref.push(0);
217 self.edge_offset.push(0);
218 self.len += 1;
219
220 id
221 }
222
223 pub fn allocate_contiguous(
226 &mut self,
227 sheet: SheetId,
228 coords: &[AbsCoord],
229 flags: u8,
230 ) -> Vec<VertexId> {
231 if coords.is_empty() {
232 return Vec::new();
233 }
234 self.reserve(coords.len());
235 let mut ids = Vec::with_capacity(coords.len());
236 for &coord in coords {
237 ids.push(self.allocate(coord, sheet, flags));
238 }
239 ids
240 }
241
242 #[inline]
243 pub fn len(&self) -> usize {
244 self.len
245 }
246
247 #[inline]
249 fn vertex_id_to_index(&self, id: VertexId) -> Option<usize> {
250 if id.0 < FIRST_NORMAL_VERTEX {
251 return None;
252 }
253 let idx = (id.0 - FIRST_NORMAL_VERTEX) as usize;
254 if idx >= self.len {
255 return None;
256 }
257 Some(idx)
258 }
259
260 #[inline]
261 pub fn is_empty(&self) -> bool {
262 self.len == 0
263 }
264
265 #[inline]
267 pub fn coord(&self, id: VertexId) -> AbsCoord {
268 if let Some(idx) = self.vertex_id_to_index(id) {
269 self.coords[idx]
270 } else {
271 AbsCoord::new(0, 0) }
273 }
274
275 #[inline]
276 pub fn sheet_id(&self, id: VertexId) -> SheetId {
277 if let Some(idx) = self.vertex_id_to_index(id) {
278 (self.sheet_kind[idx] >> 16) as SheetId
279 } else {
280 0 }
282 }
283
284 #[inline]
285 pub fn kind(&self, id: VertexId) -> VertexKind {
286 if let Some(idx) = self.vertex_id_to_index(id) {
287 let tag = ((self.sheet_kind[idx] >> 8) & 0xFF) as u8;
288 VertexKind::from_tag(tag)
289 } else {
290 VertexKind::Empty }
292 }
293
294 #[inline]
295 pub fn set_kind(&mut self, id: VertexId, kind: VertexKind) {
296 if let Some(idx) = self.vertex_id_to_index(id) {
297 let sheet_bits = self.sheet_kind[idx] & 0xFFFF0000;
298 self.sheet_kind[idx] = sheet_bits | ((kind.to_tag() as u32) << 8);
299 }
300 }
301
302 #[inline]
303 pub fn flags(&self, id: VertexId) -> u8 {
304 if let Some(idx) = self.vertex_id_to_index(id) {
305 self.flags[idx].load(Ordering::Acquire)
306 } else {
307 0 }
309 }
310
311 #[inline]
312 pub fn is_dirty(&self, id: VertexId) -> bool {
313 self.flags(id) & 0x01 != 0
314 }
315
316 #[inline]
317 pub fn is_volatile(&self, id: VertexId) -> bool {
318 self.flags(id) & 0x02 != 0
319 }
320
321 #[inline]
322 pub fn is_deleted(&self, id: VertexId) -> bool {
323 self.flags(id) & 0x04 != 0
324 }
325
326 #[inline]
327 pub fn is_dynamic(&self, id: VertexId) -> bool {
328 self.flags(id) & 0x08 != 0
329 }
330
331 #[inline]
332 pub fn set_dirty(&self, id: VertexId, dirty: bool) {
333 if id.0 < FIRST_NORMAL_VERTEX {
334 return; }
336 let idx = (id.0 - FIRST_NORMAL_VERTEX) as usize;
337 if idx >= self.flags.len() {
338 return; }
340 if dirty {
341 self.flags[idx].fetch_or(0x01, Ordering::Release);
342 } else {
343 self.flags[idx].fetch_and(!0x01, Ordering::Release);
344 }
345 }
346
347 #[inline]
348 pub fn set_volatile(&self, id: VertexId, volatile: bool) {
349 if id.0 < FIRST_NORMAL_VERTEX {
350 return;
351 }
352 if let Some(idx) = self.vertex_id_to_index(id) {
353 if volatile {
354 self.flags[idx].fetch_or(0x02, std::sync::atomic::Ordering::Release);
355 } else {
356 self.flags[idx].fetch_and(!0x02, std::sync::atomic::Ordering::Release);
357 }
358 }
359 }
360
361 #[inline]
362 pub fn set_dynamic(&self, id: VertexId, dynamic: bool) {
363 if id.0 < FIRST_NORMAL_VERTEX {
364 return;
365 }
366 if let Some(idx) = self.vertex_id_to_index(id) {
367 if dynamic {
368 self.flags[idx].fetch_or(0x08, std::sync::atomic::Ordering::Release);
369 } else {
370 self.flags[idx].fetch_and(!0x08, std::sync::atomic::Ordering::Release);
371 }
372 }
373 }
374
375 #[inline]
376 pub fn value_ref(&self, id: VertexId) -> u32 {
377 if let Some(idx) = self.vertex_id_to_index(id) {
378 self.value_ref[idx]
379 } else {
380 0 }
382 }
383
384 #[inline]
385 pub fn set_value_ref(&mut self, id: VertexId, value_ref: u32) {
386 if let Some(idx) = self.vertex_id_to_index(id) {
387 self.value_ref[idx] = value_ref;
388 }
389 }
390
391 #[inline]
392 pub fn edge_offset(&self, id: VertexId) -> u32 {
393 if let Some(idx) = self.vertex_id_to_index(id) {
394 self.edge_offset[idx]
395 } else {
396 0 }
398 }
399
400 #[inline]
401 pub fn set_edge_offset(&mut self, id: VertexId, offset: u32) {
402 if let Some(idx) = self.vertex_id_to_index(id) {
403 self.edge_offset[idx] = offset;
404 }
405 }
406
407 #[doc(hidden)]
411 pub fn set_coord(&mut self, id: VertexId, coord: AbsCoord) {
412 if let Some(idx) = self.vertex_id_to_index(id) {
413 self.coords[idx] = coord;
414 }
415 }
416
417 pub fn mark_deleted(&self, id: VertexId, deleted: bool) {
419 if let Some(idx) = self.vertex_id_to_index(id) {
420 if deleted {
421 self.flags[idx].fetch_or(0x04, Ordering::Release);
422 } else {
423 self.flags[idx].fetch_and(!0x04, Ordering::Release);
424 }
425 }
426 }
427
428 pub fn vertex_exists(&self, id: VertexId) -> bool {
430 self.vertex_id_to_index(id).is_some()
431 }
432
433 pub fn vertex_exists_active(&self, id: VertexId) -> bool {
435 self.vertex_id_to_index(id)
436 .map(|_| !self.is_deleted(id))
437 .unwrap_or(false)
438 }
439
440 pub fn all_vertices(&self) -> impl Iterator<Item = VertexId> + '_ {
442 (0..self.len).map(|i| VertexId((i as u32) + FIRST_NORMAL_VERTEX))
443 }
444}