use super::packed_coord::PackedCoord;
use super::vertex::{VertexId, VertexKind};
use crate::SheetId;
use std::sync::atomic::{AtomicU8, Ordering};

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_vertex_store_allocation() {
        let mut store = VertexStore::new();
        let id = store.allocate(PackedCoord::new(10, 20), 1, 0x01);
        assert_eq!(store.coord(id), PackedCoord::new(10, 20));
        assert_eq!(store.sheet_id(id), 1);
        assert_eq!(store.flags(id), 0x01);
    }

    #[test]
    fn test_vertex_store_grow() {
        let mut store = VertexStore::with_capacity(1000);
        for i in 0..10_000 {
            store.allocate(PackedCoord::new(i, i), 0, 0);
        }
        assert_eq!(store.len(), 10_000);
    }

    #[test]
    fn test_vertex_store_capacity() {
        let store = VertexStore::with_capacity(100);
        assert!(store.coords.capacity() >= 100);
        assert!(store.sheet_kind.capacity() >= 100);
        assert!(store.flags.capacity() >= 100);
        assert!(store.value_ref.capacity() >= 100);
        assert!(store.edge_offset.capacity() >= 100);
    }

    #[test]
    fn test_vertex_store_accessors() {
        let mut store = VertexStore::new();
        let id = store.allocate(PackedCoord::new(5, 10), 3, 0x03);

        assert_eq!(store.coord(id).row(), 5);
        assert_eq!(store.coord(id).col(), 10);

        assert_eq!(store.sheet_id(id), 3);

        assert_eq!(store.flags(id), 0x03);
        assert!(store.is_dirty(id));
        assert!(store.is_volatile(id));

        store.set_kind(id, VertexKind::Cell);
        assert_eq!(store.kind(id), VertexKind::Cell);
    }

    #[test]
    fn test_reserved_vertex_range() {
        let mut store = VertexStore::new();
        let id = store.allocate(PackedCoord::new(0, 0), 0, 0);
        assert!(id.0 >= FIRST_NORMAL_VERTEX);
    }

    #[test]
    fn test_atomic_flag_operations() {
        let mut store = VertexStore::new();
        let id = store.allocate(PackedCoord::new(0, 0), 0, 0);

        store.set_dirty(id, true);
        assert!(store.is_dirty(id));

        store.set_dirty(id, false);
        assert!(!store.is_dirty(id));

        store.set_volatile(id, true);
        assert!(store.is_volatile(id));
    }

    #[test]
    fn test_vertex_store_set_coord() {
        let mut store = VertexStore::new();
        let id = store.allocate(PackedCoord::new(1, 1), 0, 0);

        store.set_coord(id, PackedCoord::new(5, 10));
        assert_eq!(store.coord(id), PackedCoord::new(5, 10));
    }

    #[test]
    fn test_vertex_store_atomic_flags() {
        let mut store = VertexStore::new();
        let id = store.allocate(PackedCoord::new(0, 0), 0, 0);

        store.set_dirty(id, true);
        assert!(store.is_dirty(id));

        store.set_volatile(id, true);
        assert!(store.is_volatile(id));

        store.mark_deleted(id, true);
        assert!(store.is_deleted(id));
    }

    #[test]
    fn test_reserved_id_range_preserved() {
        let mut store = VertexStore::new();

        let id = store.allocate(PackedCoord::new(0, 0), 0, 0);
        assert!(id.0 >= FIRST_NORMAL_VERTEX);

        store.mark_deleted(id, true);
        assert!(store.vertex_exists(id));
        assert!(store.is_deleted(id));
    }
}

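// Vertex id space layout: ids below FIRST_NORMAL_VERTEX are reserved and are
// never handed out by `allocate`. RANGE_VERTEX_START and EXTERNAL_VERTEX_START
// appear to mark the starts of the reserved blocks for range vertices and
// external references, respectively.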
pub const FIRST_NORMAL_VERTEX: u32 = 1024;
pub const RANGE_VERTEX_START: u32 = 0;
pub const EXTERNAL_VERTEX_START: u32 = 256;

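/// Columnar (struct-of-arrays) vertex storage, aligned to a 64-byte cache line.
///
/// Each vertex id maps to one slot in the parallel `coords`, `sheet_kind`,
/// `flags`, `value_ref`, and `edge_offset` columns. `sheet_kind` packs the
/// sheet id into the upper 16 bits and the vertex-kind tag into bits 8..16.
/// The flag byte is an `AtomicU8`, so dirty/volatile/deleted bits can be
/// toggled through a shared reference; every other column needs `&mut self`.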
#[repr(C, align(64))]
#[derive(Debug)]
pub struct VertexStore {
    coords: Vec<PackedCoord>,
    sheet_kind: Vec<u32>,
    flags: Vec<AtomicU8>,
    value_ref: Vec<u32>,
    edge_offset: Vec<u32>,
    len: usize,
}

impl Default for VertexStore {
    fn default() -> Self {
        Self::new()
    }
}

impl VertexStore {
    pub fn new() -> Self {
        Self {
            coords: Vec::new(),
            sheet_kind: Vec::new(),
            flags: Vec::new(),
            value_ref: Vec::new(),
            edge_offset: Vec::new(),
            len: 0,
        }
    }

    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            coords: Vec::with_capacity(capacity),
            sheet_kind: Vec::with_capacity(capacity),
            flags: Vec::with_capacity(capacity),
            value_ref: Vec::with_capacity(capacity),
            edge_offset: Vec::with_capacity(capacity),
            len: 0,
        }
    }

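    /// Reserves room for at least `additional` more vertices in every column.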
    pub fn reserve(&mut self, additional: usize) {
        if additional == 0 {
            return;
        }
        let target = self.len + additional;
        if self.coords.capacity() < target {
            self.coords.reserve(additional);
        }
        if self.sheet_kind.capacity() < target {
            self.sheet_kind.reserve(additional);
        }
        if self.flags.capacity() < target {
            self.flags.reserve(additional);
        }
        if self.value_ref.capacity() < target {
            self.value_ref.reserve(additional);
        }
        if self.edge_offset.capacity() < target {
            self.edge_offset.reserve(additional);
        }
    }

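    /// Allocates a new vertex slot and returns its id, which is the slot index
    /// offset by `FIRST_NORMAL_VERTEX`. The kind tag, value ref, and edge offset
    /// start out zeroed.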
    pub fn allocate(&mut self, coord: PackedCoord, sheet: SheetId, flags: u8) -> VertexId {
        let id = VertexId(self.len as u32 + FIRST_NORMAL_VERTEX);
        debug_assert!(id.0 >= FIRST_NORMAL_VERTEX);

        self.coords.push(coord);
        self.sheet_kind.push((sheet as u32) << 16);
        self.flags.push(AtomicU8::new(flags));
        self.value_ref.push(0);
        self.edge_offset.push(0);
        self.len += 1;

        id
    }

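    /// Allocates one vertex per entry in `coords`, reserving space up front.
    /// The returned ids are contiguous and in input order.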
    pub fn allocate_contiguous(
        &mut self,
        sheet: SheetId,
        coords: &[PackedCoord],
        flags: u8,
    ) -> Vec<VertexId> {
        if coords.is_empty() {
            return Vec::new();
        }
        self.reserve(coords.len());
        let mut ids = Vec::with_capacity(coords.len());
        for &coord in coords {
            ids.push(self.allocate(coord, sheet, flags));
        }
        ids
    }

    #[inline]
    pub fn len(&self) -> usize {
        self.len
    }

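    /// Maps a vertex id back to its slot index, returning `None` for reserved
    /// ids (below `FIRST_NORMAL_VERTEX`) and ids that were never allocated.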
    #[inline]
    fn vertex_id_to_index(&self, id: VertexId) -> Option<usize> {
        if id.0 < FIRST_NORMAL_VERTEX {
            return None;
        }
        let idx = (id.0 - FIRST_NORMAL_VERTEX) as usize;
        if idx >= self.len {
            return None;
        }
        Some(idx)
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

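    // Note: the read accessors below return a zero/default value, rather than
    // panicking, when the id is reserved or was never allocated.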
    #[inline]
    pub fn coord(&self, id: VertexId) -> PackedCoord {
        if let Some(idx) = self.vertex_id_to_index(id) {
            self.coords[idx]
        } else {
            PackedCoord::new(0, 0)
        }
    }

    #[inline]
    pub fn sheet_id(&self, id: VertexId) -> SheetId {
        if let Some(idx) = self.vertex_id_to_index(id) {
            (self.sheet_kind[idx] >> 16) as SheetId
        } else {
            0
        }
    }

    #[inline]
    pub fn kind(&self, id: VertexId) -> VertexKind {
        if let Some(idx) = self.vertex_id_to_index(id) {
            let tag = ((self.sheet_kind[idx] >> 8) & 0xFF) as u8;
            VertexKind::from_tag(tag)
        } else {
            VertexKind::Empty
        }
    }

    #[inline]
    pub fn set_kind(&mut self, id: VertexId, kind: VertexKind) {
        if let Some(idx) = self.vertex_id_to_index(id) {
            let sheet_bits = self.sheet_kind[idx] & 0xFFFF0000;
            self.sheet_kind[idx] = sheet_bits | ((kind.to_tag() as u32) << 8);
        }
    }

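    // Flag bit layout: 0x01 = dirty, 0x02 = volatile, 0x04 = deleted.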
    #[inline]
    pub fn flags(&self, id: VertexId) -> u8 {
        if let Some(idx) = self.vertex_id_to_index(id) {
            self.flags[idx].load(Ordering::Acquire)
        } else {
            0
        }
    }

    #[inline]
    pub fn is_dirty(&self, id: VertexId) -> bool {
        self.flags(id) & 0x01 != 0
    }

    #[inline]
    pub fn is_volatile(&self, id: VertexId) -> bool {
        self.flags(id) & 0x02 != 0
    }

    #[inline]
    pub fn is_deleted(&self, id: VertexId) -> bool {
        self.flags(id) & 0x04 != 0
    }

    #[inline]
    pub fn set_dirty(&self, id: VertexId, dirty: bool) {
        if id.0 < FIRST_NORMAL_VERTEX {
            return;
        }
        let idx = (id.0 - FIRST_NORMAL_VERTEX) as usize;
        if idx >= self.flags.len() {
            return;
        }
        if dirty {
            self.flags[idx].fetch_or(0x01, Ordering::Release);
        } else {
            self.flags[idx].fetch_and(!0x01, Ordering::Release);
        }
    }

    #[inline]
    pub fn set_volatile(&self, id: VertexId, volatile: bool) {
        if let Some(idx) = self.vertex_id_to_index(id) {
            if volatile {
                self.flags[idx].fetch_or(0x02, Ordering::Release);
            } else {
                self.flags[idx].fetch_and(!0x02, Ordering::Release);
            }
        }
    }

    #[inline]
    pub fn value_ref(&self, id: VertexId) -> u32 {
        if let Some(idx) = self.vertex_id_to_index(id) {
            self.value_ref[idx]
        } else {
            0
        }
    }

    #[inline]
    pub fn set_value_ref(&mut self, id: VertexId, value_ref: u32) {
        if let Some(idx) = self.vertex_id_to_index(id) {
            self.value_ref[idx] = value_ref;
        }
    }

    #[inline]
    pub fn edge_offset(&self, id: VertexId) -> u32 {
        if let Some(idx) = self.vertex_id_to_index(id) {
            self.edge_offset[idx]
        } else {
            0
        }
    }

    #[inline]
    pub fn set_edge_offset(&mut self, id: VertexId, offset: u32) {
        if let Some(idx) = self.vertex_id_to_index(id) {
            self.edge_offset[idx] = offset;
        }
    }

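    /// Rewrites a vertex's coordinate in place (hidden from generated docs).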
    #[doc(hidden)]
    pub fn set_coord(&mut self, id: VertexId, coord: PackedCoord) {
        if let Some(idx) = self.vertex_id_to_index(id) {
            self.coords[idx] = coord;
        }
    }

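    /// Sets or clears the deleted flag. The slot itself is kept, so the id stays
    /// valid for lookups (compare `vertex_exists` with `vertex_exists_active`).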
    pub fn mark_deleted(&self, id: VertexId, deleted: bool) {
        if let Some(idx) = self.vertex_id_to_index(id) {
            if deleted {
                self.flags[idx].fetch_or(0x04, Ordering::Release);
            } else {
                self.flags[idx].fetch_and(!0x04, Ordering::Release);
            }
        }
    }

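    /// Returns true if `id` refers to an allocated slot, even one that has been
    /// marked deleted.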
    pub fn vertex_exists(&self, id: VertexId) -> bool {
        self.vertex_id_to_index(id).is_some()
    }

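    /// Returns true only if `id` refers to an allocated slot that has not been
    /// marked deleted.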
    pub fn vertex_exists_active(&self, id: VertexId) -> bool {
        self.vertex_id_to_index(id)
            .map(|_| !self.is_deleted(id))
            .unwrap_or(false)
    }

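    /// Iterates over every allocated vertex id, including deleted ones.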
    pub fn all_vertices(&self) -> impl Iterator<Item = VertexId> + '_ {
        (0..self.len).map(|i| VertexId((i as u32) + FIRST_NORMAL_VERTEX))
    }
}