fusion_blossom/
dual_module_serial.rs

1//! Serial Dual Module
2//!
3//! A serial implementation of the dual module. This is the very basic fusion blossom algorithm that aims at debugging and as a ground truth
4//! where traditional matching is too time consuming because of their |E| = O(|V|^2) scaling.
5//!
//! This implementation supports fast clear: optimized for a small number of syndromes and small cluster coverage, the "clear growth" operator
7//! can be executed in O(1) time, at the cost of dynamic check and dynamic reset. This also increases cache coherency, because a global clear
8//! operation is unfriendly to cache.
9//!
10
11#![cfg_attr(feature = "unsafe_pointer", allow(dropping_references))]
12use super::dual_module::*;
13use super::pointers::*;
14use super::util::*;
15use super::visualize::*;
16use crate::derivative::Derivative;
17use crate::weak_table::PtrWeakKeyHashMap;
18use std::collections::HashMap;
19
/// A serial implementation of the dual module: stores the full decoding graph
/// (vertices and edges) plus the internal state of every dual node, and supports
/// constant-time clear via timestamps instead of a global reset.
pub struct DualModuleSerial {
    /// all vertices including virtual ones
    pub vertices: Vec<VertexPtr>,
    /// nodes internal information
    pub nodes: Vec<Option<DualNodeInternalPtr>>,
    /// current nodes length, to enable constant-time clear operation
    pub nodes_length: usize,
    /// keep edges, which can also be accessed in [`Self::vertices`]
    pub edges: Vec<EdgePtr>,
    /// current timestamp; entries whose timestamp lags behind are treated as stale (fast clear)
    pub active_timestamp: FastClearTimestamp,
    /// the number of all vertices (including those partitioned into other serial modules)
    pub vertex_num: VertexNum,
    /// the number of all edges (including those partitioned into other serial modules)
    pub edge_num: usize,
    /// vertices exclusively owned by this module, useful when partitioning the decoding graph into multiple [`DualModuleSerial`]
    pub owning_range: VertexRange,
    /// module information when used as a component in the partitioned dual module; `None` for the plain serial version
    pub unit_module_info: Option<UnitModuleInfo>,
    /// maintain an active list to optimize for average cases: most defect vertices have already been matched, and we only need to work on a few remained;
    /// note that this list may contain deleted node as well as duplicate nodes
    pub active_list: Vec<DualNodeInternalWeak>,
    /// helps to deduplicate [`DualModuleSerial::active_list`]
    current_cycle: usize,
    /// remember the edges that are modified by erasures, so their weights can be restored on clear
    pub edge_modifier: EdgeWeightModifier,
    /// deduplicate edges in the boundary, helpful when the decoding problem is partitioned
    pub edge_dedup_timestamp: FastClearTimestamp,
    /// temporary list of synchronize requests, i.e. those propagating into the mirrored vertices; should always be empty when not partitioned, i.e. serial version
    pub sync_requests: Vec<SyncRequest>,
    /// temporary variable to reduce reallocation
    updated_boundary: Vec<(bool, EdgeWeak)>,
    /// temporary variable to reduce reallocation
    propagating_vertices: Vec<(VertexWeak, Option<DualNodeInternalWeak>)>,
}
55
/// records information only available when used as a unit in the partitioned dual module
#[derive(Derivative)]
#[derivative(Debug)]
pub struct UnitModuleInfo {
    /// unit index
    pub unit_index: usize,
    /// all mirrored vertices (excluding owned ones) to query if this module contains the vertex;
    /// maps a global vertex index to the local index inside this unit
    pub mirrored_vertices: HashMap<VertexIndex, VertexIndex>,
    /// owned dual nodes range
    pub owning_dual_range: NodeRange,
    /// hash table for mapping [`DualNodePtr`] to internal [`DualNodeInternalPtr`]
    pub dual_node_pointers: PtrWeakKeyHashMap<DualNodeWeak, usize>,
}
69
/// shared-ownership pointer to a [`DualModuleSerial`] (manually-locked Arc)
pub type DualModuleSerialPtr = ArcManualSafeLock<DualModuleSerial>;
/// weak counterpart of [`DualModuleSerialPtr`]
pub type DualModuleSerialWeak = WeakManualSafeLock<DualModuleSerial>;
72
/// internal information of the dual node, added to the [`DualNode`]
#[derive(Derivative)]
#[derivative(Debug)]
pub struct DualNodeInternal {
    /// the pointer to the origin [`DualNode`]
    pub origin: DualNodeWeak,
    /// local index, to find myself in [`DualModuleSerial::nodes`]
    index: NodeIndex,
    /// dual variable of this node
    pub dual_variable: Weight,
    /// edges on the boundary of this node, (`is_left`, `edge`); `is_left` tells which
    /// endpoint of the edge this node grows from
    pub boundary: Vec<(bool, EdgeWeak)>,
    /// over-grown vertices on the boundary of this node, this is to solve a bug where all surrounding edges are fully grown
    /// so all edges are deleted from the boundary... this will lose track of the real boundary when shrinking back
    pub overgrown_stack: Vec<(VertexWeak, Weight)>,
    /// helps to prevent duplicate visit in a single cycle
    last_visit_cycle: usize,
}
91
// when using feature `dangerous_pointer`, it doesn't provide the `upgrade()` function, so we have to fall back to the safe solution
/// shared-ownership pointer to a [`DualNodeInternal`]
pub type DualNodeInternalPtr = ArcManualSafeLock<DualNodeInternal>;
/// weak counterpart of [`DualNodeInternalPtr`]
pub type DualNodeInternalWeak = WeakManualSafeLock<DualNodeInternal>;
95
96impl std::fmt::Debug for DualNodeInternalPtr {
97    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
98        let dual_node_internal = self.read_recursive();
99        write!(f, "{}", dual_node_internal.index)
100    }
101}
102
103impl std::fmt::Debug for DualNodeInternalWeak {
104    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
105        self.upgrade_force().fmt(f)
106    }
107}
108
/// a vertex of the decoding graph, together with the fast-clear timestamp and the
/// dual-node propagation state that changes during decoding
#[derive(Derivative)]
#[derivative(Debug)]
pub struct Vertex {
    /// the index of this vertex in the decoding graph, not necessary the index in [`DualModuleSerial::vertices`] if it's partitioned
    pub vertex_index: VertexIndex,
    /// if a vertex is virtual, then it can be matched any times
    pub is_virtual: bool,
    /// if a vertex is defect, then [`Vertex::propagated_dual_node`] always corresponds to that root
    pub is_defect: bool,
    /// if it's a mirrored vertex (present on multiple units), then this is the parallel unit that exclusively owns it
    pub mirror_unit: Option<PartitionUnitWeak>,
    /// all neighbor edges, in surface code this should be constant number of edges
    #[derivative(Debug = "ignore")]
    pub edges: Vec<EdgeWeak>,
    /// propagated dual node
    pub propagated_dual_node: Option<DualNodeInternalWeak>,
    /// propagated grandson node: must be a syndrome node
    pub propagated_grandson_dual_node: Option<DualNodeInternalWeak>,
    /// for fast clear
    pub timestamp: FastClearTimestamp,
}
130
/// shared-ownership pointer to a [`Vertex`], with fast-clear support
pub type VertexPtr = FastClearArcManualSafeLockDangerous<Vertex>;
/// weak counterpart of [`VertexPtr`]
pub type VertexWeak = FastClearWeakManualSafeLockDangerous<Vertex>;
133
134impl std::fmt::Debug for VertexPtr {
135    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
136        let vertex = self.read_recursive_force();
137        write!(f, "{}", vertex.vertex_index)
138    }
139}
140
141impl std::fmt::Debug for VertexWeak {
142    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
143        let vertex_ptr = self.upgrade_force();
144        let vertex = vertex_ptr.read_recursive_force();
145        write!(f, "{}", vertex.vertex_index)
146    }
147}
148
/// an edge of the decoding graph; growth may come from both endpoints and the edge is
/// saturated once `left_growth + right_growth == weight`
#[derive(Derivative)]
#[derivative(Debug)]
pub struct Edge {
    /// global edge index, not necessary the index in [`DualModuleSerial::edges`]
    pub edge_index: EdgeIndex,
    /// total weight of this edge
    pub weight: Weight,
    /// left vertex (always with smaller index for consistency)
    #[derivative(Debug = "ignore")]
    pub left: VertexWeak,
    /// right vertex (always with larger index for consistency)
    #[derivative(Debug = "ignore")]
    pub right: VertexWeak,
    /// growth from the left point
    pub left_growth: Weight,
    /// growth from the right point
    pub right_growth: Weight,
    /// left active tree node (if applicable)
    pub left_dual_node: Option<DualNodeInternalWeak>,
    /// left grandson node: must be a syndrome node
    pub left_grandson_dual_node: Option<DualNodeInternalWeak>,
    /// right active tree node (if applicable)
    pub right_dual_node: Option<DualNodeInternalWeak>,
    /// right grandson node: must be a syndrome node
    pub right_grandson_dual_node: Option<DualNodeInternalWeak>,
    /// for fast clear
    pub timestamp: FastClearTimestamp,
    /// deduplicate edge in a boundary
    pub dedup_timestamp: (FastClearTimestamp, FastClearTimestamp),
}
179
/// shared-ownership pointer to an [`Edge`], with fast-clear support
pub type EdgePtr = FastClearArcManualSafeLockDangerous<Edge>;
/// weak counterpart of [`EdgePtr`]
pub type EdgeWeak = FastClearWeakManualSafeLockDangerous<Edge>;
182
183impl std::fmt::Debug for EdgePtr {
184    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
185        let edge = self.read_recursive_force();
186        write!(f, "{}", edge.edge_index)
187    }
188}
189
190impl std::fmt::Debug for EdgeWeak {
191    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
192        let edge_ptr = self.upgrade_force();
193        let edge = edge_ptr.read_recursive_force();
194        write!(f, "{}", edge.edge_index)
195    }
196}
197
198impl DualModuleImpl for DualModuleSerial {
    /// initialize the dual module, which is supposed to be reused for multiple decoding tasks with the same structure
    #[allow(clippy::unnecessary_cast)]
    fn new_empty(initializer: &SolverInitializer) -> Self {
        let active_timestamp = 0;
        // create vertices, all initially non-virtual and non-defect with empty adjacency
        let vertices: Vec<VertexPtr> = (0..initializer.vertex_num)
            .map(|vertex_index| {
                VertexPtr::new_value(Vertex {
                    vertex_index,
                    is_virtual: false,
                    is_defect: false,
                    mirror_unit: None,
                    edges: Vec::new(),
                    propagated_dual_node: None,
                    propagated_grandson_dual_node: None,
                    timestamp: active_timestamp,
                })
            })
            .collect();
        // set virtual vertices (those that can be matched an arbitrary number of times)
        for &virtual_vertex in initializer.virtual_vertices.iter() {
            let mut vertex = vertices[virtual_vertex as usize].write(active_timestamp);
            vertex.is_virtual = true;
        }
        // set edges, validating the initializer and linking each edge into both endpoints' adjacency lists
        let mut edges = Vec::<EdgePtr>::new();
        for &(i, j, weight) in initializer.weighted_edges.iter() {
            assert_ne!(i, j, "invalid edge from and to the same vertex {}", i);
            // weights must be even so two growing nodes can always meet exactly in the middle of an edge
            assert!(
                weight % 2 == 0,
                "edge ({}, {}) has odd weight value; weight should be even",
                i,
                j
            );
            assert!(weight >= 0, "edge ({}, {}) is negative-weighted", i, j);
            assert!(
                i < initializer.vertex_num,
                "edge ({}, {}) connected to an invalid vertex {}",
                i,
                j,
                i
            );
            assert!(
                j < initializer.vertex_num,
                "edge ({}, {}) connected to an invalid vertex {}",
                i,
                j,
                j
            );
            // normalize endpoint order: `left` always holds the smaller vertex index
            let left = VertexIndex::min(i, j);
            let right = VertexIndex::max(i, j);
            let edge_ptr = EdgePtr::new_value(Edge {
                edge_index: edges.len() as EdgeIndex,
                weight,
                left: vertices[left as usize].downgrade(),
                right: vertices[right as usize].downgrade(),
                left_growth: 0,
                right_growth: 0,
                left_dual_node: None,
                left_grandson_dual_node: None,
                right_dual_node: None,
                right_grandson_dual_node: None,
                timestamp: 0,
                dedup_timestamp: (0, 0),
            });
            // register the edge on both endpoints
            for (a, b) in [(i, j), (j, i)] {
                lock_write!(vertex, vertices[a as usize], active_timestamp);
                debug_assert!({
                    // O(N^2) sanity check, debug mode only (actually this bug is not critical, only the shorter edge will take effect)
                    let mut no_duplicate = true;
                    for edge_weak in vertex.edges.iter() {
                        let edge_ptr = edge_weak.upgrade_force();
                        let edge = edge_ptr.read_recursive(active_timestamp);
                        if edge.left == vertices[b as usize].downgrade() || edge.right == vertices[b as usize].downgrade() {
                            no_duplicate = false;
                            eprintln!("duplicated edge between {} and {} with weight w1 = {} and w2 = {}, consider merge them into a single edge", i, j, weight, edge.weight);
                            break;
                        }
                    }
                    no_duplicate
                });
                vertex.edges.push(edge_ptr.downgrade());
            }
            edges.push(edge_ptr);
        }
        Self {
            vertices,
            nodes: vec![],
            nodes_length: 0,
            edges,
            active_timestamp: 0,
            vertex_num: initializer.vertex_num,
            edge_num: initializer.weighted_edges.len(),
            owning_range: VertexRange::new(0, initializer.vertex_num),
            unit_module_info: None, // disabled; only set when used as a partitioned unit
            active_list: vec![],
            current_cycle: 0,
            edge_modifier: EdgeWeightModifier::new(),
            edge_dedup_timestamp: 0,
            sync_requests: vec![],
            updated_boundary: vec![],
            propagating_vertices: vec![],
        }
    }
303
304    /// clear all growth and existing dual nodes
305    #[allow(clippy::unnecessary_cast)]
306    fn clear(&mut self) {
307        // recover erasure edges first
308        while self.edge_modifier.has_modified_edges() {
309            let (edge_index, original_weight) = self.edge_modifier.pop_modified_edge();
310            let edge_ptr = &self.edges[edge_index as usize];
311            let mut edge = edge_ptr.write(self.active_timestamp);
312            edge.weight = original_weight;
313        }
314        self.clear_graph();
315        self.nodes_length = 0; // without actually dropping all the nodes, to enable constant time clear
316        if let Some(unit_module_info) = self.unit_module_info.as_mut() {
317            unit_module_info.owning_dual_range = VertexRange::new(0, 0);
318            unit_module_info.dual_node_pointers = PtrWeakKeyHashMap::<DualNodeWeak, usize>::new();
319        }
320        self.active_list.clear();
321    }
322
    /// add a new dual node from dual module root
    #[allow(clippy::unnecessary_cast)]
    fn add_dual_node(&mut self, dual_node_ptr: &DualNodePtr) {
        self.register_dual_node_ptr(dual_node_ptr);
        let active_timestamp = self.active_timestamp;
        let node = dual_node_ptr.read_recursive();
        let node_index = self.nodes_length as NodeIndex;
        // reuse a stale internal node left over from a previous decoding round if the slot exists:
        // constant-time `clear` only resets `nodes_length` and never drops the allocations
        let node_internal_ptr = if node_index < self.nodes.len() as NodeIndex && self.nodes[node_index as usize].is_some() {
            let node_ptr = self.nodes[node_index as usize].take().unwrap();
            let mut node = node_ptr.write();
            node.origin = dual_node_ptr.downgrade();
            node.index = node_index;
            node.dual_variable = 0;
            node.boundary.clear();
            node.overgrown_stack.clear();
            node.last_visit_cycle = 0;
            drop(node);
            node_ptr
        } else {
            DualNodeInternalPtr::new_value(DualNodeInternal {
                origin: dual_node_ptr.downgrade(),
                index: node_index,
                dual_variable: 0,
                boundary: Vec::new(),
                overgrown_stack: Vec::new(),
                last_visit_cycle: 0,
            })
        };
        {
            // build the boundary of the new node, depending on whether it's a blossom or a defect vertex
            let boundary = &mut node_internal_ptr.write().boundary;
            match &node.class {
                DualNodeClass::Blossom { nodes_circle, .. } => {
                    // copy all the boundary edges and modify edge belongings
                    for dual_node_weak in nodes_circle.iter() {
                        let dual_node_ptr = dual_node_weak.upgrade_force();
                        if self.unit_module_info.is_none() {
                            // it's required to do it in the outer loop and synchronize everybody, so no need to do it here
                            self.prepare_dual_node_growth(&dual_node_ptr, false);
                            // prepare all nodes in shrinking mode for consistency
                        }
                        if let Some(dual_node_internal_ptr) = self.get_dual_node_internal_ptr_optional(&dual_node_ptr) {
                            let dual_node_internal = dual_node_internal_ptr.read_recursive();
                            // transfer ownership of each child's boundary edge to the new blossom
                            for (is_left, edge_weak) in dual_node_internal.boundary.iter() {
                                let edge_ptr = edge_weak.upgrade_force();
                                boundary.push((*is_left, edge_weak.clone()));
                                let mut edge = edge_ptr.write(active_timestamp);
                                debug_assert!(
                                    if *is_left {
                                        edge.left_dual_node.is_some()
                                    } else {
                                        edge.right_dual_node.is_some()
                                    },
                                    "dual node of edge should be some"
                                );
                                debug_assert!(
                                    if *is_left {
                                        edge.left_dual_node == Some(dual_node_internal_ptr.downgrade())
                                    } else {
                                        edge.right_dual_node == Some(dual_node_internal_ptr.downgrade())
                                    },
                                    "edge belonging"
                                );
                                if *is_left {
                                    edge.left_dual_node = Some(node_internal_ptr.downgrade());
                                } else {
                                    edge.right_dual_node = Some(node_internal_ptr.downgrade());
                                }
                            }
                        } else {
                            debug_assert!(
                                self.unit_module_info.is_some(),
                                "only partitioned could ignore some of its children"
                            );
                        }
                    }
                }
                DualNodeClass::DefectVertex { defect_index } => {
                    let vertex_index = self
                        .get_vertex_index(*defect_index)
                        .expect("syndrome not belonging to this dual module");
                    let vertex_ptr = &self.vertices[vertex_index];
                    vertex_ptr.dynamic_clear(active_timestamp);
                    let mut vertex = vertex_ptr.write(active_timestamp);
                    vertex.propagated_dual_node = Some(node_internal_ptr.downgrade());
                    vertex.propagated_grandson_dual_node = Some(node_internal_ptr.downgrade());
                    vertex.is_defect = true;
                    // the initial boundary of a defect node is every edge incident to the defect vertex
                    for edge_weak in vertex.edges.iter() {
                        let edge_ptr = edge_weak.upgrade_force();
                        edge_ptr.dynamic_clear(active_timestamp);
                        let mut edge = edge_ptr.write(active_timestamp);
                        let is_left = vertex_ptr.downgrade() == edge.left;
                        debug_assert!(
                            if is_left {
                                edge.left_dual_node.is_none()
                            } else {
                                edge.right_dual_node.is_none()
                            },
                            "dual node of edge should be none"
                        );
                        if is_left {
                            edge.left_dual_node = Some(node_internal_ptr.downgrade());
                            edge.left_grandson_dual_node = Some(node_internal_ptr.downgrade());
                        } else {
                            edge.right_dual_node = Some(node_internal_ptr.downgrade());
                            edge.right_grandson_dual_node = Some(node_internal_ptr.downgrade());
                        }
                        boundary.push((is_left, edge_weak.clone()));
                    }
                }
            }
        }
        // newly added nodes are always active; `nodes` grows lazily to hold the new slot
        self.active_list.push(node_internal_ptr.downgrade());
        self.nodes_length += 1;
        if self.nodes.len() < self.nodes_length {
            self.nodes.push(None);
        }
        self.nodes[node_index as usize] = Some(node_internal_ptr);
    }
441
    /// remove a blossom node whose dual variable has shrunk to zero, restoring
    /// boundary-edge ownership back to its children nodes
    #[allow(clippy::unnecessary_cast)]
    fn remove_blossom(&mut self, dual_node_ptr: DualNodePtr) {
        let active_timestamp = self.active_timestamp;
        self.prepare_dual_node_growth(&dual_node_ptr, false); // prepare the blossom into shrinking
        let node = dual_node_ptr.read_recursive();
        let dual_node_internal_ptr = self.get_dual_node_internal_ptr(&dual_node_ptr);
        let dual_node_internal = dual_node_internal_ptr.read_recursive();
        debug_assert_eq!(
            dual_node_internal.dual_variable, 0,
            "only blossom with dual variable = 0 can be safely removed"
        );
        debug_assert!(
            dual_node_internal.overgrown_stack.is_empty(),
            "removing a blossom with non-empty overgrown stack forbidden"
        );
        let node_idx = dual_node_internal.index;
        debug_assert!(
            self.nodes[node_idx as usize].is_some(),
            "blossom may have already been removed, do not call twice"
        );
        debug_assert!(
            self.nodes[node_idx as usize].as_ref().unwrap() == &dual_node_internal_ptr,
            "the blossom doesn't belong to this DualModuleInterface"
        );
        // recover edge belongings: first detach the blossom from every edge on its boundary
        for (is_left, edge_weak) in dual_node_internal.boundary.iter() {
            let edge_ptr = edge_weak.upgrade_force();
            let mut edge = edge_ptr.write(active_timestamp);
            debug_assert!(
                if *is_left {
                    edge.left_dual_node.is_some()
                } else {
                    edge.right_dual_node.is_some()
                },
                "dual node of edge should be some"
            );
            if *is_left {
                edge.left_dual_node = None;
            } else {
                edge.right_dual_node = None;
            }
        }
        // then re-attach each child node of the circle to its own boundary edges
        if let DualNodeClass::Blossom { nodes_circle, .. } = &node.class {
            for circle_dual_node_weak in nodes_circle.iter() {
                let circle_dual_node_ptr = circle_dual_node_weak.upgrade_force();
                if let Some(circle_dual_node_internal_ptr) = self.get_dual_node_internal_ptr_optional(&circle_dual_node_ptr)
                {
                    let circle_dual_node_internal = circle_dual_node_internal_ptr.read_recursive();
                    for (is_left, edge_weak) in circle_dual_node_internal.boundary.iter() {
                        let edge_ptr = edge_weak.upgrade_force();
                        let mut edge = edge_ptr.write(active_timestamp);
                        debug_assert!(
                            if *is_left {
                                edge.left_dual_node.is_none()
                            } else {
                                edge.right_dual_node.is_none()
                            },
                            "dual node of edge should be none"
                        );
                        if *is_left {
                            edge.left_dual_node = Some(circle_dual_node_internal_ptr.downgrade());
                        } else {
                            edge.right_dual_node = Some(circle_dual_node_internal_ptr.downgrade());
                        }
                    }
                } else {
                    debug_assert!(self.unit_module_info.is_some(), "only happens if partitioned");
                }
            }
        } else {
            // caller guarantees this is a blossom node
            unreachable!()
        }
        self.nodes[node_idx as usize] = None; // simply remove this blossom node
    }
516
517    fn set_grow_state(&mut self, dual_node_ptr: &DualNodePtr, grow_state: DualNodeGrowState) {
518        let dual_node = dual_node_ptr.read_recursive();
519        if dual_node.grow_state == DualNodeGrowState::Stay && grow_state != DualNodeGrowState::Stay {
520            let dual_node_internal_ptr = self.get_dual_node_internal_ptr(dual_node_ptr);
521            self.active_list.push(dual_node_internal_ptr.downgrade())
522        }
523    }
524
525    #[allow(clippy::collapsible_else_if)]
526    fn compute_maximum_update_length_dual_node(
527        &mut self,
528        dual_node_ptr: &DualNodePtr,
529        is_grow: bool,
530        simultaneous_update: bool,
531    ) -> MaxUpdateLength {
532        let active_timestamp = self.active_timestamp;
533        if !simultaneous_update {
534            // when `simultaneous_update` is set, it's assumed that all nodes are prepared to grow or shrink
535            // this is because if we dynamically prepare them, it would be inefficient
536            self.prepare_dual_node_growth(dual_node_ptr, is_grow);
537        }
538        let mut max_length_abs = Weight::MAX;
539        let dual_node_internal_ptr = self.get_dual_node_internal_ptr(dual_node_ptr);
540        let dual_node_internal = dual_node_internal_ptr.read_recursive();
541        if !is_grow {
542            if dual_node_internal.dual_variable == 0 {
543                let dual_node = dual_node_ptr.read_recursive();
544                match dual_node.class {
545                    DualNodeClass::Blossom { .. } => return MaxUpdateLength::BlossomNeedExpand(dual_node_ptr.clone()),
546                    DualNodeClass::DefectVertex { defect_index } => {
547                        // try to report Conflicting event or give a VertexShrinkStop with potential conflicting node
548                        if let Some(vertex_index) = self.get_vertex_index(defect_index) {
549                            // since propagated node is never removed, this event could happen with no vertex
550                            let vertex_ptr = &self.vertices[vertex_index];
551                            let vertex = vertex_ptr.read_recursive(active_timestamp);
552                            let mut potential_conflict: Option<(DualNodePtr, DualNodePtr)> = None;
553                            for edge_weak in vertex.edges.iter() {
554                                let edge_ptr = edge_weak.upgrade_force();
555                                let edge = edge_ptr.read_recursive(active_timestamp);
556                                let is_left = vertex_ptr.downgrade() == edge.left;
557                                let remaining_length = edge.weight - edge.left_growth - edge.right_growth;
558                                if remaining_length == 0 {
559                                    let peer_dual_node = if is_left {
560                                        &edge.right_dual_node
561                                    } else {
562                                        &edge.left_dual_node
563                                    };
564                                    if let Some(peer_dual_node_ptr) = peer_dual_node {
565                                        let peer_grandson_dual_node = if is_left {
566                                            &edge.right_grandson_dual_node
567                                        } else {
568                                            &edge.left_grandson_dual_node
569                                        };
570                                        let peer_dual_node_ptr =
571                                            peer_dual_node_ptr.upgrade_force().read_recursive().origin.upgrade_force();
572                                        let peer_grandson_dual_node_ptr = peer_grandson_dual_node
573                                            .as_ref()
574                                            .unwrap()
575                                            .upgrade_force()
576                                            .read_recursive()
577                                            .origin
578                                            .upgrade_force();
579                                        if peer_dual_node_ptr.read_recursive().grow_state == DualNodeGrowState::Grow {
580                                            if let Some((other_dual_node_ptr, other_grandson_dual_node)) =
581                                                &potential_conflict
582                                            {
583                                                if &peer_dual_node_ptr != other_dual_node_ptr {
584                                                    return MaxUpdateLength::Conflicting(
585                                                        (other_dual_node_ptr.clone(), other_grandson_dual_node.clone()),
586                                                        (peer_dual_node_ptr, peer_grandson_dual_node_ptr),
587                                                    );
588                                                }
589                                            } else {
590                                                potential_conflict = Some((peer_dual_node_ptr, peer_grandson_dual_node_ptr));
591                                            }
592                                        }
593                                    }
594                                }
595                            }
596                            return MaxUpdateLength::VertexShrinkStop((dual_node_ptr.clone(), potential_conflict));
597                        } else {
598                            return MaxUpdateLength::VertexShrinkStop((dual_node_ptr.clone(), None));
599                        }
600                    }
601                }
602            }
603            if !dual_node_internal.overgrown_stack.is_empty() {
604                let last_index = dual_node_internal.overgrown_stack.len() - 1;
605                let (_, overgrown) = &dual_node_internal.overgrown_stack[last_index];
606                max_length_abs = std::cmp::min(max_length_abs, *overgrown);
607            }
608            max_length_abs = std::cmp::min(max_length_abs, dual_node_internal.dual_variable);
609        }
610        for (is_left, edge_weak) in dual_node_internal.boundary.iter() {
611            let edge_ptr = edge_weak.upgrade_force();
612            let is_left = *is_left;
613            let edge = edge_ptr.read_recursive(active_timestamp);
614            if is_grow {
615                // first check if both side belongs to the same tree node, if so, no constraint on this edge
616                let peer_dual_node_internal_ptr: Option<DualNodeInternalPtr> = if is_left {
617                    edge.right_dual_node.as_ref().map(|ptr| ptr.upgrade_force())
618                } else {
619                    edge.left_dual_node.as_ref().map(|ptr| ptr.upgrade_force())
620                };
621                match peer_dual_node_internal_ptr {
622                    Some(peer_dual_node_internal_ptr) => {
623                        if peer_dual_node_internal_ptr == dual_node_internal_ptr {
624                            continue;
625                        } else {
626                            let peer_dual_node_internal = peer_dual_node_internal_ptr.read_recursive();
627                            let peer_dual_node_ptr = peer_dual_node_internal.origin.upgrade_force();
628                            let peer_dual_node = peer_dual_node_ptr.read_recursive();
629                            let remaining_length = edge.weight - edge.left_growth - edge.right_growth;
630                            let local_max_length_abs = match peer_dual_node.grow_state {
631                                DualNodeGrowState::Grow => {
632                                    debug_assert!(remaining_length % 2 == 0, "there is odd gap between two growing nodes, please make sure all weights are even numbers");
633                                    remaining_length / 2
634                                }
635                                DualNodeGrowState::Shrink => {
636                                    // Yue 2022.9.5: remove Conflicting event detection here, move it to the 0-dual syndrome node
637                                    continue;
638                                }
639                                DualNodeGrowState::Stay => remaining_length,
640                            };
641                            if local_max_length_abs == 0 {
642                                let peer_grandson_ptr = if is_left {
643                                    edge.right_grandson_dual_node
644                                        .as_ref()
645                                        .map(|ptr| ptr.upgrade_force())
646                                        .unwrap()
647                                        .read_recursive()
648                                        .origin
649                                        .upgrade_force()
650                                } else {
651                                    edge.left_grandson_dual_node
652                                        .as_ref()
653                                        .map(|ptr| ptr.upgrade_force())
654                                        .unwrap()
655                                        .read_recursive()
656                                        .origin
657                                        .upgrade_force()
658                                };
659                                let grandson_ptr = if is_left {
660                                    edge.left_grandson_dual_node
661                                        .as_ref()
662                                        .map(|ptr| ptr.upgrade_force())
663                                        .unwrap()
664                                        .read_recursive()
665                                        .origin
666                                        .upgrade_force()
667                                } else {
668                                    edge.right_grandson_dual_node
669                                        .as_ref()
670                                        .map(|ptr| ptr.upgrade_force())
671                                        .unwrap()
672                                        .read_recursive()
673                                        .origin
674                                        .upgrade_force()
675                                };
676                                return MaxUpdateLength::Conflicting(
677                                    (peer_dual_node_ptr.clone(), peer_grandson_ptr),
678                                    (dual_node_ptr.clone(), grandson_ptr),
679                                );
680                            }
681                            max_length_abs = std::cmp::min(max_length_abs, local_max_length_abs);
682                        }
683                    }
684                    None => {
685                        let local_max_length_abs = edge.weight - edge.left_growth - edge.right_growth;
686                        if local_max_length_abs == 0 {
687                            // check if peer is virtual node
688                            let peer_vertex_ptr = if is_left {
689                                edge.right.upgrade_force()
690                            } else {
691                                edge.left.upgrade_force()
692                            };
693                            let peer_vertex = peer_vertex_ptr.read_recursive(active_timestamp);
694                            if peer_vertex.is_virtual || peer_vertex.is_mirror_blocked() {
695                                let grandson_ptr = if is_left {
696                                    edge.left_grandson_dual_node
697                                        .as_ref()
698                                        .map(|ptr| ptr.upgrade_force())
699                                        .unwrap()
700                                        .read_recursive()
701                                        .origin
702                                        .upgrade_force()
703                                } else {
704                                    edge.right_grandson_dual_node
705                                        .as_ref()
706                                        .map(|ptr| ptr.upgrade_force())
707                                        .unwrap()
708                                        .read_recursive()
709                                        .origin
710                                        .upgrade_force()
711                                };
712                                return MaxUpdateLength::TouchingVirtual(
713                                    (dual_node_ptr.clone(), grandson_ptr),
714                                    (peer_vertex.vertex_index, peer_vertex.is_mirror_blocked()),
715                                );
716                            } else {
717                                println!("edge: {edge_ptr:?}, peer_vertex_ptr: {peer_vertex_ptr:?}");
718                                unreachable!("this edge should've been removed from boundary because it's already fully grown, and it's peer vertex is not virtual")
719                            }
720                        }
721                        max_length_abs = std::cmp::min(max_length_abs, local_max_length_abs);
722                    }
723                }
724            } else {
725                if is_left {
726                    if edge.left_growth == 0 {
727                        unreachable!()
728                    }
729                    max_length_abs = std::cmp::min(max_length_abs, edge.left_growth);
730                } else {
731                    if edge.right_growth == 0 {
732                        unreachable!()
733                    }
734                    max_length_abs = std::cmp::min(max_length_abs, edge.right_growth);
735                }
736            }
737        }
738        MaxUpdateLength::NonZeroGrow((max_length_abs, dual_node_internal.boundary.is_empty()))
739    }
740
741    fn compute_maximum_update_length(&mut self) -> GroupMaxUpdateLength {
742        // first prepare all nodes for individual grow or shrink; Stay nodes will be prepared to shrink in order to minimize effect on others
743        self.prepare_all();
744        // after preparing all the growth, there should be no sync requests
745        debug_assert!(
746            self.sync_requests.is_empty(),
747            "no sync requests should arise here; make sure to deal with all sync requests before growing"
748        );
749        let mut group_max_update_length = GroupMaxUpdateLength::new();
750        for i in 0..self.active_list.len() {
751            let dual_node_ptr = {
752                let internal_dual_node_ptr = self.active_list[i].upgrade_force();
753                let dual_node_internal = internal_dual_node_ptr.read_recursive();
754                dual_node_internal.origin.upgrade_force()
755            };
756            let dual_node = dual_node_ptr.read_recursive();
757            let is_grow = match dual_node.grow_state {
758                DualNodeGrowState::Grow => true,
759                DualNodeGrowState::Shrink => false,
760                DualNodeGrowState::Stay => continue,
761            };
762            drop(dual_node); // unlock, otherwise it causes deadlock when updating the dual node
763            let max_update_length = self.compute_maximum_update_length_dual_node(&dual_node_ptr, is_grow, true);
764            group_max_update_length.add(max_update_length);
765        }
766        group_max_update_length
767    }
768
    /// Grow (positive `length`) or shrink (negative `length`) a single dual node.
    ///
    /// Re-prepares the node's boundary for the intended direction, updates the node's
    /// dual variable (and the top of its overgrown stack, if any), then applies the
    /// growth to every boundary edge. Panics — in release builds as well — when an edge
    /// ends up over-grown against a foreign node or under-grown, because either case
    /// indicates an inconsistent boundary.
    fn grow_dual_node(&mut self, dual_node_ptr: &DualNodePtr, length: Weight) {
        let active_timestamp = self.active_timestamp;
        if length == 0 {
            eprintln!("[warning] calling `grow_dual_node` with zero length, nothing to do");
            return;
        }
        // make sure the boundary matches the direction of change before touching edges
        self.prepare_dual_node_growth(dual_node_ptr, length > 0);
        let dual_node_internal_ptr = self.get_dual_node_internal_ptr(dual_node_ptr);
        {
            // update node dual variable and do sanity check
            let mut dual_node_internal = dual_node_internal_ptr.write();
            dual_node_internal.dual_variable += length;
            debug_assert!(
                dual_node_internal.dual_variable >= 0,
                "shrinking to negative dual variable is forbidden"
            );
            // update over-grown vertices
            if !dual_node_internal.overgrown_stack.is_empty() {
                // only the most recent overgrown entry absorbs the change
                let last_index = dual_node_internal.overgrown_stack.len() - 1;
                let (_, overgrown) = &mut dual_node_internal.overgrown_stack[last_index];
                if length < 0 {
                    debug_assert!(*overgrown >= -length, "overgrown vertex cannot shrink so much");
                }
                *overgrown += length;
            }
        }
        // apply the growth to every boundary edge, then validate each edge's total growth
        let dual_node_internal = dual_node_internal_ptr.read_recursive();
        for (is_left, edge_weak) in dual_node_internal.boundary.iter() {
            let edge_ptr = edge_weak.upgrade_force();
            let is_left = *is_left;
            let (growth, weight) = {
                // minimize writer lock acquisition
                let mut edge = edge_ptr.write(active_timestamp);
                if is_left {
                    edge.left_growth += length;
                    debug_assert!(edge.left_growth >= 0, "negative growth forbidden");
                } else {
                    edge.right_growth += length;
                    debug_assert!(edge.right_growth >= 0, "negative growth forbidden");
                }
                (edge.left_growth + edge.right_growth, edge.weight)
            };
            let edge = edge_ptr.read_recursive(active_timestamp);
            if growth > weight {
                // first check for if both side belongs to the same dual node, if so, it's ok
                let dual_node_internal_ptr_2: &Option<DualNodeInternalWeak> = if is_left {
                    &edge.right_dual_node
                } else {
                    &edge.left_dual_node
                };
                if dual_node_internal_ptr_2.is_none()
                    || dual_node_internal_ptr_2.as_ref().unwrap() != &dual_node_internal_ptr.downgrade()
                {
                    // over-growing against a different (or no) node is a broken invariant
                    let left_ptr = edge.left.upgrade_force();
                    let right_ptr = edge.right.upgrade_force();
                    panic!(
                        "over-grown edge ({},{}): {}/{}",
                        left_ptr.read_recursive(active_timestamp).vertex_index,
                        right_ptr.read_recursive(active_timestamp).vertex_index,
                        growth,
                        weight
                    );
                }
            } else if growth < 0 {
                // total growth can never be negative regardless of ownership
                let left_ptr = edge.left.upgrade_force();
                let right_ptr = edge.right.upgrade_force();
                panic!(
                    "under-grown edge ({},{}): {}/{}",
                    left_ptr.read_recursive(active_timestamp).vertex_index,
                    right_ptr.read_recursive(active_timestamp).vertex_index,
                    growth,
                    weight
                );
            }
        }
    }
845
846    fn grow(&mut self, length: Weight) {
847        debug_assert!(length > 0, "only positive growth is supported");
848        self.renew_active_list();
849        // first handle shrinks and then grow, to make sure they don't conflict
850        for i in 0..self.active_list.len() {
851            let dual_node_ptr = {
852                let internal_dual_node_ptr = self.active_list[i].upgrade_force();
853                let dual_node_internal = internal_dual_node_ptr.read_recursive();
854                dual_node_internal.origin.upgrade_force()
855            };
856            let dual_node = dual_node_ptr.read_recursive();
857            if matches!(dual_node.grow_state, DualNodeGrowState::Shrink) {
858                self.grow_dual_node(&dual_node_ptr, -length);
859            }
860        }
861        // then grow those needed
862        for i in 0..self.active_list.len() {
863            let dual_node_ptr = {
864                let internal_dual_node_ptr = self.active_list[i].upgrade_force();
865                let dual_node_internal = internal_dual_node_ptr.read_recursive();
866                dual_node_internal.origin.upgrade_force()
867            };
868            let dual_node = dual_node_ptr.read_recursive();
869            if matches!(dual_node.grow_state, DualNodeGrowState::Grow) {
870                self.grow_dual_node(&dual_node_ptr, length);
871            }
872        }
873    }
874
875    #[allow(clippy::unnecessary_cast)]
876    fn load_edge_modifier(&mut self, edge_modifier: &[(EdgeIndex, Weight)]) {
877        debug_assert!(
878            !self.edge_modifier.has_modified_edges(),
879            "the current erasure modifier is not clean, probably forget to clean the state?"
880        );
881        let active_timestamp = self.active_timestamp;
882        for (edge_index, target_weight) in edge_modifier.iter() {
883            let edge_ptr = &self.edges[*edge_index as usize];
884            edge_ptr.dynamic_clear(active_timestamp); // may visit stale edges
885            let mut edge = edge_ptr.write(active_timestamp);
886            let original_weight = edge.weight;
887            edge.weight = *target_weight;
888            self.edge_modifier.push_modified_edge(*edge_index, original_weight);
889        }
890    }
891
892    fn prepare_all(&mut self) -> &mut Vec<SyncRequest> {
893        debug_assert!(
894            self.sync_requests.is_empty(),
895            "make sure to remove all sync requests before prepare to avoid out-dated requests"
896        );
897        self.renew_active_list();
898        for i in 0..self.active_list.len() {
899            let dual_node_ptr = {
900                if let Some(internal_dual_node_ptr) = self.active_list[i].upgrade() {
901                    let dual_node_internal = internal_dual_node_ptr.read_recursive();
902                    dual_node_internal.origin.upgrade_force()
903                } else {
904                    continue; // a blossom could be in the active list even after it's been removed
905                }
906            };
907            let dual_node = dual_node_ptr.read_recursive();
908            match dual_node.grow_state {
909                DualNodeGrowState::Grow => {}
910                DualNodeGrowState::Shrink => {
911                    self.prepare_dual_node_growth(&dual_node_ptr, false);
912                }
913                DualNodeGrowState::Stay => {} // do not touch, Stay nodes might have become a part of a blossom, so it's not safe to change the boundary
914            };
915        }
916        for i in 0..self.active_list.len() {
917            let dual_node_ptr = {
918                if let Some(internal_dual_node_ptr) = self.active_list[i].upgrade() {
919                    let dual_node_internal = internal_dual_node_ptr.read_recursive();
920                    dual_node_internal.origin.upgrade_force()
921                } else {
922                    continue; // a blossom could be in the active list even after it's been removed
923                }
924            };
925            let dual_node = dual_node_ptr.read_recursive();
926            match dual_node.grow_state {
927                DualNodeGrowState::Grow => {
928                    self.prepare_dual_node_growth(&dual_node_ptr, true);
929                }
930                DualNodeGrowState::Shrink => {}
931                DualNodeGrowState::Stay => {} // do not touch, Stay nodes might have become a part of a blossom, so it's not safe to change the boundary
932            };
933        }
934        &mut self.sync_requests
935    }
936
937    fn prepare_nodes_shrink(&mut self, nodes_circle: &[DualNodePtr]) -> &mut Vec<SyncRequest> {
938        debug_assert!(
939            self.sync_requests.is_empty(),
940            "make sure to remove all sync requests before prepare to avoid out-dated requests"
941        );
942        for dual_node_ptr in nodes_circle.iter() {
943            if self.contains_dual_node(dual_node_ptr) {
944                self.prepare_dual_node_growth(dual_node_ptr, false); // prepare to shrink
945            }
946        }
947        &mut self.sync_requests
948    }
949
    /// Whether this unit owns the given dual node, i.e. it resolves to a local index.
    fn contains_dual_node(&self, dual_node_ptr: &DualNodePtr) -> bool {
        self.get_dual_node_index(dual_node_ptr).is_some()
    }
953
    /// Build a serial dual module that owns only one partition of the decoding graph.
    ///
    /// Creates the owned vertices, marks virtual ones, appends mirrored interface
    /// vertices shared with other units, and wires up all edges. Panics on invalid
    /// input: self-loop edges, odd or negative weights, or edges touching a vertex
    /// that is neither owned nor mirrored.
    #[allow(clippy::unnecessary_cast)]
    fn new_partitioned(partitioned_initializer: &PartitionedSolverInitializer) -> Self {
        let active_timestamp = 0;
        // create vertices
        let mut vertices: Vec<VertexPtr> = partitioned_initializer
            .owning_range
            .iter()
            .map(|vertex_index| {
                VertexPtr::new_value(Vertex {
                    vertex_index,
                    is_virtual: false,
                    is_defect: false,
                    mirror_unit: partitioned_initializer.owning_interface.clone(),
                    edges: Vec::new(),
                    propagated_dual_node: None,
                    propagated_grandson_dual_node: None,
                    timestamp: active_timestamp,
                })
            })
            .collect();
        // set virtual vertices
        for &virtual_vertex in partitioned_initializer.virtual_vertices.iter() {
            // virtual vertices are given by global index; translate to the local offset
            let mut vertex =
                vertices[(virtual_vertex - partitioned_initializer.owning_range.start()) as usize].write(active_timestamp);
            vertex.is_virtual = true;
        }
        // add interface vertices
        let mut mirrored_vertices = HashMap::<VertexIndex, VertexIndex>::new(); // all mirrored vertices mapping to their local indices
        for (mirror_unit, interface_vertices) in partitioned_initializer.interfaces.iter() {
            for (vertex_index, is_virtual) in interface_vertices.iter() {
                // mirrored vertices are appended after the owned ones, so the local
                // index is the current length of `vertices`
                mirrored_vertices.insert(*vertex_index, vertices.len() as VertexIndex);
                vertices.push(VertexPtr::new_value(Vertex {
                    vertex_index: *vertex_index,
                    is_virtual: *is_virtual, // interface vertices are always virtual at the beginning
                    is_defect: false,
                    mirror_unit: Some(mirror_unit.clone()),
                    edges: Vec::new(),
                    propagated_dual_node: None,
                    propagated_grandson_dual_node: None,
                    timestamp: active_timestamp,
                }))
            }
        }
        // set edges
        let mut edges = Vec::<EdgePtr>::new();
        for &(i, j, weight, edge_index) in partitioned_initializer.weighted_edges.iter() {
            assert_ne!(i, j, "invalid edge from and to the same vertex {}", i);
            // even weights let the growth logic split any gap evenly between two nodes
            assert!(
                weight % 2 == 0,
                "edge ({}, {}) has odd weight value; weight should be even",
                i,
                j
            );
            assert!(weight >= 0, "edge ({}, {}) is negative-weighted", i, j);
            debug_assert!(
                partitioned_initializer.owning_range.contains(i) || mirrored_vertices.contains_key(&i),
                "edge ({}, {}) connected to an invalid vertex {}",
                i,
                j,
                i
            );
            debug_assert!(
                partitioned_initializer.owning_range.contains(j) || mirrored_vertices.contains_key(&j),
                "edge ({}, {}) connected to an invalid vertex {}",
                i,
                j,
                j
            );
            // normalize endpoint order, then translate each global index to a local one:
            // owned vertices use the range offset, mirrored ones use the lookup table
            let left = VertexIndex::min(i, j);
            let right = VertexIndex::max(i, j);
            let left_index = if partitioned_initializer.owning_range.contains(left) {
                left - partitioned_initializer.owning_range.start()
            } else {
                mirrored_vertices[&left]
            };
            let right_index = if partitioned_initializer.owning_range.contains(right) {
                right - partitioned_initializer.owning_range.start()
            } else {
                mirrored_vertices[&right]
            };
            let edge_ptr = EdgePtr::new_value(Edge {
                edge_index,
                weight,
                left: vertices[left_index as usize].downgrade(),
                right: vertices[right_index as usize].downgrade(),
                left_growth: 0,
                right_growth: 0,
                left_dual_node: None,
                left_grandson_dual_node: None,
                right_dual_node: None,
                right_grandson_dual_node: None,
                timestamp: 0,
                dedup_timestamp: (0, 0),
            });
            // register the edge on both endpoints
            for (a, b) in [(left_index, right_index), (right_index, left_index)] {
                lock_write!(vertex, vertices[a as usize], active_timestamp);
                debug_assert!({
                    // O(N^2) sanity check, debug mode only (actually this bug is not critical, only the shorter edge will take effect)
                    let mut no_duplicate = true;
                    for edge_weak in vertex.edges.iter() {
                        let edge_ptr = edge_weak.upgrade_force();
                        let edge = edge_ptr.read_recursive(active_timestamp);
                        if edge.left == vertices[b as usize].downgrade() || edge.right == vertices[b as usize].downgrade() {
                            no_duplicate = false;
                            eprintln!("duplicated edge between {} and {} with weight w1 = {} and w2 = {}, consider merge them into a single edge", i, j, weight, edge.weight);
                            break;
                        }
                    }
                    no_duplicate
                });
                vertex.edges.push(edge_ptr.downgrade());
            }
            edges.push(edge_ptr);
        }
        Self {
            vertices,
            nodes: vec![],
            nodes_length: 0,
            edges,
            active_timestamp: 0,
            vertex_num: partitioned_initializer.vertex_num,
            edge_num: partitioned_initializer.edge_num,
            owning_range: partitioned_initializer.owning_range,
            unit_module_info: Some(UnitModuleInfo {
                unit_index: partitioned_initializer.unit_index,
                mirrored_vertices,
                // the dual range is empty until biased by the parent module
                owning_dual_range: VertexRange::new(0, 0),
                dual_node_pointers: PtrWeakKeyHashMap::<DualNodeWeak, usize>::new(),
            }),
            active_list: vec![],
            current_cycle: 0,
            edge_modifier: EdgeWeightModifier::new(),
            edge_dedup_timestamp: 0,
            sync_requests: vec![],
            updated_boundary: vec![],
            propagating_vertices: vec![],
        }
    }
1092
    /// Whether this unit hosts the given vertex, either as owned or as a mirrored copy.
    fn contains_vertex(&self, vertex_index: VertexIndex) -> bool {
        self.get_vertex_index(vertex_index).is_some()
    }
1096
1097    fn bias_dual_node_index(&mut self, bias: NodeIndex) {
1098        self.unit_module_info.as_mut().unwrap().owning_dual_range.bias_by(bias);
1099    }
1100
1101    fn execute_sync_event(&mut self, sync_event: &SyncRequest) {
1102        let active_timestamp = self.active_timestamp;
1103        debug_assert!(self.contains_vertex(sync_event.vertex_index));
1104        let propagated_dual_node_internal_ptr =
1105            sync_event
1106                .propagated_dual_node
1107                .as_ref()
1108                .map(|(dual_node_weak, dual_variable, _representative_vertex)| {
1109                    self.get_otherwise_add_dual_node(&dual_node_weak.upgrade_force(), *dual_variable)
1110                });
1111        let propagated_grandson_dual_node_internal_ptr = sync_event.propagated_grandson_dual_node.as_ref().map(
1112            |(dual_node_weak, dual_variable, _representative_vertex)| {
1113                self.get_otherwise_add_dual_node(&dual_node_weak.upgrade_force(), *dual_variable)
1114            },
1115        );
1116        let local_vertex_index = self
1117            .get_vertex_index(sync_event.vertex_index)
1118            .expect("cannot synchronize at a non-existing vertex");
1119        let vertex_ptr = &self.vertices[local_vertex_index];
1120        vertex_ptr.dynamic_clear(active_timestamp);
1121        let mut vertex = vertex_ptr.write(active_timestamp);
1122        if vertex.propagated_dual_node == propagated_dual_node_internal_ptr.as_ref().map(|x| x.downgrade()) {
1123            // actually this may happen: if the same vertex is propagated from two different units with the same distance
1124            // to the closest grandson, it may happen that sync event will conflict on the grandson...
1125            // this conflict doesn't matter anyway: any grandson is good, as long as they're consistent
1126            // assert_eq!(vertex.propagated_grandson_dual_node, propagated_grandson_dual_node_internal_ptr.as_ref().map(|x| x.downgrade()));
1127            vertex.propagated_grandson_dual_node =
1128                propagated_grandson_dual_node_internal_ptr.as_ref().map(|x| x.downgrade());
1129        } else {
1130            // conflict with existing value, action needed
1131            // first vacate the vertex, recovering dual node boundaries accordingly
1132            if let Some(dual_node_internal_weak) = vertex.propagated_dual_node.as_ref() {
1133                debug_assert!(!vertex.is_defect, "cannot vacate a syndrome vertex: it shouldn't happen that a syndrome vertex is updated in any partitioned unit");
1134                let mut updated_boundary = Vec::<(bool, EdgeWeak)>::new();
1135                let dual_node_internal_ptr = dual_node_internal_weak.upgrade_force();
1136                lock_write!(dual_node_internal, dual_node_internal_ptr);
1137                vertex.propagated_dual_node = None;
1138                vertex.propagated_grandson_dual_node = None;
1139                // iterate over the boundary to remove any edges associated with the vertex and also reset those edges
1140                for (is_left, edge_weak) in dual_node_internal.boundary.iter() {
1141                    let is_left = *is_left;
1142                    let edge_ptr = edge_weak.upgrade_force();
1143                    let mut edge = edge_ptr.write(active_timestamp);
1144                    let this_vertex_ptr = if is_left {
1145                        edge.left.upgrade_force()
1146                    } else {
1147                        edge.right.upgrade_force()
1148                    };
1149                    if &this_vertex_ptr == vertex_ptr {
1150                        debug_assert!(
1151                            if is_left {
1152                                edge.left_growth == 0
1153                            } else {
1154                                edge.right_growth == 0
1155                            },
1156                            "vacating a non-boundary vertex is forbidden"
1157                        );
1158                        let dual_node = if is_left {
1159                            &edge.left_dual_node
1160                        } else {
1161                            &edge.right_dual_node
1162                        };
1163                        if let Some(dual_node_weak) = dual_node.as_ref() {
1164                            // sanity check: if exists, must be the same
1165                            debug_assert!(dual_node_weak.upgrade_force() == dual_node_internal_ptr);
1166                            // reset the dual node to be unoccupied
1167                            if is_left {
1168                                edge.left_dual_node = None;
1169                                edge.left_grandson_dual_node = None;
1170                            } else {
1171                                edge.right_dual_node = None;
1172                                edge.right_grandson_dual_node = None;
1173                            }
1174                        }
1175                    } else {
1176                        updated_boundary.push((is_left, edge_weak.clone()));
1177                    }
1178                }
1179                // iterate over the edges around the vertex to add edges to the boundary
1180                for edge_weak in vertex.edges.iter() {
1181                    let edge_ptr = edge_weak.upgrade_force();
1182                    edge_ptr.dynamic_clear(active_timestamp);
1183                    let mut edge = edge_ptr.write(active_timestamp);
1184                    let is_left = vertex_ptr.downgrade() == edge.left;
1185                    let dual_node = if is_left {
1186                        &edge.left_dual_node
1187                    } else {
1188                        &edge.right_dual_node
1189                    };
1190                    if let Some(dual_node_weak) = dual_node.as_ref() {
1191                        // sanity check: if exists, must be the same
1192                        debug_assert!(dual_node_weak.upgrade_force() == dual_node_internal_ptr);
1193                        // need to add to the boundary
1194                        if is_left {
1195                            edge.left_dual_node = None;
1196                            edge.left_grandson_dual_node = None;
1197                        } else {
1198                            edge.right_dual_node = None;
1199                            edge.right_grandson_dual_node = None;
1200                        };
1201                        updated_boundary.push((!is_left, edge_weak.clone()));
1202                    }
1203                }
1204                // update the boundary
1205                std::mem::swap(&mut updated_boundary, &mut dual_node_internal.boundary);
1206            }
1207            // then update the vertex to the dual node
1208            if let Some(dual_node_internal_ptr) = propagated_dual_node_internal_ptr.as_ref() {
1209                // grandson dual node must present
1210                let grandson_dual_node_internal_ptr = propagated_grandson_dual_node_internal_ptr.unwrap();
1211                vertex.propagated_dual_node = Some(dual_node_internal_ptr.downgrade());
1212                vertex.propagated_grandson_dual_node = Some(grandson_dual_node_internal_ptr.downgrade());
1213                lock_write!(dual_node_internal, dual_node_internal_ptr);
1214                for edge_weak in vertex.edges.iter() {
1215                    let edge_ptr = edge_weak.upgrade_force();
1216                    edge_ptr.dynamic_clear(active_timestamp);
1217                    let mut edge = edge_ptr.write(active_timestamp);
1218                    let is_left = vertex_ptr.downgrade() == edge.left;
1219                    if is_left {
1220                        debug_assert_eq!(
1221                            edge.left_dual_node, None,
1222                            "edges incident to the vertex must have been vacated"
1223                        );
1224                        edge.left_dual_node = Some(dual_node_internal_ptr.downgrade());
1225                        edge.left_grandson_dual_node = Some(grandson_dual_node_internal_ptr.downgrade());
1226                    } else {
1227                        debug_assert_eq!(
1228                            edge.right_dual_node, None,
1229                            "edges incident to the vertex must have been vacated"
1230                        );
1231                        edge.right_dual_node = Some(dual_node_internal_ptr.downgrade());
1232                        edge.right_grandson_dual_node = Some(grandson_dual_node_internal_ptr.downgrade());
1233                    }
1234                    dual_node_internal.boundary.push((is_left, edge_weak.clone()));
1235                }
1236                self.active_list.push(dual_node_internal_ptr.downgrade());
1237            }
1238        }
1239    }
1240}
1241
1242/*
1243Implementing fast clear operations
1244*/
1245
1246impl FastClear for Edge {
1247    fn hard_clear(&mut self) {
1248        self.left_growth = 0;
1249        self.right_growth = 0;
1250        self.left_dual_node = None;
1251        self.left_grandson_dual_node = None;
1252        self.right_dual_node = None;
1253        self.right_grandson_dual_node = None;
1254    }
1255
1256    #[inline(always)]
1257    fn get_timestamp(&self) -> FastClearTimestamp {
1258        self.timestamp
1259    }
1260    #[inline(always)]
1261    fn set_timestamp(&mut self, timestamp: FastClearTimestamp) {
1262        self.timestamp = timestamp;
1263    }
1264}
1265
1266impl FastClear for Vertex {
1267    fn hard_clear(&mut self) {
1268        self.is_defect = false;
1269        self.propagated_dual_node = None;
1270        self.propagated_grandson_dual_node = None;
1271    }
1272
1273    #[inline(always)]
1274    fn get_timestamp(&self) -> FastClearTimestamp {
1275        self.timestamp
1276    }
1277    #[inline(always)]
1278    fn set_timestamp(&mut self, timestamp: FastClearTimestamp) {
1279        self.timestamp = timestamp;
1280    }
1281}
1282
1283impl Vertex {
1284    /// if this vertex is a mirrored vertex and it's disabled, it can be temporarily matched just like a virtual vertex
1285    pub fn is_mirror_blocked(&self) -> bool {
1286        if let Some(ref mirror_unit_ptr) = self.mirror_unit {
1287            let mirror_unit_ptr = mirror_unit_ptr.upgrade_force();
1288            let mirror_unit = mirror_unit_ptr.read_recursive();
1289            !mirror_unit.enabled
1290        } else {
1291            false
1292        }
1293    }
1294}
1295
1296impl DualModuleSerial {
1297    /// hard clear all growth (manual call not recommended due to performance drawback)
1298    pub fn hard_clear_graph(&mut self) {
1299        for edge in self.edges.iter() {
1300            let mut edge = edge.write_force();
1301            edge.hard_clear();
1302            edge.timestamp = 0;
1303        }
1304        for vertex in self.vertices.iter() {
1305            let mut vertex = vertex.write_force();
1306            vertex.hard_clear();
1307            vertex.timestamp = 0;
1308        }
1309        self.active_timestamp = 0;
1310    }
1311
1312    /// soft clear all growth
1313    pub fn clear_graph(&mut self) {
1314        if self.active_timestamp == FastClearTimestamp::MAX {
1315            // rarely happens
1316            self.hard_clear_graph();
1317        }
1318        self.active_timestamp += 1; // implicitly clear all edges growth
1319    }
1320
1321    /// necessary for boundary deduplicate when the unit is partitioned
1322    fn hard_clear_edge_dedup(&mut self) {
1323        for edge in self.edges.iter() {
1324            let mut edge = edge.write_force();
1325            edge.dedup_timestamp = (0, 0);
1326        }
1327        self.edge_dedup_timestamp = 0;
1328    }
1329
1330    fn clear_edge_dedup(&mut self) {
1331        if self.edge_dedup_timestamp == FastClearTimestamp::MAX {
1332            // rarely happens
1333            self.hard_clear_edge_dedup();
1334        }
1335        self.edge_dedup_timestamp += 1; // implicitly clear all edges growth
1336    }
1337
1338    /// increment the global cycle so that each node in the active list can be accessed exactly once
1339    #[allow(clippy::unnecessary_cast)]
1340    fn renew_active_list(&mut self) {
1341        if self.current_cycle == usize::MAX {
1342            for i in 0..self.nodes_length {
1343                let internal_dual_node_ptr = {
1344                    match self.nodes[i].as_ref() {
1345                        Some(internal_dual_node_ptr) => internal_dual_node_ptr.clone(),
1346                        _ => continue,
1347                    }
1348                };
1349                let mut internal_dual_node = internal_dual_node_ptr.write();
1350                internal_dual_node.last_visit_cycle = 0;
1351            }
1352            self.current_cycle = 0;
1353        }
1354        self.current_cycle += 1;
1355        // renew the active_list
1356        let mut updated_active_list = Vec::with_capacity(self.active_list.len());
1357        for i in 0..self.active_list.len() {
1358            let (dual_node_ptr, internal_dual_node_ptr) = {
1359                match self.active_list[i].upgrade() {
1360                    Some(internal_dual_node_ptr) => {
1361                        let mut dual_node_internal = internal_dual_node_ptr.write();
1362                        if self.nodes[dual_node_internal.index as usize].is_none() {
1363                            continue;
1364                        } // removed
1365                        if dual_node_internal.last_visit_cycle == self.current_cycle {
1366                            continue;
1367                        } // visited
1368                        dual_node_internal.last_visit_cycle = self.current_cycle; // mark as visited
1369                        (dual_node_internal.origin.upgrade_force(), internal_dual_node_ptr.clone())
1370                    }
1371                    _ => continue,
1372                }
1373            };
1374            let dual_node = dual_node_ptr.read_recursive();
1375            match dual_node.grow_state {
1376                DualNodeGrowState::Grow | DualNodeGrowState::Shrink => {
1377                    updated_active_list.push(internal_dual_node_ptr.downgrade());
1378                }
1379                DualNodeGrowState::Stay => {} // no longer in the active list
1380            };
1381        }
1382        self.active_list = updated_active_list;
1383    }
1384
1385    fn sanity_check_grandson(
1386        &self,
1387        propagated_dual_node_weak: &DualNodeInternalWeak,
1388        propagated_grandson_dual_node_weak: &DualNodeInternalWeak,
1389    ) -> Result<(), String> {
1390        let propagated_dual_node_ptr = propagated_dual_node_weak.upgrade_force();
1391        let propagated_grandson_dual_node_ptr = propagated_grandson_dual_node_weak.upgrade_force();
1392        let propagated_dual_node = propagated_dual_node_ptr.read_recursive();
1393        let propagated_grandson_dual_node = propagated_grandson_dual_node_ptr.read_recursive();
1394        let propagated_node_ptr = propagated_dual_node.origin.upgrade_force();
1395        let propagated_node = propagated_node_ptr.read_recursive();
1396        let propagated_grandson_ptr = propagated_grandson_dual_node.origin.upgrade_force();
1397        let propagated_grandson = propagated_grandson_ptr.read_recursive();
1398        if matches!(propagated_grandson.class, DualNodeClass::DefectVertex { .. }) {
1399            if matches!(propagated_node.class, DualNodeClass::DefectVertex { .. }) {
1400                if propagated_dual_node_ptr != propagated_grandson_dual_node_ptr {
1401                    return Err(format!(
1402                        "syndrome node {:?} must have grandson equal to itself {:?}",
1403                        propagated_dual_node_ptr, propagated_grandson_dual_node_ptr
1404                    ));
1405                }
1406            } else {
1407                // test if grandson is a real grandson
1408                drop(propagated_grandson);
1409                let mut descendant_ptr = propagated_grandson_ptr;
1410                loop {
1411                    let descendant = descendant_ptr.read_recursive();
1412                    if let Some(descendant_parent_ptr) = descendant.parent_blossom.as_ref() {
1413                        let descendant_parent_ptr = descendant_parent_ptr.upgrade_force();
1414                        if descendant_parent_ptr == propagated_node_ptr {
1415                            return Ok(());
1416                        }
1417                        drop(descendant);
1418                        descendant_ptr = descendant_parent_ptr;
1419                    } else {
1420                        return Err("grandson check failed".to_string());
1421                    }
1422                }
1423            }
1424        } else {
1425            return Err("grandson must be a vertex".to_string());
1426        }
1427        Ok(())
1428    }
1429
1430    /// do a sanity check of if all the nodes are in consistent state
1431    #[allow(clippy::unnecessary_cast)]
1432    pub fn sanity_check(&self) -> Result<(), String> {
1433        let active_timestamp = self.active_timestamp;
1434        for vertex_ptr in self.vertices.iter() {
1435            vertex_ptr.dynamic_clear(active_timestamp);
1436            let vertex = vertex_ptr.read_recursive(active_timestamp);
1437            if let Some(propagated_grandson_dual_node) = vertex.propagated_grandson_dual_node.as_ref() {
1438                if let Some(propagated_dual_node) = vertex.propagated_dual_node.as_ref() {
1439                    self.sanity_check_grandson(propagated_dual_node, propagated_grandson_dual_node)?;
1440                } else {
1441                    return Err(format!(
1442                        "vertex {} has propagated grandson dual node {:?} but missing propagated dual node",
1443                        vertex.vertex_index, vertex.propagated_grandson_dual_node
1444                    ));
1445                }
1446            }
1447            if vertex.propagated_dual_node.is_some() && vertex.propagated_grandson_dual_node.is_none() {
1448                return Err(format!(
1449                    "vertex {} has propagated dual node {:?} but missing grandson",
1450                    vertex.vertex_index, vertex.propagated_dual_node
1451                ));
1452            }
1453        }
1454        // sanity check that boundary doesn't include duplicate edges
1455        let mut duplicate_edges = HashMap::<(bool, EdgeIndex), NodeIndex>::new();
1456        for node_index in 0..self.nodes_length as NodeIndex {
1457            let node_ptr = &self.nodes[node_index as usize];
1458            if let Some(node_ptr) = node_ptr {
1459                let dual_node_internal = node_ptr.read_recursive();
1460                if dual_node_internal
1461                    .origin
1462                    .upgrade_force()
1463                    .read_recursive()
1464                    .parent_blossom
1465                    .is_some()
1466                {
1467                    continue; // skip this node, since it's inactive
1468                }
1469                for (is_left, edge_weak) in dual_node_internal.boundary.iter() {
1470                    let edge_ptr = edge_weak.upgrade_force();
1471                    let edge = edge_ptr.read_recursive(active_timestamp);
1472                    if duplicate_edges.contains_key(&(*is_left, edge.edge_index)) {
1473                        return Err(format!(
1474                            "boundary edge {:?} appears twice in node {} and {}",
1475                            (*is_left, edge.edge_index),
1476                            duplicate_edges.get(&(*is_left, edge.edge_index)).unwrap(),
1477                            node_index
1478                        ));
1479                    }
1480                    duplicate_edges.insert((*is_left, edge.edge_index), node_index);
1481                }
1482            }
1483        }
1484        Ok(())
1485    }
1486}
1487
1488/*
1489Implementing visualization functions
1490*/
1491
1492impl FusionVisualizer for DualModuleSerial {
1493    #[allow(clippy::unnecessary_cast)]
1494    fn snapshot(&self, abbrev: bool) -> serde_json::Value {
1495        // do the sanity check first before taking snapshot
1496        self.sanity_check().unwrap();
1497        let active_timestamp = self.active_timestamp;
1498        let mut vertices: Vec<serde_json::Value> = (0..self.vertex_num).map(|_| serde_json::Value::Null).collect();
1499        for vertex_ptr in self.vertices.iter() {
1500            vertex_ptr.dynamic_clear(active_timestamp);
1501            let vertex = vertex_ptr.read_recursive(active_timestamp);
1502            vertices[vertex.vertex_index as usize] = json!({
1503                if abbrev { "v" } else { "is_virtual" }: i32::from(vertex.is_virtual),
1504            });
1505            if self.owning_range.contains(vertex.vertex_index) {
1506                // otherwise I don't know whether it's syndrome or not
1507                vertices[vertex.vertex_index as usize].as_object_mut().unwrap().insert(
1508                    (if abbrev { "s" } else { "is_defect" }).to_string(),
1509                    json!(i32::from(vertex.is_defect)),
1510                );
1511            }
1512            if let Some(value) = vertex.propagated_dual_node.as_ref().map(|weak| {
1513                weak.upgrade_force()
1514                    .read_recursive()
1515                    .origin
1516                    .upgrade_force()
1517                    .read_recursive()
1518                    .index
1519            }) {
1520                vertices[vertex.vertex_index as usize]
1521                    .as_object_mut()
1522                    .unwrap()
1523                    .insert((if abbrev { "p" } else { "propagated_dual_node" }).to_string(), json!(value));
1524            }
1525            if let Some(value) = vertex.propagated_grandson_dual_node.as_ref().map(|weak| {
1526                weak.upgrade_force()
1527                    .read_recursive()
1528                    .origin
1529                    .upgrade_force()
1530                    .read_recursive()
1531                    .index
1532            }) {
1533                vertices[vertex.vertex_index as usize].as_object_mut().unwrap().insert(
1534                    (if abbrev { "pg" } else { "propagated_grandson_dual_node" }).to_string(),
1535                    json!(value),
1536                );
1537            }
1538            if let Some(mirror_unit_ptr) = vertex.mirror_unit.as_ref() {
1539                let mirror_unit_ptr = mirror_unit_ptr.upgrade_force();
1540                let mirror_unit = mirror_unit_ptr.read_recursive();
1541                vertices[vertex.vertex_index as usize].as_object_mut().unwrap().insert(
1542                    (if abbrev { "mi" } else { "mirror_unit_index" }).to_string(),
1543                    json!(mirror_unit.unit_index),
1544                );
1545                vertices[vertex.vertex_index as usize].as_object_mut().unwrap().insert(
1546                    (if abbrev { "me" } else { "mirror_enabled" }).to_string(),
1547                    json!(i32::from(mirror_unit.enabled)),
1548                );
1549            }
1550        }
1551        let mut edges: Vec<serde_json::Value> = (0..self.edge_num).map(|_| serde_json::Value::Null).collect();
1552        for edge_ptr in self.edges.iter() {
1553            edge_ptr.dynamic_clear(active_timestamp);
1554            let edge = edge_ptr.read_recursive(active_timestamp);
1555            edges[edge.edge_index as usize] = json!({
1556                if abbrev { "w" } else { "weight" }: edge.weight,
1557                if abbrev { "l" } else { "left" }: edge.left.upgrade_force().read_recursive(active_timestamp).vertex_index,
1558                if abbrev { "r" } else { "right" }: edge.right.upgrade_force().read_recursive(active_timestamp).vertex_index,
1559                if abbrev { "lg" } else { "left_growth" }: edge.left_growth,
1560                if abbrev { "rg" } else { "right_growth" }: edge.right_growth,
1561            });
1562            if let Some(value) = edge.left_dual_node.as_ref().map(|weak| {
1563                weak.upgrade_force()
1564                    .read_recursive()
1565                    .origin
1566                    .upgrade_force()
1567                    .read_recursive()
1568                    .index
1569            }) {
1570                edges[edge.edge_index as usize]
1571                    .as_object_mut()
1572                    .unwrap()
1573                    .insert((if abbrev { "ld" } else { "left_dual_node" }).to_string(), json!(value));
1574            }
1575            if let Some(value) = edge.left_grandson_dual_node.as_ref().map(|weak| {
1576                weak.upgrade_force()
1577                    .read_recursive()
1578                    .origin
1579                    .upgrade_force()
1580                    .read_recursive()
1581                    .index
1582            }) {
1583                edges[edge.edge_index as usize].as_object_mut().unwrap().insert(
1584                    (if abbrev { "lgd" } else { "left_grandson_dual_node" }).to_string(),
1585                    json!(value),
1586                );
1587            }
1588            if let Some(value) = edge.right_dual_node.as_ref().map(|weak| {
1589                weak.upgrade_force()
1590                    .read_recursive()
1591                    .origin
1592                    .upgrade_force()
1593                    .read_recursive()
1594                    .index
1595            }) {
1596                edges[edge.edge_index as usize]
1597                    .as_object_mut()
1598                    .unwrap()
1599                    .insert((if abbrev { "rd" } else { "right_dual_node" }).to_string(), json!(value));
1600            }
1601            if let Some(value) = edge.right_grandson_dual_node.as_ref().map(|weak| {
1602                weak.upgrade_force()
1603                    .read_recursive()
1604                    .origin
1605                    .upgrade_force()
1606                    .read_recursive()
1607                    .index
1608            }) {
1609                edges[edge.edge_index as usize].as_object_mut().unwrap().insert(
1610                    (if abbrev { "rgd" } else { "right_grandson_dual_node" }).to_string(),
1611                    json!(value),
1612                );
1613            }
1614        }
1615        let mut value = json!({
1616            "vertices": vertices,
1617            "edges": edges,
1618        });
1619        // TODO: since each serial module only processes a part of the dual nodes, it's not feasible to list them in a reasonable vector now...
1620        // update the visualizer to be able to join multiple dual nodes
1621        if self.owning_range.start() == 0 && self.owning_range.end() == self.vertex_num {
1622            let mut dual_nodes = Vec::<serde_json::Value>::new();
1623            for node_index in 0..self.nodes_length {
1624                let node_ptr = &self.nodes[node_index];
1625                if let Some(node_ptr) = node_ptr.as_ref() {
1626                    let node = node_ptr.read_recursive();
1627                    dual_nodes.push(json!({
1628                        if abbrev { "b" } else { "boundary" }: node.boundary.iter().map(|(is_left, edge_weak)|
1629                            (*is_left, edge_weak.upgrade_force().read_recursive(active_timestamp).edge_index)).collect::<Vec<(bool, EdgeIndex)>>(),
1630                        if abbrev { "d" } else { "dual_variable" }: node.dual_variable,
1631                    }));
1632                } else {
1633                    dual_nodes.push(json!(null));
1634                }
1635            }
1636            value
1637                .as_object_mut()
1638                .unwrap()
1639                .insert("dual_nodes".to_string(), json!(dual_nodes));
1640        }
1641        value
1642    }
1643}
1644
1645/*
1646Implement internal helper functions that maintains the state of dual clusters
1647*/
1648
1649impl DualModuleSerial {
1650    /// register a new dual node ptr, but not creating the internal dual node
1651    fn register_dual_node_ptr(&mut self, dual_node_ptr: &DualNodePtr) {
1652        // println!("unit {:?}, register_dual_node_ptr: {:?}", self.unit_module_info, dual_node_ptr);
1653        let node = dual_node_ptr.read_recursive();
1654        if let Some(unit_module_info) = self.unit_module_info.as_mut() {
1655            if unit_module_info.owning_dual_range.is_empty() {
1656                // set the range instead of inserting into the lookup table, to minimize table lookup
1657                unit_module_info.owning_dual_range = VertexRange::new(node.index, node.index);
1658            }
1659            if unit_module_info.owning_dual_range.end() == node.index
1660                && self.nodes_length == unit_module_info.owning_dual_range.len()
1661            {
1662                // it's able to append into the owning range, minimizing table lookup and thus better performance
1663                unit_module_info.owning_dual_range.append_by(1);
1664            } else {
1665                // will be inserted at this place
1666                unit_module_info
1667                    .dual_node_pointers
1668                    .insert(dual_node_ptr.clone(), self.nodes_length);
1669            }
1670        } else {
1671            debug_assert!(
1672                self.nodes_length as NodeIndex == node.index,
1673                "dual node must be created in a sequential manner: no missing or duplicating"
1674            );
1675        }
1676        // println!("unit {:?}, register_dual_node_ptr: {:?}", self.unit_module_info, dual_node_ptr);
1677    }
1678
1679    /// get the local index of a dual node, thus has usize type
1680    #[allow(clippy::unnecessary_cast)]
1681    pub fn get_dual_node_index(&self, dual_node_ptr: &DualNodePtr) -> Option<usize> {
1682        let dual_node = dual_node_ptr.read_recursive();
1683        if let Some(unit_module_info) = self.unit_module_info.as_ref() {
1684            if unit_module_info.owning_dual_range.contains(dual_node.index) {
1685                debug_assert!(
1686                    dual_node.belonging.upgrade_force().read_recursive().parent.is_none(),
1687                    "dual node is not updated"
1688                );
1689                Some((dual_node.index - unit_module_info.owning_dual_range.start()) as usize)
1690            } else {
1691                // println!("from unit {:?}, dual_node: {}", self.unit_module_info, dual_node.index);
1692                unit_module_info.dual_node_pointers.get(dual_node_ptr).copied()
1693            }
1694        } else {
1695            Some(dual_node.index as usize)
1696        }
1697    }
1698
1699    /// get the local index of a vertex, thus has usize type
1700    #[allow(clippy::unnecessary_cast)]
1701    pub fn get_vertex_index(&self, vertex_index: VertexIndex) -> Option<usize> {
1702        if self.owning_range.contains(vertex_index) {
1703            return Some((vertex_index - self.owning_range.start()) as usize);
1704        }
1705        if let Some(unit_module_info) = self.unit_module_info.as_ref() {
1706            if let Some(index) = unit_module_info.mirrored_vertices.get(&vertex_index) {
1707                return Some(*index as usize);
1708            }
1709        }
1710        None
1711    }
1712
1713    pub fn get_dual_node_internal_ptr(&self, dual_node_ptr: &DualNodePtr) -> DualNodeInternalPtr {
1714        self.get_dual_node_internal_ptr_optional(dual_node_ptr).unwrap()
1715    }
1716
1717    /// dual node ptr may not hold in this module
1718    pub fn get_dual_node_internal_ptr_optional(&self, dual_node_ptr: &DualNodePtr) -> Option<DualNodeInternalPtr> {
1719        self.get_dual_node_index(dual_node_ptr).map(|dual_node_index| {
1720            let dual_node_internal_ptr = self.nodes[dual_node_index].as_ref().expect("internal dual node must exists");
1721            debug_assert!(
1722                dual_node_ptr == &dual_node_internal_ptr.read_recursive().origin.upgrade_force(),
1723                "dual node and dual internal node must corresponds to each other"
1724            );
1725            dual_node_internal_ptr.clone()
1726        })
1727    }
1728
    /// possibly add dual node only when sync_event is provided
    ///
    /// returns the internal dual node corresponding to `dual_node_ptr`; when this module
    /// doesn't know the node yet, a new internal node with the given `dual_variable` is
    /// created (or a stale slot left over from a fast clear is recycled) and activated
    #[allow(clippy::unnecessary_cast)]
    pub fn get_otherwise_add_dual_node(
        &mut self,
        dual_node_ptr: &DualNodePtr,
        dual_variable: Weight,
    ) -> DualNodeInternalPtr {
        let dual_node_index = self.get_dual_node_index(dual_node_ptr).unwrap_or_else(|| {
            // add a new internal dual node corresponding to the dual_node_ptr
            // note: register first, because the index bookkeeping depends on the current `nodes_length`
            self.register_dual_node_ptr(dual_node_ptr);
            let node_index = self.nodes_length as NodeIndex;
            let node_internal_ptr =
                // a `Some` entry beyond `nodes_length` is a stale allocation from a previous round:
                // reuse it by resetting every field, instead of allocating a fresh node
                if node_index < self.nodes.len() as NodeIndex && self.nodes[node_index as usize].is_some() {
                    let node_ptr = self.nodes[node_index as usize].as_ref().unwrap().clone();
                    let mut node = node_ptr.write();
                    node.origin = dual_node_ptr.downgrade();
                    node.index = node_index;
                    node.dual_variable = dual_variable;
                    node.boundary.clear();
                    node.overgrown_stack.clear();
                    node.last_visit_cycle = 0;
                    drop(node);  // release the write lock before handing the pointer out
                    node_ptr
                } else {
                    DualNodeInternalPtr::new_value(DualNodeInternal {
                        origin: dual_node_ptr.downgrade(),
                        index: node_index,
                        dual_variable,
                        boundary: Vec::new(),
                        overgrown_stack: Vec::new(),
                        last_visit_cycle: 0,
                    })
                };
            // newly created nodes always start in the active list
            self.active_list.push(node_internal_ptr.downgrade());
            self.nodes_length += 1;
            // grow the backing vector only when no stale slot was available
            if self.nodes.len() < self.nodes_length {
                self.nodes.push(None);
            }
            self.nodes[node_index as usize] = Some(node_internal_ptr);
            node_index as usize
        });
        let dual_node_internal_ptr = self.nodes[dual_node_index].as_ref().expect("internal dual node must exists");
        debug_assert!(
            dual_node_ptr == &dual_node_internal_ptr.read_recursive().origin.upgrade_force(),
            "dual node and dual internal node must corresponds to each other"
        );
        dual_node_internal_ptr.clone()
    }
1777
1778    /// this is equivalent to [`DualModuleSerial::prepare_dual_node_growth`] when there are no 0 weight edges, but when it encounters zero-weight edges, it will report `true`
1779    pub fn prepare_dual_node_growth_single(&mut self, dual_node_ptr: &DualNodePtr, is_grow: bool) -> bool {
1780        let active_timestamp = self.active_timestamp;
1781        self.updated_boundary.clear();
1782        self.propagating_vertices.clear();
1783        let dual_node_internal_ptr = self.get_dual_node_internal_ptr(dual_node_ptr);
1784        let mut newly_propagated_edge_has_zero_weight = false;
1785        if is_grow {
1786            // gracefully update the boundary to ease growing
1787            let dual_node_internal = dual_node_internal_ptr.read_recursive();
1788            for (is_left, edge_weak) in dual_node_internal.boundary.iter() {
1789                let edge_ptr = edge_weak.upgrade_force();
1790                let is_left = *is_left;
1791                let edge = edge_ptr.read_recursive(active_timestamp);
1792                let peer_dual_node: &Option<DualNodeInternalWeak> = if is_left {
1793                    &edge.right_dual_node
1794                } else {
1795                    &edge.left_dual_node
1796                };
1797                if edge.left_growth + edge.right_growth == edge.weight && peer_dual_node.is_none() {
1798                    // need to propagate to a new node
1799                    let peer_vertex_ptr = if is_left {
1800                        edge.right.upgrade_force()
1801                    } else {
1802                        edge.left.upgrade_force()
1803                    };
1804                    // to avoid already occupied node being propagated
1805                    peer_vertex_ptr.dynamic_clear(active_timestamp);
1806                    let peer_vertex = peer_vertex_ptr.read_recursive(active_timestamp);
1807                    if peer_vertex.is_virtual || peer_vertex.is_mirror_blocked() {
1808                        // virtual node is never propagated, so keep this edge in the boundary
1809                        self.updated_boundary.push((is_left, edge_weak.clone()));
1810                    } else {
1811                        debug_assert!(
1812                            peer_vertex.propagated_dual_node.is_none(),
1813                            "growing into another propagated vertex forbidden"
1814                        );
1815                        debug_assert!(
1816                            peer_vertex.propagated_grandson_dual_node.is_none(),
1817                            "growing into another propagated vertex forbidden"
1818                        );
1819                        self.propagating_vertices.push((
1820                            peer_vertex_ptr.downgrade(),
1821                            if is_left {
1822                                edge.left_grandson_dual_node.clone()
1823                            } else {
1824                                edge.right_grandson_dual_node.clone()
1825                            },
1826                        ));
1827                        // this edge is dropped, so we need to set both end of this edge to this dual node
1828                        drop(edge); // unlock read
1829                        let mut edge = edge_ptr.write(active_timestamp);
1830                        if is_left {
1831                            edge.right_dual_node = Some(dual_node_internal_ptr.downgrade());
1832                            debug_assert!(edge.left_grandson_dual_node.is_some());
1833                            edge.right_grandson_dual_node = edge.left_grandson_dual_node.clone();
1834                        } else {
1835                            edge.left_dual_node = Some(dual_node_internal_ptr.downgrade());
1836                            debug_assert!(edge.right_grandson_dual_node.is_some());
1837                            edge.left_grandson_dual_node = edge.right_grandson_dual_node.clone();
1838                        }
1839                    }
1840                } else {
1841                    // keep other edges
1842                    self.updated_boundary.push((is_left, edge_weak.clone()));
1843                }
1844            }
1845            drop(dual_node_internal); // unlock
1846                                      // propagating nodes may be duplicated, but it's easy to check by `propagated_dual_node`
1847            for (vertex_weak, grandson_dual_node) in self.propagating_vertices.iter() {
1848                let vertex_ptr = vertex_weak.upgrade_force();
1849                let mut vertex = vertex_ptr.write(active_timestamp);
1850                if vertex.propagated_dual_node.is_none() {
1851                    vertex.propagated_dual_node = Some(dual_node_internal_ptr.downgrade());
1852                    vertex.propagated_grandson_dual_node = grandson_dual_node.clone();
1853                    // add to the sync list
1854                    if let Some(mirror_unit_weak) = &vertex.mirror_unit {
1855                        self.sync_requests.push(SyncRequest {
1856                            mirror_unit_weak: mirror_unit_weak.clone(),
1857                            vertex_index: vertex.vertex_index,
1858                            propagated_dual_node: vertex.propagated_dual_node.clone().map(|weak| {
1859                                let dual_node_ptr = weak.upgrade_force();
1860                                let dual_node = dual_node_ptr.read_recursive();
1861                                (
1862                                    dual_node.origin.clone(),
1863                                    dual_node.dual_variable,
1864                                    dual_node.origin.upgrade_force().get_representative_vertex(),
1865                                )
1866                            }),
1867                            propagated_grandson_dual_node: vertex.propagated_grandson_dual_node.as_ref().map(|weak| {
1868                                let dual_node_ptr = weak.upgrade_force();
1869                                let dual_node = dual_node_ptr.read_recursive();
1870                                (
1871                                    dual_node.origin.clone(),
1872                                    dual_node.dual_variable,
1873                                    dual_node.origin.upgrade_force().get_representative_vertex(),
1874                                )
1875                            }),
1876                        });
1877                    }
1878                    let mut count_newly_propagated_edge = 0;
1879                    for edge_weak in vertex.edges.iter() {
1880                        let edge_ptr = edge_weak.upgrade_force();
1881                        let (is_left, newly_propagated_edge) = {
1882                            edge_ptr.dynamic_clear(active_timestamp);
1883                            let edge = edge_ptr.read_recursive(active_timestamp);
1884                            let is_left = vertex_ptr.downgrade() == edge.left;
1885                            let newly_propagated_edge = if is_left {
1886                                edge.left_dual_node.is_none()
1887                            } else {
1888                                edge.right_dual_node.is_none()
1889                            };
1890                            (is_left, newly_propagated_edge)
1891                        };
1892                        if newly_propagated_edge {
1893                            count_newly_propagated_edge += 1;
1894                            self.updated_boundary.push((is_left, edge_weak.clone()));
1895                            let mut edge = edge_ptr.write(active_timestamp);
1896                            if edge.weight == 0 {
1897                                newly_propagated_edge_has_zero_weight = true;
1898                            }
1899                            if is_left {
1900                                edge.left_dual_node = Some(dual_node_internal_ptr.downgrade());
1901                                edge.left_grandson_dual_node = grandson_dual_node.clone();
1902                            } else {
1903                                edge.right_dual_node = Some(dual_node_internal_ptr.downgrade());
1904                                edge.right_grandson_dual_node = grandson_dual_node.clone();
1905                            };
1906                        }
1907                    }
1908                    if count_newly_propagated_edge == 0 {
1909                        lock_write!(dual_node_internal, dual_node_internal_ptr);
1910                        dual_node_internal.overgrown_stack.push((vertex_ptr.downgrade(), 0));
1911                    }
1912                }
1913            }
1914        } else {
1915            // gracefully update the boundary to ease shrinking
1916            self.clear_edge_dedup();
1917            {
1918                lock_write!(dual_node_internal, dual_node_internal_ptr);
1919                while !dual_node_internal.overgrown_stack.is_empty() {
1920                    let last_index = dual_node_internal.overgrown_stack.len() - 1;
1921                    let (_, overgrown) = &dual_node_internal.overgrown_stack[last_index];
1922                    if *overgrown == 0 {
1923                        let (vertex_weak, _) = dual_node_internal.overgrown_stack.pop().unwrap();
1924                        let vertex_ptr = vertex_weak.upgrade_force();
1925                        // push the surrounding edges back to the boundary
1926                        let mut vertex = vertex_ptr.write(active_timestamp);
1927                        if vertex.propagated_dual_node == Some(dual_node_internal_ptr.downgrade()) {
1928                            vertex.propagated_dual_node = None;
1929                            vertex.propagated_grandson_dual_node = None;
1930                            // add to the sync list
1931                            if let Some(mirror_unit_weak) = &vertex.mirror_unit {
1932                                self.sync_requests.push(SyncRequest {
1933                                    mirror_unit_weak: mirror_unit_weak.clone(),
1934                                    vertex_index: vertex.vertex_index,
1935                                    propagated_dual_node: None,
1936                                    propagated_grandson_dual_node: None,
1937                                });
1938                            }
1939                            for edge_weak in vertex.edges.iter() {
1940                                let edge_ptr = edge_weak.upgrade_force();
1941                                let mut edge = edge_ptr.write(active_timestamp);
1942                                let is_left = vertex_ptr.downgrade() == edge.left;
1943                                if self.unit_module_info.is_none() {
1944                                    debug_assert!(
1945                                        if is_left {
1946                                            edge.left_dual_node == Some(dual_node_internal_ptr.downgrade())
1947                                                && edge.left_grandson_dual_node.is_some()
1948                                        } else {
1949                                            edge.right_dual_node == Some(dual_node_internal_ptr.downgrade())
1950                                                && edge.right_grandson_dual_node.is_some()
1951                                        },
1952                                        "overgrown vertices must be surrounded by the same propagated dual node"
1953                                    );
1954                                }
1955                                if is_left {
1956                                    edge.left_dual_node = None;
1957                                    edge.left_grandson_dual_node = None;
1958                                } else {
1959                                    edge.right_dual_node = None;
1960                                    edge.right_grandson_dual_node = None;
1961                                }
1962                                if (if !is_left {
1963                                    edge.dedup_timestamp.0
1964                                } else {
1965                                    edge.dedup_timestamp.1
1966                                }) != self.edge_dedup_timestamp
1967                                {
1968                                    if !is_left {
1969                                        edge.dedup_timestamp.0 = self.edge_dedup_timestamp;
1970                                    } else {
1971                                        edge.dedup_timestamp.1 = self.edge_dedup_timestamp;
1972                                    }
1973                                    self.updated_boundary.push((!is_left, edge_weak.clone()));
1974                                    // boundary has the opposite end
1975                                }
1976                            }
1977                        } else {
1978                            // this happens when sync request already vacate the vertex, thus no need to add edges to the boundary
1979                            // in fact, this will cause duplicate boundary edges if not skipped, leading to exceptions when creating blossoms
1980                            debug_assert!(self.unit_module_info.is_some(), "serial module itself cannot happen");
1981                        }
1982                    } else {
1983                        break; // found non-negative overgrown in the stack
1984                    }
1985                }
1986            }
1987            let dual_node_internal = dual_node_internal_ptr.read_recursive();
1988            for (is_left, edge_weak) in dual_node_internal.boundary.iter() {
1989                let edge_ptr = edge_weak.upgrade_force();
1990                let is_left = *is_left;
1991                let mut edge = edge_ptr.write(active_timestamp);
1992                let this_growth = if is_left { edge.left_growth } else { edge.right_growth };
1993                if this_growth == 0 {
1994                    // need to shrink before this vertex
1995                    let this_vertex_ptr = if is_left {
1996                        edge.left.upgrade_force()
1997                    } else {
1998                        edge.right.upgrade_force()
1999                    };
2000                    // to avoid already occupied node being propagated
2001                    let this_vertex = this_vertex_ptr.read_recursive(active_timestamp);
2002                    if this_vertex.is_defect {
2003                        // never shrink from the syndrome itself
2004                        if (if is_left {
2005                            edge.dedup_timestamp.0
2006                        } else {
2007                            edge.dedup_timestamp.1
2008                        }) != self.edge_dedup_timestamp
2009                        {
2010                            if is_left {
2011                                edge.dedup_timestamp.0 = self.edge_dedup_timestamp;
2012                            } else {
2013                                edge.dedup_timestamp.1 = self.edge_dedup_timestamp;
2014                            }
2015                            self.updated_boundary.push((is_left, edge_weak.clone()));
2016                        }
2017                    } else {
2018                        if edge.weight > 0 && self.unit_module_info.is_none() {
2019                            // do not check for 0-weight edges
2020                            debug_assert!(
2021                                this_vertex.propagated_dual_node.is_some(),
2022                                "unexpected shrink into an empty vertex"
2023                            );
2024                        }
2025                        self.propagating_vertices.push((this_vertex_ptr.downgrade(), None));
2026                    }
2027                } else {
2028                    // keep other edges
2029                    if (if is_left {
2030                        edge.dedup_timestamp.0
2031                    } else {
2032                        edge.dedup_timestamp.1
2033                    }) != self.edge_dedup_timestamp
2034                    {
2035                        if is_left {
2036                            edge.dedup_timestamp.0 = self.edge_dedup_timestamp;
2037                        } else {
2038                            edge.dedup_timestamp.1 = self.edge_dedup_timestamp;
2039                        }
2040                        self.updated_boundary.push((is_left, edge_weak.clone()));
2041                    }
2042                }
2043            }
2044            // propagating nodes may be duplicated, but it's easy to check by `propagated_dual_node`
2045            for (vertex_weak, _) in self.propagating_vertices.iter() {
2046                let vertex_ptr = vertex_weak.upgrade_force();
2047                let mut vertex = vertex_ptr.write(active_timestamp);
2048                if vertex.propagated_dual_node.is_some() {
2049                    vertex.propagated_dual_node = None;
2050                    vertex.propagated_grandson_dual_node = None;
2051                    // add to the sync list
2052                    if let Some(mirror_unit_weak) = &vertex.mirror_unit {
2053                        self.sync_requests.push(SyncRequest {
2054                            mirror_unit_weak: mirror_unit_weak.clone(),
2055                            vertex_index: vertex.vertex_index,
2056                            propagated_dual_node: None,
2057                            propagated_grandson_dual_node: None,
2058                        });
2059                    }
2060                    for edge_weak in vertex.edges.iter() {
2061                        let edge_ptr = edge_weak.upgrade_force();
2062                        let (is_left, newly_propagated_edge) = {
2063                            let edge = edge_ptr.read_recursive(active_timestamp);
2064                            let is_left = vertex_ptr.downgrade() == edge.left;
2065                            debug_assert!(if is_left {
2066                                edge.right != vertex_ptr.downgrade()
2067                            } else {
2068                                edge.right == vertex_ptr.downgrade()
2069                            });
2070                            // fully grown edge is where to shrink
2071                            let newly_propagated_edge = edge.left_dual_node == Some(dual_node_internal_ptr.downgrade())
2072                                && edge.right_dual_node == Some(dual_node_internal_ptr.downgrade())
2073                                && edge.left_growth + edge.right_growth >= edge.weight;
2074                            debug_assert!(
2075                                {
2076                                    newly_propagated_edge || {
2077                                        if is_left {
2078                                            edge.left_growth == 0
2079                                        } else {
2080                                            edge.right_growth == 0
2081                                        }
2082                                    }
2083                                },
2084                                "an edge must be either newly propagated or to be removed"
2085                            );
2086                            (is_left, newly_propagated_edge)
2087                        };
2088                        if newly_propagated_edge {
2089                            let mut edge = edge_ptr.write(active_timestamp);
2090                            if (if !is_left {
2091                                edge.dedup_timestamp.0
2092                            } else {
2093                                edge.dedup_timestamp.1
2094                            }) != self.edge_dedup_timestamp
2095                            {
2096                                if !is_left {
2097                                    edge.dedup_timestamp.0 = self.edge_dedup_timestamp;
2098                                } else {
2099                                    edge.dedup_timestamp.1 = self.edge_dedup_timestamp;
2100                                }
2101                                self.updated_boundary.push((!is_left, edge_weak.clone()));
2102                            } // otherwise it's duplicate and should not be added to the boundary list
2103                            if edge.weight == 0 {
2104                                newly_propagated_edge_has_zero_weight = true;
2105                            }
2106                            if is_left {
2107                                debug_assert!(edge.right_dual_node.is_some(), "unexpected shrinking to empty edge");
2108                                debug_assert!(
2109                                    edge.right_dual_node.as_ref().unwrap() == &dual_node_internal_ptr.downgrade(),
2110                                    "shrinking edge should be same tree node"
2111                                );
2112                                edge.left_dual_node = None;
2113                                edge.left_grandson_dual_node = None;
2114                            } else {
2115                                debug_assert!(edge.left_dual_node.is_some(), "unexpected shrinking to empty edge");
2116                                debug_assert!(
2117                                    edge.left_dual_node.as_ref().unwrap() == &dual_node_internal_ptr.downgrade(),
2118                                    "shrinking edge should be same tree node"
2119                                );
2120                                edge.right_dual_node = None;
2121                                edge.right_grandson_dual_node = None;
2122                            };
2123                        } else {
2124                            let mut edge = edge_ptr.write(active_timestamp);
2125                            if is_left {
2126                                edge.left_dual_node = None;
2127                                edge.left_grandson_dual_node = None;
2128                            } else {
2129                                edge.right_dual_node = None;
2130                                edge.right_grandson_dual_node = None;
2131                            }
2132                        }
2133                    }
2134                }
2135            }
2136        }
2137        // update the boundary
2138        lock_write!(dual_node_internal, dual_node_internal_ptr);
2139        std::mem::swap(&mut self.updated_boundary, &mut dual_node_internal.boundary);
2140        // println!("{} boundary: {:?}", tree_node.boundary.len(), tree_node.boundary);
2141        if self.unit_module_info.is_none() {
2142            debug_assert!(
2143                !dual_node_internal.boundary.is_empty(),
2144                "the boundary of a dual cluster is never empty"
2145            );
2146        }
2147        newly_propagated_edge_has_zero_weight
2148    }
2149
2150    /// adjust the boundary of each dual node to fit into the need of growing (`length` > 0) or shrinking (`length` < 0)
2151    pub fn prepare_dual_node_growth(&mut self, dual_node_ptr: &DualNodePtr, is_grow: bool) {
2152        let mut need_another = self.prepare_dual_node_growth_single(dual_node_ptr, is_grow);
2153        while need_another {
2154            // when there are 0 weight edges, one may need to run multiple iterations to get it prepared in a proper state
2155            need_another = self.prepare_dual_node_growth_single(dual_node_ptr, is_grow);
2156        }
2157    }
2158}
2159
2160#[cfg(test)]
2161mod tests {
2162    use super::super::example_codes::*;
2163    use super::super::primal_module_serial::tests::*;
2164    use super::*;
2165
2166    #[allow(dead_code)]
2167    fn debug_print_dual_node(dual_module: &DualModuleSerial, dual_node_ptr: &DualNodePtr) {
2168        println!("boundary:");
2169        let dual_node_internal_ptr = dual_module.get_dual_node_internal_ptr(dual_node_ptr);
2170        let dual_node_internal = dual_node_internal_ptr.read_recursive();
2171        for (is_left, edge_weak) in dual_node_internal.boundary.iter() {
2172            let edge_ptr = edge_weak.upgrade_force();
2173            let edge = edge_ptr.read_recursive_force();
2174            println!("    {} {:?}", if *is_left { " left" } else { "right" }, edge);
2175        }
2176    }
2177
2178    #[test]
2179    fn dual_module_serial_basics() {
2180        // cargo test dual_module_serial_basics -- --nocapture
2181        let visualize_filename = "dual_module_serial_basics.json".to_string();
2182        let half_weight = 500;
2183        let mut code = CodeCapacityPlanarCode::new(7, 0.1, half_weight);
2184        let mut visualizer = Visualizer::new(
2185            Some(visualize_data_folder() + visualize_filename.as_str()),
2186            code.get_positions(),
2187            true,
2188        )
2189        .unwrap();
2190        print_visualize_link(visualize_filename.clone());
2191        // create dual module
2192        let initializer = code.get_initializer();
2193        let mut dual_module = DualModuleSerial::new_empty(&initializer);
2194        // try to work on a simple syndrome
2195        code.vertices[19].is_defect = true;
2196        code.vertices[25].is_defect = true;
2197        let interface_ptr = DualModuleInterfacePtr::new_load(&code.get_syndrome(), &mut dual_module);
2198        visualizer
2199            .snapshot_combined("syndrome".to_string(), vec![&interface_ptr, &dual_module])
2200            .unwrap();
2201        // create dual nodes and grow them by half length
2202        let dual_node_19_ptr = interface_ptr.read_recursive().nodes[0].clone().unwrap();
2203        let dual_node_25_ptr = interface_ptr.read_recursive().nodes[1].clone().unwrap();
2204        dual_module.grow_dual_node(&dual_node_19_ptr, half_weight);
2205        dual_module.grow_dual_node(&dual_node_25_ptr, half_weight);
2206        visualizer
2207            .snapshot_combined("grow to 0.5".to_string(), vec![&interface_ptr, &dual_module])
2208            .unwrap();
2209        dual_module.grow_dual_node(&dual_node_19_ptr, half_weight);
2210        dual_module.grow_dual_node(&dual_node_25_ptr, half_weight);
2211        visualizer
2212            .snapshot_combined("grow to 1".to_string(), vec![&interface_ptr, &dual_module])
2213            .unwrap();
2214        dual_module.grow_dual_node(&dual_node_19_ptr, half_weight);
2215        dual_module.grow_dual_node(&dual_node_25_ptr, half_weight);
2216        visualizer
2217            .snapshot_combined("grow to 1.5".to_string(), vec![&interface_ptr, &dual_module])
2218            .unwrap();
2219        dual_module.grow_dual_node(&dual_node_19_ptr, -half_weight);
2220        dual_module.grow_dual_node(&dual_node_25_ptr, -half_weight);
2221        visualizer
2222            .snapshot_combined("shrink to 1".to_string(), vec![&interface_ptr, &dual_module])
2223            .unwrap();
2224        dual_module.grow_dual_node(&dual_node_19_ptr, -half_weight);
2225        dual_module.grow_dual_node(&dual_node_25_ptr, -half_weight);
2226        visualizer
2227            .snapshot_combined("shrink to 0.5".to_string(), vec![&interface_ptr, &dual_module])
2228            .unwrap();
2229        dual_module.grow_dual_node(&dual_node_19_ptr, -half_weight);
2230        dual_module.grow_dual_node(&dual_node_25_ptr, -half_weight);
2231        visualizer
2232            .snapshot_combined("shrink to 0".to_string(), vec![&interface_ptr, &dual_module])
2233            .unwrap();
2234    }
2235
2236    #[test]
2237    fn dual_module_serial_blossom_basics() {
2238        // cargo test dual_module_serial_blossom_basics -- --nocapture
2239        let visualize_filename = "dual_module_serial_blossom_basics.json".to_string();
2240        let half_weight = 500;
2241        let mut code = CodeCapacityPlanarCode::new(7, 0.1, half_weight);
2242        let mut visualizer = Visualizer::new(
2243            Some(visualize_data_folder() + visualize_filename.as_str()),
2244            code.get_positions(),
2245            true,
2246        )
2247        .unwrap();
2248        print_visualize_link(visualize_filename.clone());
2249        // create dual module
2250        let initializer = code.get_initializer();
2251        let mut dual_module = DualModuleSerial::new_empty(&initializer);
2252        // try to work on a simple syndrome
2253        code.vertices[19].is_defect = true;
2254        code.vertices[26].is_defect = true;
2255        code.vertices[35].is_defect = true;
2256        let interface_ptr = DualModuleInterfacePtr::new_load(&code.get_syndrome(), &mut dual_module);
2257        visualizer
2258            .snapshot_combined("syndrome".to_string(), vec![&interface_ptr, &dual_module])
2259            .unwrap();
2260        // create dual nodes and grow them by half length
2261        let dual_node_19_ptr = interface_ptr.read_recursive().nodes[0].clone().unwrap();
2262        let dual_node_26_ptr = interface_ptr.read_recursive().nodes[1].clone().unwrap();
2263        let dual_node_35_ptr = interface_ptr.read_recursive().nodes[2].clone().unwrap();
2264        interface_ptr.grow(2 * half_weight, &mut dual_module);
2265        assert_eq!(interface_ptr.sum_dual_variables(), 6 * half_weight);
2266        visualizer
2267            .snapshot_combined("before create blossom".to_string(), vec![&interface_ptr, &dual_module])
2268            .unwrap();
2269        let nodes_circle = vec![dual_node_19_ptr.clone(), dual_node_26_ptr.clone(), dual_node_35_ptr.clone()];
2270        interface_ptr.set_grow_state(&dual_node_26_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2271        let dual_node_blossom = interface_ptr.create_blossom(nodes_circle, vec![], &mut dual_module);
2272        interface_ptr.grow(half_weight, &mut dual_module);
2273        assert_eq!(interface_ptr.sum_dual_variables(), 7 * half_weight);
2274        visualizer
2275            .snapshot_combined("blossom grow half weight".to_string(), vec![&interface_ptr, &dual_module])
2276            .unwrap();
2277        interface_ptr.grow(half_weight, &mut dual_module);
2278        assert_eq!(interface_ptr.sum_dual_variables(), 8 * half_weight);
2279        visualizer
2280            .snapshot_combined("blossom grow half weight".to_string(), vec![&interface_ptr, &dual_module])
2281            .unwrap();
2282        interface_ptr.grow(half_weight, &mut dual_module);
2283        assert_eq!(interface_ptr.sum_dual_variables(), 9 * half_weight);
2284        visualizer
2285            .snapshot_combined("blossom grow half weight".to_string(), vec![&interface_ptr, &dual_module])
2286            .unwrap();
2287        interface_ptr.set_grow_state(&dual_node_blossom, DualNodeGrowState::Shrink, &mut dual_module);
2288        interface_ptr.grow(half_weight, &mut dual_module);
2289        assert_eq!(interface_ptr.sum_dual_variables(), 8 * half_weight);
2290        visualizer
2291            .snapshot_combined("blossom shrink half weight".to_string(), vec![&interface_ptr, &dual_module])
2292            .unwrap();
2293        interface_ptr.grow(2 * half_weight, &mut dual_module);
2294        assert_eq!(interface_ptr.sum_dual_variables(), 6 * half_weight);
2295        visualizer
2296            .snapshot_combined("blossom shrink weight".to_string(), vec![&interface_ptr, &dual_module])
2297            .unwrap();
2298        interface_ptr.expand_blossom(dual_node_blossom, &mut dual_module);
2299        interface_ptr.set_grow_state(&dual_node_19_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2300        interface_ptr.set_grow_state(&dual_node_26_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2301        interface_ptr.set_grow_state(&dual_node_35_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2302        interface_ptr.grow(half_weight, &mut dual_module);
2303        assert_eq!(interface_ptr.sum_dual_variables(), 3 * half_weight);
2304        visualizer
2305            .snapshot_combined(
2306                "individual shrink half weight".to_string(),
2307                vec![&interface_ptr, &dual_module],
2308            )
2309            .unwrap();
2310    }
2311
2312    #[test]
2313    fn dual_module_serial_stop_reason_1() {
2314        // cargo test dual_module_serial_stop_reason_1 -- --nocapture
2315        let visualize_filename = "dual_module_serial_stop_reason_1.json".to_string();
2316        let half_weight = 500;
2317        let mut code = CodeCapacityPlanarCode::new(7, 0.1, half_weight);
2318        let mut visualizer = Visualizer::new(
2319            Some(visualize_data_folder() + visualize_filename.as_str()),
2320            code.get_positions(),
2321            true,
2322        )
2323        .unwrap();
2324        print_visualize_link(visualize_filename.clone());
2325        // create dual module
2326        let initializer = code.get_initializer();
2327        let mut dual_module = DualModuleSerial::new_empty(&initializer);
2328        // try to work on a simple syndrome
2329        code.vertices[19].is_defect = true;
2330        code.vertices[25].is_defect = true;
2331        let interface_ptr = DualModuleInterfacePtr::new_load(&code.get_syndrome(), &mut dual_module);
2332        visualizer
2333            .snapshot_combined("syndrome".to_string(), vec![&interface_ptr, &dual_module])
2334            .unwrap();
2335        // create dual nodes and grow them by half length
2336        let dual_node_19_ptr = interface_ptr.read_recursive().nodes[0].clone().unwrap();
2337        let dual_node_25_ptr = interface_ptr.read_recursive().nodes[1].clone().unwrap();
2338        // grow the maximum
2339        let group_max_update_length = dual_module.compute_maximum_update_length();
2340        assert_eq!(
2341            group_max_update_length.get_none_zero_growth(),
2342            Some(2 * half_weight),
2343            "unexpected: {:?}",
2344            group_max_update_length
2345        );
2346        interface_ptr.grow(2 * half_weight, &mut dual_module);
2347        assert_eq!(interface_ptr.sum_dual_variables(), 4 * half_weight);
2348        visualizer
2349            .snapshot_combined("grow".to_string(), vec![&interface_ptr, &dual_module])
2350            .unwrap();
2351        // grow the maximum
2352        let group_max_update_length = dual_module.compute_maximum_update_length();
2353        assert_eq!(
2354            group_max_update_length.get_none_zero_growth(),
2355            Some(half_weight),
2356            "unexpected: {:?}",
2357            group_max_update_length
2358        );
2359        interface_ptr.grow(half_weight, &mut dual_module);
2360        assert_eq!(interface_ptr.sum_dual_variables(), 6 * half_weight);
2361        visualizer
2362            .snapshot_combined("grow".to_string(), vec![&interface_ptr, &dual_module])
2363            .unwrap();
2364        // cannot grow anymore, find out the reason
2365        let group_max_update_length = dual_module.compute_maximum_update_length();
2366        assert!(
2367            group_max_update_length
2368                .peek()
2369                .unwrap()
2370                .is_conflicting(&dual_node_19_ptr, &dual_node_25_ptr),
2371            "unexpected: {:?}",
2372            group_max_update_length
2373        );
2374    }
2375
2376    #[test]
2377    fn dual_module_serial_stop_reason_2() {
2378        // cargo test dual_module_serial_stop_reason_2 -- --nocapture
2379        let visualize_filename = "dual_module_serial_stop_reason_2.json".to_string();
2380        let half_weight = 500;
2381        let mut code = CodeCapacityPlanarCode::new(7, 0.1, half_weight);
2382        let mut visualizer = Visualizer::new(
2383            Some(visualize_data_folder() + visualize_filename.as_str()),
2384            code.get_positions(),
2385            true,
2386        )
2387        .unwrap();
2388        print_visualize_link(visualize_filename.clone());
2389        // create dual module
2390        let initializer = code.get_initializer();
2391        let mut dual_module = DualModuleSerial::new_empty(&initializer);
2392        // try to work on a simple syndrome
2393        code.vertices[18].is_defect = true;
2394        code.vertices[26].is_defect = true;
2395        code.vertices[34].is_defect = true;
2396        let interface_ptr = DualModuleInterfacePtr::new_load(&code.get_syndrome(), &mut dual_module);
2397        visualizer
2398            .snapshot_combined("syndrome".to_string(), vec![&interface_ptr, &dual_module])
2399            .unwrap();
2400        // create dual nodes and grow them by half length
2401        let dual_node_18_ptr = interface_ptr.read_recursive().nodes[0].clone().unwrap();
2402        let dual_node_26_ptr = interface_ptr.read_recursive().nodes[1].clone().unwrap();
2403        let dual_node_34_ptr = interface_ptr.read_recursive().nodes[2].clone().unwrap();
2404        // grow the maximum
2405        let group_max_update_length = dual_module.compute_maximum_update_length();
2406        assert_eq!(
2407            group_max_update_length.get_none_zero_growth(),
2408            Some(half_weight),
2409            "unexpected: {:?}",
2410            group_max_update_length
2411        );
2412        interface_ptr.grow(half_weight, &mut dual_module);
2413        assert_eq!(interface_ptr.sum_dual_variables(), 3 * half_weight);
2414        visualizer
2415            .snapshot_combined("grow".to_string(), vec![&interface_ptr, &dual_module])
2416            .unwrap();
2417        // cannot grow anymore, find out the reason
2418        let group_max_update_length = dual_module.compute_maximum_update_length();
2419        assert!(
2420            group_max_update_length
2421                .peek()
2422                .unwrap()
2423                .is_conflicting(&dual_node_18_ptr, &dual_node_26_ptr)
2424                || group_max_update_length
2425                    .peek()
2426                    .unwrap()
2427                    .is_conflicting(&dual_node_26_ptr, &dual_node_34_ptr),
2428            "unexpected: {:?}",
2429            group_max_update_length
2430        );
2431        // first match 18 and 26
2432        interface_ptr.set_grow_state(&dual_node_18_ptr, DualNodeGrowState::Stay, &mut dual_module);
2433        interface_ptr.set_grow_state(&dual_node_26_ptr, DualNodeGrowState::Stay, &mut dual_module);
2434        // cannot grow anymore, find out the reason
2435        let group_max_update_length = dual_module.compute_maximum_update_length();
2436        assert!(
2437            group_max_update_length
2438                .peek()
2439                .unwrap()
2440                .is_conflicting(&dual_node_26_ptr, &dual_node_34_ptr),
2441            "unexpected: {:?}",
2442            group_max_update_length
2443        );
2444        // 34 touches 26, so it will grow the tree by absorbing 18 and 26
2445        interface_ptr.set_grow_state(&dual_node_18_ptr, DualNodeGrowState::Grow, &mut dual_module);
2446        interface_ptr.set_grow_state(&dual_node_26_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2447        // grow the maximum
2448        let group_max_update_length = dual_module.compute_maximum_update_length();
2449        assert_eq!(
2450            group_max_update_length.get_none_zero_growth(),
2451            Some(half_weight),
2452            "unexpected: {:?}",
2453            group_max_update_length
2454        );
2455        interface_ptr.grow(half_weight, &mut dual_module);
2456        assert_eq!(interface_ptr.sum_dual_variables(), 4 * half_weight);
2457        visualizer
2458            .snapshot_combined("grow".to_string(), vec![&interface_ptr, &dual_module])
2459            .unwrap();
2460        // cannot grow anymore, find out the reason
2461        let group_max_update_length = dual_module.compute_maximum_update_length();
2462        assert!(
2463            group_max_update_length
2464                .peek()
2465                .unwrap()
2466                .is_conflicting(&dual_node_18_ptr, &dual_node_34_ptr),
2467            "unexpected: {:?}",
2468            group_max_update_length
2469        );
2470        // for a blossom because 18 and 34 come from the same alternating tree
2471        let dual_node_blossom = interface_ptr.create_blossom(
2472            vec![dual_node_18_ptr.clone(), dual_node_26_ptr.clone(), dual_node_34_ptr.clone()],
2473            vec![],
2474            &mut dual_module,
2475        );
2476        // grow the maximum
2477        let group_max_update_length = dual_module.compute_maximum_update_length();
2478        assert_eq!(
2479            group_max_update_length.get_none_zero_growth(),
2480            Some(2 * half_weight),
2481            "unexpected: {:?}",
2482            group_max_update_length
2483        );
2484        interface_ptr.grow(2 * half_weight, &mut dual_module);
2485        assert_eq!(interface_ptr.sum_dual_variables(), 6 * half_weight);
2486        visualizer
2487            .snapshot_combined("grow blossom".to_string(), vec![&interface_ptr, &dual_module])
2488            .unwrap();
2489        // grow the maximum
2490        let group_max_update_length = dual_module.compute_maximum_update_length();
2491        assert_eq!(
2492            group_max_update_length.get_none_zero_growth(),
2493            Some(2 * half_weight),
2494            "unexpected: {:?}",
2495            group_max_update_length
2496        );
2497        interface_ptr.grow(2 * half_weight, &mut dual_module);
2498        assert_eq!(interface_ptr.sum_dual_variables(), 8 * half_weight);
2499        visualizer
2500            .snapshot_combined("grow blossom".to_string(), vec![&interface_ptr, &dual_module])
2501            .unwrap();
2502        // cannot grow anymore, find out the reason
2503        let group_max_update_length = dual_module.compute_maximum_update_length();
2504        assert!(
2505            group_max_update_length.peek().unwrap().get_touching_virtual() == Some((dual_node_blossom.clone(), 23))
2506                || group_max_update_length.peek().unwrap().get_touching_virtual() == Some((dual_node_blossom.clone(), 39)),
2507            "unexpected: {:?}",
2508            group_max_update_length
2509        );
2510        // blossom touches virtual boundary, so it's matched
2511        interface_ptr.set_grow_state(&dual_node_blossom, DualNodeGrowState::Stay, &mut dual_module);
2512        let group_max_update_length = dual_module.compute_maximum_update_length();
2513        assert!(
2514            group_max_update_length.is_empty(),
2515            "unexpected: {:?}",
2516            group_max_update_length
2517        );
2518        // also test the reverse procedure: shrinking and expanding blossom
2519        interface_ptr.set_grow_state(&dual_node_blossom, DualNodeGrowState::Shrink, &mut dual_module);
2520        let group_max_update_length = dual_module.compute_maximum_update_length();
2521        assert_eq!(
2522            group_max_update_length.get_none_zero_growth(),
2523            Some(2 * half_weight),
2524            "unexpected: {:?}",
2525            group_max_update_length
2526        );
2527        interface_ptr.grow(2 * half_weight, &mut dual_module);
2528        assert_eq!(interface_ptr.sum_dual_variables(), 6 * half_weight);
2529        visualizer
2530            .snapshot_combined("shrink blossom".to_string(), vec![&interface_ptr, &dual_module])
2531            .unwrap();
2532        // before expand
2533        interface_ptr.set_grow_state(&dual_node_blossom, DualNodeGrowState::Shrink, &mut dual_module);
2534        let group_max_update_length = dual_module.compute_maximum_update_length();
2535        assert_eq!(
2536            group_max_update_length.get_none_zero_growth(),
2537            Some(2 * half_weight),
2538            "unexpected: {:?}",
2539            group_max_update_length
2540        );
2541        interface_ptr.grow(2 * half_weight, &mut dual_module);
2542        assert_eq!(interface_ptr.sum_dual_variables(), 4 * half_weight);
2543        visualizer
2544            .snapshot_combined("shrink blossom".to_string(), vec![&interface_ptr, &dual_module])
2545            .unwrap();
2546        // cannot shrink anymore, find out the reason
2547        let group_max_update_length = dual_module.compute_maximum_update_length();
2548        assert!(
2549            group_max_update_length.peek().unwrap() == &MaxUpdateLength::BlossomNeedExpand(dual_node_blossom.clone()),
2550            "unexpected: {:?}",
2551            group_max_update_length
2552        );
2553        // expand blossom
2554        interface_ptr.expand_blossom(dual_node_blossom, &mut dual_module);
2555        // regain access to underlying nodes
2556        interface_ptr.set_grow_state(&dual_node_18_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2557        interface_ptr.set_grow_state(&dual_node_26_ptr, DualNodeGrowState::Grow, &mut dual_module);
2558        interface_ptr.set_grow_state(&dual_node_34_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2559        let group_max_update_length = dual_module.compute_maximum_update_length();
2560        assert_eq!(
2561            group_max_update_length.get_none_zero_growth(),
2562            Some(2 * half_weight),
2563            "unexpected: {:?}",
2564            group_max_update_length
2565        );
2566        interface_ptr.grow(2 * half_weight, &mut dual_module);
2567        assert_eq!(interface_ptr.sum_dual_variables(), 2 * half_weight);
2568        visualizer
2569            .snapshot_combined("shrink".to_string(), vec![&interface_ptr, &dual_module])
2570            .unwrap();
2571    }
2572
2573    /// this test helps observe bugs of fast clear, by removing snapshot: snapshot will do the clear automatically
2574    #[test]
2575    fn dual_module_serial_fast_clear_1() {
2576        // cargo test dual_module_serial_fast_clear_1 -- --nocapture
2577        let half_weight = 500;
2578        let mut code = CodeCapacityPlanarCode::new(7, 0.1, half_weight);
2579        // create dual module
2580        let initializer = code.get_initializer();
2581        let mut dual_module = DualModuleSerial::new_empty(&initializer);
2582        // try to work on a simple syndrome
2583        code.vertices[18].is_defect = true;
2584        code.vertices[26].is_defect = true;
2585        code.vertices[34].is_defect = true;
2586        let interface_ptr = DualModuleInterfacePtr::new_load(&code.get_syndrome(), &mut dual_module);
2587        // create dual nodes and grow them by half length
2588        let dual_node_18_ptr = interface_ptr.read_recursive().nodes[0].clone().unwrap();
2589        let dual_node_26_ptr = interface_ptr.read_recursive().nodes[1].clone().unwrap();
2590        let dual_node_34_ptr = interface_ptr.read_recursive().nodes[2].clone().unwrap();
2591        // grow the maximum
2592        let group_max_update_length = dual_module.compute_maximum_update_length();
2593        assert_eq!(
2594            group_max_update_length.get_none_zero_growth(),
2595            Some(half_weight),
2596            "unexpected: {:?}",
2597            group_max_update_length
2598        );
2599        interface_ptr.grow(half_weight, &mut dual_module);
2600        assert_eq!(interface_ptr.sum_dual_variables(), 3 * half_weight);
2601        // cannot grow anymore, find out the reason
2602        let group_max_update_length = dual_module.compute_maximum_update_length();
2603        assert!(
2604            group_max_update_length
2605                .peek()
2606                .unwrap()
2607                .is_conflicting(&dual_node_18_ptr, &dual_node_26_ptr)
2608                || group_max_update_length
2609                    .peek()
2610                    .unwrap()
2611                    .is_conflicting(&dual_node_26_ptr, &dual_node_34_ptr),
2612            "unexpected: {:?}",
2613            group_max_update_length
2614        );
2615        // first match 18 and 26
2616        interface_ptr.set_grow_state(&dual_node_18_ptr, DualNodeGrowState::Stay, &mut dual_module);
2617        interface_ptr.set_grow_state(&dual_node_26_ptr, DualNodeGrowState::Stay, &mut dual_module);
2618        // cannot grow anymore, find out the reason
2619        let group_max_update_length = dual_module.compute_maximum_update_length();
2620        assert!(
2621            group_max_update_length
2622                .peek()
2623                .unwrap()
2624                .is_conflicting(&dual_node_26_ptr, &dual_node_34_ptr),
2625            "unexpected: {:?}",
2626            group_max_update_length
2627        );
2628        // 34 touches 26, so it will grow the tree by absorbing 18 and 26
2629        interface_ptr.set_grow_state(&dual_node_18_ptr, DualNodeGrowState::Grow, &mut dual_module);
2630        interface_ptr.set_grow_state(&dual_node_26_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2631        // grow the maximum
2632        let group_max_update_length = dual_module.compute_maximum_update_length();
2633        assert_eq!(
2634            group_max_update_length.get_none_zero_growth(),
2635            Some(half_weight),
2636            "unexpected: {:?}",
2637            group_max_update_length
2638        );
2639        interface_ptr.grow(half_weight, &mut dual_module);
2640        assert_eq!(interface_ptr.sum_dual_variables(), 4 * half_weight);
2641        // cannot grow anymore, find out the reason
2642        let group_max_update_length = dual_module.compute_maximum_update_length();
2643        assert!(
2644            group_max_update_length
2645                .peek()
2646                .unwrap()
2647                .is_conflicting(&dual_node_18_ptr, &dual_node_34_ptr),
2648            "unexpected: {:?}",
2649            group_max_update_length
2650        );
2651        // for a blossom because 18 and 34 come from the same alternating tree
2652        let dual_node_blossom = interface_ptr.create_blossom(
2653            vec![dual_node_18_ptr.clone(), dual_node_26_ptr.clone(), dual_node_34_ptr.clone()],
2654            vec![],
2655            &mut dual_module,
2656        );
2657        // grow the maximum
2658        let group_max_update_length = dual_module.compute_maximum_update_length();
2659        assert_eq!(
2660            group_max_update_length.get_none_zero_growth(),
2661            Some(2 * half_weight),
2662            "unexpected: {:?}",
2663            group_max_update_length
2664        );
2665        interface_ptr.grow(2 * half_weight, &mut dual_module);
2666        // grow the maximum
2667        let group_max_update_length = dual_module.compute_maximum_update_length();
2668        assert_eq!(
2669            group_max_update_length.get_none_zero_growth(),
2670            Some(2 * half_weight),
2671            "unexpected: {:?}",
2672            group_max_update_length
2673        );
2674        interface_ptr.grow(2 * half_weight, &mut dual_module);
2675        // cannot grow anymore, find out the reason
2676        let group_max_update_length = dual_module.compute_maximum_update_length();
2677        assert!(
2678            group_max_update_length.peek().unwrap().get_touching_virtual() == Some((dual_node_blossom.clone(), 23))
2679                || group_max_update_length.peek().unwrap().get_touching_virtual() == Some((dual_node_blossom.clone(), 39)),
2680            "unexpected: {:?}",
2681            group_max_update_length
2682        );
2683        // blossom touches virtual boundary, so it's matched
2684        interface_ptr.set_grow_state(&dual_node_blossom, DualNodeGrowState::Stay, &mut dual_module);
2685        let group_max_update_length = dual_module.compute_maximum_update_length();
2686        assert!(
2687            group_max_update_length.is_empty(),
2688            "unexpected: {:?}",
2689            group_max_update_length
2690        );
2691        // also test the reverse procedure: shrinking and expanding blossom
2692        interface_ptr.set_grow_state(&dual_node_blossom, DualNodeGrowState::Shrink, &mut dual_module);
2693        let group_max_update_length = dual_module.compute_maximum_update_length();
2694        assert_eq!(
2695            group_max_update_length.get_none_zero_growth(),
2696            Some(2 * half_weight),
2697            "unexpected: {:?}",
2698            group_max_update_length
2699        );
2700        interface_ptr.grow(2 * half_weight, &mut dual_module);
2701        // before expand
2702        interface_ptr.set_grow_state(&dual_node_blossom, DualNodeGrowState::Shrink, &mut dual_module);
2703        let group_max_update_length = dual_module.compute_maximum_update_length();
2704        assert_eq!(
2705            group_max_update_length.get_none_zero_growth(),
2706            Some(2 * half_weight),
2707            "unexpected: {:?}",
2708            group_max_update_length
2709        );
2710        interface_ptr.grow(2 * half_weight, &mut dual_module);
2711        // cannot shrink anymore, find out the reason
2712        let group_max_update_length = dual_module.compute_maximum_update_length();
2713        assert!(
2714            group_max_update_length.peek().unwrap() == &MaxUpdateLength::BlossomNeedExpand(dual_node_blossom.clone()),
2715            "unexpected: {:?}",
2716            group_max_update_length
2717        );
2718        // expand blossom
2719        interface_ptr.expand_blossom(dual_node_blossom, &mut dual_module);
2720        // regain access to underlying nodes
2721        interface_ptr.set_grow_state(&dual_node_18_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2722        interface_ptr.set_grow_state(&dual_node_26_ptr, DualNodeGrowState::Grow, &mut dual_module);
2723        interface_ptr.set_grow_state(&dual_node_34_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2724        let group_max_update_length = dual_module.compute_maximum_update_length();
2725        assert_eq!(
2726            group_max_update_length.get_none_zero_growth(),
2727            Some(2 * half_weight),
2728            "unexpected: {:?}",
2729            group_max_update_length
2730        );
2731        interface_ptr.grow(2 * half_weight, &mut dual_module);
2732        assert_eq!(interface_ptr.sum_dual_variables(), 2 * half_weight);
2733    }
2734
2735    #[test]
2736    fn dual_module_grow_iterative_1() {
2737        // cargo test dual_module_grow_iterative_1 -- --nocapture
2738        let visualize_filename = "dual_module_grow_iterative_1.json".to_string();
2739        let half_weight = 500;
2740        let mut code = CodeCapacityPlanarCode::new(11, 0.1, half_weight);
2741        let mut visualizer = Visualizer::new(
2742            Some(visualize_data_folder() + visualize_filename.as_str()),
2743            code.get_positions(),
2744            true,
2745        )
2746        .unwrap();
2747        print_visualize_link(visualize_filename.clone());
2748        // create dual module
2749        let initializer = code.get_initializer();
2750        let mut dual_module = DualModuleSerial::new_empty(&initializer);
2751        // try to work on a simple syndrome
2752        code.vertices[39].is_defect = true;
2753        code.vertices[65].is_defect = true;
2754        code.vertices[87].is_defect = true;
2755        let interface_ptr = DualModuleInterfacePtr::new_load(&code.get_syndrome(), &mut dual_module);
2756        visualizer
2757            .snapshot_combined("syndrome".to_string(), vec![&interface_ptr, &dual_module])
2758            .unwrap();
2759        // create dual nodes and grow them by half length
2760        interface_ptr.grow_iterative(4 * half_weight, &mut dual_module);
2761        visualizer
2762            .snapshot_combined("grow".to_string(), vec![&interface_ptr, &dual_module])
2763            .unwrap();
2764        assert_eq!(interface_ptr.sum_dual_variables(), 3 * 4 * half_weight);
2765        let dual_node_39_ptr = interface_ptr.read_recursive().nodes[0].clone().unwrap();
2766        let dual_node_65_ptr = interface_ptr.read_recursive().nodes[1].clone().unwrap();
2767        let dual_node_87_ptr = interface_ptr.read_recursive().nodes[2].clone().unwrap();
2768        interface_ptr.set_grow_state(&dual_node_39_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2769        interface_ptr.set_grow_state(&dual_node_65_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2770        interface_ptr.set_grow_state(&dual_node_87_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2771        interface_ptr.grow_iterative(4 * half_weight, &mut dual_module);
2772        visualizer
2773            .snapshot_combined("shrink".to_string(), vec![&interface_ptr, &dual_module])
2774            .unwrap();
2775        assert_eq!(interface_ptr.sum_dual_variables(), 0);
2776    }
2777
2778    #[test]
2779    fn dual_module_debug_1() {
2780        // cargo test dual_module_debug_1 -- --nocapture
2781        let visualize_filename = "dual_module_debug_1.json".to_string();
2782        let defect_vertices = vec![
2783            6, 7, 17, 18, 21, 27, 28, 42, 43, 49, 51, 52, 54, 55, 61, 63, 65, 67, 76, 78, 80, 86, 103, 110, 113, 114, 116,
2784            122, 125, 127,
2785        ];
2786        primal_module_serial_basic_standard_syndrome(11, visualize_filename, defect_vertices, 23);
2787    }
2788
2789    #[test]
2790    fn dual_module_debug_2() {
2791        // cargo test dual_module_debug_2 -- --nocapture
2792        let visualize_filename = "dual_module_debug_2.json".to_string();
2793        let defect_vertices = vec![
2794            5, 12, 16, 19, 21, 38, 42, 43, 49, 56, 61, 67, 72, 73, 74, 75, 76, 88, 89, 92, 93, 99, 105, 112, 117, 120, 124,
2795            129,
2796        ];
2797        primal_module_serial_basic_standard_syndrome(11, visualize_filename, defect_vertices, 22);
2798    }
2799
2800    #[test]
2801    fn dual_module_erasure_1() {
2802        // cargo test dual_module_erasure_1 -- --nocapture
2803        let visualize_filename = "dual_module_erasure_1.json".to_string();
2804        let half_weight = 500;
2805        let mut code = CodeCapacityPlanarCode::new(11, 0.1, half_weight);
2806        let mut visualizer = Visualizer::new(
2807            Some(visualize_data_folder() + visualize_filename.as_str()),
2808            code.get_positions(),
2809            true,
2810        )
2811        .unwrap();
2812        print_visualize_link(visualize_filename.clone());
2813        // create dual module
2814        let initializer = code.get_initializer();
2815        let mut dual_module = DualModuleSerial::new_empty(&initializer);
2816        // try to work on a simple syndrome
2817        code.vertices[64].is_defect = true;
2818        code.set_erasures(&[110, 78, 57, 142, 152, 163, 164]);
2819        let interface_ptr = DualModuleInterfacePtr::new_load(&code.get_syndrome(), &mut dual_module);
2820        visualizer
2821            .snapshot_combined("syndrome".to_string(), vec![&interface_ptr, &dual_module])
2822            .unwrap();
2823        // create dual nodes and grow them by half length
2824        for _ in 0..3 {
2825            interface_ptr.grow_iterative(2 * half_weight, &mut dual_module);
2826            visualizer
2827                .snapshot_combined("grow".to_string(), vec![&interface_ptr, &dual_module])
2828                .unwrap();
2829        }
2830        // set them to shrink
2831        let dual_node_ptr = interface_ptr.read_recursive().nodes[0].clone().unwrap();
2832        interface_ptr.set_grow_state(&dual_node_ptr, DualNodeGrowState::Shrink, &mut dual_module);
2833        // shrink them back, to make sure the operation is reversible
2834        for _ in 0..3 {
2835            interface_ptr.grow_iterative(2 * half_weight, &mut dual_module);
2836            visualizer
2837                .snapshot_combined("shrink".to_string(), vec![&interface_ptr, &dual_module])
2838                .unwrap();
2839        }
2840        // cancel the erasures and grow the dual module in normal case, this should automatically clear the erasures
2841        dual_module.clear();
2842        // no erasures this time, to test if the module recovers correctly
2843        let interface_ptr = DualModuleInterfacePtr::new_load(
2844            &SyndromePattern::new_vertices(code.get_syndrome().defect_vertices),
2845            &mut dual_module,
2846        );
2847        visualizer
2848            .snapshot_combined("after clear".to_string(), vec![&interface_ptr, &dual_module])
2849            .unwrap();
2850        for _ in 0..3 {
2851            interface_ptr.grow_iterative(2 * half_weight, &mut dual_module);
2852            visualizer
2853                .snapshot_combined("grow".to_string(), vec![&interface_ptr, &dual_module])
2854                .unwrap();
2855        }
2856    }
2857}