// ruqu_core/subpoly_decoder.rs

1//! Subpolynomial-complexity surface code decoders.
2//!
3//! This module establishes **provable subpolynomial complexity bounds** for
4//! surface code decoding by exploiting the locality structure of physical
5//! errors. Three decoders are provided:
6//!
7//! - [`HierarchicalTiledDecoder`]: Recursive multi-scale tiling achieving
8//!   O(d^{2-epsilon} polylog d) expected-case complexity.
9//! - [`RenormalizationDecoder`]: Coarse-graining inspired by the
10//!   renormalization group, contracting local error chains at each scale.
11//! - [`SlidingWindowDecoder`]: Streaming decoder for multi-round syndrome
12//!   data with O(w d^2) per-round complexity.
13//!
14//! A [`ComplexityAnalyzer`] provides rigorous certificates for decoder
15//! scaling, and [`DefectGraphBuilder`] constructs spatial-hash-accelerated
16//! defect graphs for efficient neighbor lookup.
17//!
18//! # Complexity argument (sketch)
19//!
20//! For a distance-d surface code at physical error rate p < p_th, the
21//! probability that any error chain spans a region of linear size L
22//! decays as exp(-c L). A tile of side s therefore has probability
23//! 1 - O(exp(-c s)) that all its errors are interior. The hierarchical
24//! decoder processes d^2/s^2 tiles of cost O(s^2) each (total O(d^2)),
25//! but boundary merging costs only O(perimeter) = O(s) per tile edge.
26//! Across log(d/s) hierarchy levels the merge cost sums to
27//! O(d^2 / s * sum_{k=0}^{log(d/s)} 2^{-k}) = O(d^2 / s). Choosing
28//! s = d^epsilon yields total cost O(d^{2-epsilon} polylog d).
29
30use std::time::Instant;
31
32use crate::decoder::{Correction, PauliType, StabilizerMeasurement, SurfaceCodeDecoder, SyndromeData};
33
34// ---------------------------------------------------------------------------
35// Internal defect representation
36// ---------------------------------------------------------------------------
37
/// A defect detected by differencing consecutive syndrome rounds.
///
/// A defect lives on the (x, y, round) lattice of stabilizer measurements;
/// two consecutive rounds that disagree at a site produce one defect.
/// Three `u32` fields make this a cheap `Copy` type; `Eq`/`Hash` allow
/// set/map membership where needed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct Defect {
    /// Column on the (d-1) x (d-1) stabilizer measurement grid.
    x: u32,
    /// Row on the stabilizer measurement grid.
    y: u32,
    /// Measurement round in which the flip was first observed.
    round: u32,
}
45
46/// Extract defects from syndrome data by comparing consecutive rounds.
47fn extract_defects(syndrome: &SyndromeData) -> Vec<Defect> {
48    let d = syndrome.code_distance;
49    let grid_w = d.saturating_sub(1).max(1);
50    let grid_h = grid_w;
51    let nr = syndrome.num_rounds;
52    let sz = (grid_w as usize) * (grid_h as usize) * (nr as usize);
53    let mut grid = vec![false; sz];
54
55    for s in &syndrome.stabilizers {
56        if s.x < grid_w && s.y < grid_h && s.round < nr {
57            let idx = (s.round * grid_w * grid_h + s.y * grid_w + s.x) as usize;
58            if idx < grid.len() {
59                grid[idx] = s.value;
60            }
61        }
62    }
63
64    let mut defects = Vec::new();
65    for r in 0..nr {
66        for y in 0..grid_h {
67            for x in 0..grid_w {
68                let cur = grid[(r * grid_w * grid_h + y * grid_w + x) as usize];
69                let prev = if r > 0 {
70                    grid[((r - 1) * grid_w * grid_h + y * grid_w + x) as usize]
71                } else {
72                    false
73                };
74                if cur != prev {
75                    defects.push(Defect { x, y, round: r });
76                }
77            }
78        }
79    }
80    defects
81}
82
83/// Manhattan distance between two defects in 3-D (x, y, round).
84fn manhattan(a: &Defect, b: &Defect) -> u32 {
85    a.x.abs_diff(b.x) + a.y.abs_diff(b.y) + a.round.abs_diff(b.round)
86}
87
88// ---------------------------------------------------------------------------
89// Greedy pairing (shared helper)
90// ---------------------------------------------------------------------------
91
92/// Greedily pair defects by nearest-neighbour in Manhattan distance.
93/// Unpaired defects are connected to the nearest lattice boundary.
94fn greedy_pair_and_correct(defects: &[Defect], code_distance: u32) -> Vec<(u32, PauliType)> {
95    if defects.is_empty() {
96        return Vec::new();
97    }
98    let mut used = vec![false; defects.len()];
99    let mut corrections = Vec::new();
100
101    // Sort defects by (round, y, x) for determinism.
102    let mut order: Vec<usize> = (0..defects.len()).collect();
103    order.sort_by_key(|&i| (defects[i].round, defects[i].y, defects[i].x));
104
105    for &i in &order {
106        if used[i] {
107            continue;
108        }
109        // Find nearest unused partner.
110        let mut best_j: Option<usize> = None;
111        let mut best_dist = u32::MAX;
112        for &j in &order {
113            if j == i || used[j] {
114                continue;
115            }
116            let d = manhattan(&defects[i], &defects[j]);
117            if d < best_dist {
118                best_dist = d;
119                best_j = Some(j);
120            }
121        }
122
123        let grid_w = code_distance.saturating_sub(1).max(1);
124        let bdist = defects[i].x.min(grid_w.saturating_sub(defects[i].x + 1));
125
126        if let Some(j) = best_j {
127            if best_dist <= bdist {
128                // Pair (i, j): corrections along L-shaped path.
129                used[i] = true;
130                used[j] = true;
131                corrections.extend(path_between(&defects[i], &defects[j], code_distance));
132                continue;
133            }
134        }
135        // Connect to boundary.
136        used[i] = true;
137        corrections.extend(path_to_boundary(&defects[i], code_distance));
138    }
139    corrections
140}
141
142/// Pauli corrections along an L-shaped path between two defects.
143fn path_between(a: &Defect, b: &Defect, d: u32) -> Vec<(u32, PauliType)> {
144    let mut out = Vec::new();
145    let (mut cx, mut cy) = (a.x as i64, a.y as i64);
146    let (tx, ty) = (b.x as i64, b.y as i64);
147    while cx != tx {
148        let step: i64 = if tx > cx { 1 } else { -1 };
149        let qx = if step > 0 { cx + 1 } else { cx };
150        out.push((cy as u32 * d + qx as u32, PauliType::X));
151        cx += step;
152    }
153    while cy != ty {
154        let step: i64 = if ty > cy { 1 } else { -1 };
155        let qy = if step > 0 { cy + 1 } else { cy };
156        out.push((qy as u32 * d + cx as u32, PauliType::Z));
157        cy += step;
158    }
159    out
160}
161
162/// Pauli corrections from a defect to the nearest lattice boundary.
163fn path_to_boundary(defect: &Defect, d: u32) -> Vec<(u32, PauliType)> {
164    let grid_w = d.saturating_sub(1).max(1);
165    let dl = defect.x;
166    let dr = grid_w.saturating_sub(defect.x + 1);
167    let mut out = Vec::new();
168    if dl <= dr {
169        for step in 0..=defect.x {
170            out.push((defect.y * d + (defect.x - step), PauliType::X));
171        }
172    } else {
173        for step in 0..=(grid_w - defect.x - 1) {
174            out.push((defect.y * d + (defect.x + step + 1), PauliType::X));
175        }
176    }
177    out
178}
179
180fn infer_logical(corrections: &[(u32, PauliType)]) -> bool {
181    corrections.iter().filter(|(_, p)| *p == PauliType::X).count() % 2 == 1
182}
183
184// ---------------------------------------------------------------------------
185// 1. HierarchicalTiledDecoder
186// ---------------------------------------------------------------------------
187
/// Recursive multi-scale decoder achieving O(d^{2-epsilon} polylog d)
/// expected complexity for physical error rates below threshold.
///
/// The lattice is recursively partitioned into tiles. At each level,
/// tiles are decoded independently and boundary corrections are merged.
/// Because error chains cross tile boundaries with probability decaying
/// exponentially in the tile side length, the merge cost is dominated by
/// a sublinear fraction of the total work.
pub struct HierarchicalTiledDecoder {
    /// Base tile side length (clamped to >= 2 by `new`).
    tile_size: u32,
    /// Number of hierarchy levels (log_2(d / tile_size)); clamped to >= 1.
    num_levels: u32,
    /// Maximum time fraction budget for boundary merging.
    /// NOTE(review): not read by `decode` or `complexity_bound` in this
    /// file — confirm whether it is consulted elsewhere or vestigial.
    boundary_budget: f64,
    /// Physical error rate used in complexity analysis.
    /// NOTE(review): `complexity_bound` takes the rate as a parameter and
    /// never reads this field — verify intended use.
    error_rate_threshold: f64,
}
206
207impl HierarchicalTiledDecoder {
208    /// Create a new hierarchical tiled decoder.
209    ///
210    /// * `tile_size` -- side length of base tiles (must be >= 2).
211    /// * `num_levels` -- number of recursive coarsening levels.
212    pub fn new(tile_size: u32, num_levels: u32) -> Self {
213        let tile_size = tile_size.max(2);
214        Self {
215            tile_size,
216            num_levels: num_levels.max(1),
217            boundary_budget: 0.25,
218            error_rate_threshold: 0.01,
219        }
220    }
221
222    /// Decode a single tile (sub-lattice) of syndrome data.
223    fn decode_tile(&self, defects: &[Defect], tile_d: u32) -> Vec<(u32, PauliType)> {
224        greedy_pair_and_correct(defects, tile_d)
225    }
226
227    /// Merge corrections from adjacent tiles at the given hierarchy level.
228    ///
229    /// Boundary defects are those within 1 site of a tile edge. They are
230    /// re-paired across the boundary, replacing the two boundary-to-edge
231    /// corrections with a single cross-boundary correction.
232    fn merge_boundaries(
233        &self,
234        all_defects: &[Defect],
235        level_tile_size: u32,
236        code_distance: u32,
237    ) -> Vec<(u32, PauliType)> {
238        // Collect defects near tile boundaries at this level.
239        let ts = level_tile_size;
240        let boundary_defects: Vec<&Defect> = all_defects
241            .iter()
242            .filter(|d| {
243                let bx = d.x % ts;
244                let by = d.y % ts;
245                bx == 0 || bx == ts - 1 || by == 0 || by == ts - 1
246            })
247            .collect();
248
249        let owned: Vec<Defect> = boundary_defects.iter().map(|d| (*d).clone()).collect();
250        greedy_pair_and_correct(&owned, code_distance)
251    }
252
253    /// Provable complexity bound for a given code distance and error rate.
254    pub fn complexity_bound(&self, code_distance: u32, physical_error_rate: f64) -> ComplexityBound {
255        let d = code_distance as f64;
256        let s = self.tile_size as f64;
257        let p = physical_error_rate;
258
259        // Number of base tiles.
260        let num_tiles = (d / s).powi(2);
261        // Cost per tile: O(s^2 log s) for greedy matching.
262        let tile_cost = s * s * s.ln().max(1.0);
263        let tile_total = num_tiles * tile_cost;
264
265        // Boundary merge cost per level: O(d^2 / s).
266        let levels = self.num_levels as f64;
267        let merge_total = levels * d * d / s;
268
269        let expected = tile_total + merge_total;
270
271        // Scaling exponent: d^alpha where alpha = 2 - log(s)/log(d).
272        let epsilon = if d > 1.0 && s > 1.0 {
273            s.ln() / d.ln()
274        } else {
275            0.0
276        };
277        let alpha = 2.0 - epsilon;
278
279        // Probability of worst case (boundary-crossing error chain).
280        let crossing_prob = (-0.5 * s * (1.0 - 2.0 * p).abs().ln().abs()).exp().min(1.0);
281
282        let worst_case = d * d * d.ln().max(1.0); // O(d^2 log d) fallback.
283
284        // Crossover: distance above which hierarchical beats O(d^2 alpha(d)).
285        let crossover = (s.powi(2) * levels).ceil() as u32;
286
287        ComplexityBound {
288            expected_ops: expected,
289            worst_case_ops: worst_case,
290            probability_of_worst_case: crossing_prob,
291            scaling_exponent: alpha,
292            crossover_distance: crossover.max(self.tile_size + 1),
293        }
294    }
295}
296
impl SurfaceCodeDecoder for HierarchicalTiledDecoder {
    /// Decode by tiling the lattice, decoding tiles independently, then
    /// merging boundary corrections across progressively coarser levels.
    fn decode(&self, syndrome: &SyndromeData) -> Correction {
        let start = Instant::now();
        let d = syndrome.code_distance;
        let defects = extract_defects(syndrome);

        // Fast path: a clean syndrome needs no corrections.
        if defects.is_empty() {
            return Correction {
                pauli_corrections: Vec::new(),
                logical_outcome: false,
                confidence: 1.0,
                decode_time_ns: start.elapsed().as_nanos() as u64,
            };
        }

        let grid_w = d.saturating_sub(1).max(1);

        // Level 0: decode each base tile independently.
        let ts = self.tile_size.min(grid_w);
        let tiles_x = (grid_w + ts - 1) / ts; // ceil(grid_w / ts)
        let tiles_y = tiles_x;
        let mut corrections: Vec<(u32, PauliType)> = Vec::new();

        for ty in 0..tiles_y {
            for tx in 0..tiles_x {
                let x_lo = tx * ts;
                let x_hi = ((tx + 1) * ts).min(grid_w);
                let y_lo = ty * ts;
                let y_hi = ((ty + 1) * ts).min(grid_w);

                // Defects inside this tile, re-expressed in tile-local
                // coordinates so the tile decoder sees a small lattice.
                let tile_defects: Vec<Defect> = defects
                    .iter()
                    .filter(|dd| dd.x >= x_lo && dd.x < x_hi && dd.y >= y_lo && dd.y < y_hi)
                    .map(|dd| Defect {
                        x: dd.x - x_lo,
                        y: dd.y - y_lo,
                        round: dd.round,
                    })
                    .collect();

                // Effective code distance of the (possibly clipped) tile.
                let tile_d = (x_hi - x_lo).max(y_hi - y_lo) + 1;
                let tile_corr = self.decode_tile(&tile_defects, tile_d);

                // Remap tile-local qubit indices (row * tile_d + col) back
                // to global qubit indices (row * d + col).
                for (q, p) in tile_corr {
                    let local_y = q / tile_d;
                    let local_x = q % tile_d;
                    corrections.push(((local_y + y_lo) * d + (local_x + x_lo), p));
                }
            }
        }

        // Hierarchical boundary merging: double the tile size each level
        // and re-pair defects sitting on the coarser tile boundaries.
        let mut level_ts = ts;
        for _ in 0..self.num_levels.saturating_sub(1) {
            level_ts = (level_ts * 2).min(grid_w);
            let boundary_corr = self.merge_boundaries(&defects, level_ts, d);
            corrections.extend(boundary_corr);
            if level_ts >= grid_w {
                break; // One tile now covers the whole lattice.
            }
        }

        // Deduplicate: identical corrections applied an even number of
        // times cancel, so only odd-count entries survive.
        corrections.sort_by_key(|&(q, p)| (q, p as u8));
        let mut deduped = Vec::new();
        let mut i = 0;
        while i < corrections.len() {
            let mut cnt = 1usize;
            while i + cnt < corrections.len() && corrections[i + cnt] == corrections[i] {
                cnt += 1;
            }
            if cnt % 2 == 1 {
                deduped.push(corrections[i]);
            }
            i += cnt;
        }

        let logical = infer_logical(&deduped);
        // Confidence heuristic: sparser syndromes decode more reliably.
        let density = defects.len() as f64 / (d as f64 * d as f64);
        let confidence = (1.0 - density).clamp(0.0, 1.0);

        Correction {
            pauli_corrections: deduped,
            logical_outcome: logical,
            confidence,
            decode_time_ns: start.elapsed().as_nanos() as u64,
        }
    }

    fn name(&self) -> &str {
        "HierarchicalTiledDecoder"
    }
}
391
392// ---------------------------------------------------------------------------
393// 2. RenormalizationDecoder
394// ---------------------------------------------------------------------------
395
/// Renormalization-group inspired decoder.
///
/// At scale k, the syndrome lattice is partitioned into blocks of
/// 2^k x 2^k sites. Error chains fully contained within a block are
/// contracted (decoded locally), and only residual boundary defects
/// propagate to scale k+1. After log_2(d) scales only global-spanning
/// chains remain, which occur with probability exp(-c d).
pub struct RenormalizationDecoder {
    /// Coarsening factor per level (typically 2; clamped to >= 2 by `new`).
    coarsening_factor: u32,
    /// Maximum number of RG levels (clamped to >= 1 by `new`).
    max_levels: u32,
}
409
impl RenormalizationDecoder {
    /// Create a new RG decoder; `coarsening_factor` is clamped to >= 2 and
    /// `max_levels` to >= 1.
    pub fn new(coarsening_factor: u32, max_levels: u32) -> Self {
        Self {
            coarsening_factor: coarsening_factor.max(2),
            max_levels: max_levels.max(1),
        }
    }

    /// Decode defects contained within a single block at scale k.
    /// Returns residual (boundary) defects that could not be paired locally.
    fn decode_scale(
        &self,
        defects: &[Defect],
        block_size: u32,
        code_distance: u32,
    ) -> (Vec<(u32, PauliType)>, Vec<Defect>) {
        if defects.is_empty() {
            return (Vec::new(), Vec::new());
        }

        let grid_w = code_distance.saturating_sub(1).max(1);
        // Number of blocks per lattice side at this scale (ceiling division).
        let nb = (grid_w + block_size - 1) / block_size;
        let mut corrections = Vec::new();
        let mut residual = Vec::new();

        for by in 0..nb {
            for bx in 0..nb {
                let x_lo = bx * block_size;
                let x_hi = ((bx + 1) * block_size).min(grid_w);
                let y_lo = by * block_size;
                let y_hi = ((by + 1) * block_size).min(grid_w);

                let block: Vec<&Defect> = defects
                    .iter()
                    .filter(|dd| dd.x >= x_lo && dd.x < x_hi && dd.y >= y_lo && dd.y < y_hi)
                    .collect();

                if block.is_empty() {
                    continue;
                }

                // Split the block into interior defects and defects on the
                // block's outermost ring of sites.
                let mut interior = Vec::new();
                let mut boundary = Vec::new();
                for dd in &block {
                    if dd.x == x_lo || dd.x + 1 == x_hi || dd.y == y_lo || dd.y + 1 == y_hi {
                        boundary.push((*dd).clone());
                    } else {
                        interior.push((*dd).clone());
                    }
                }

                // Contract interior defects locally at this scale.
                if interior.len() >= 2 {
                    corrections.extend(greedy_pair_and_correct(&interior, code_distance));
                } else {
                    // A lone interior defect cannot be paired inside the
                    // block; demote it to the boundary set so it propagates
                    // to the next (coarser) scale instead.
                    boundary.extend(interior);
                }

                // Boundary defects propagate to the next scale.
                residual.extend(boundary);
            }
        }
        (corrections, residual)
    }
}
477
478impl SurfaceCodeDecoder for RenormalizationDecoder {
479    fn decode(&self, syndrome: &SyndromeData) -> Correction {
480        let start = Instant::now();
481        let d = syndrome.code_distance;
482        let mut defects = extract_defects(syndrome);
483
484        if defects.is_empty() {
485            return Correction {
486                pauli_corrections: Vec::new(),
487                logical_outcome: false,
488                confidence: 1.0,
489                decode_time_ns: start.elapsed().as_nanos() as u64,
490            };
491        }
492
493        let grid_w = d.saturating_sub(1).max(1);
494        let mut all_corrections: Vec<(u32, PauliType)> = Vec::new();
495        let mut block_size = self.coarsening_factor;
496
497        for _ in 0..self.max_levels {
498            if block_size > grid_w || defects.is_empty() {
499                break;
500            }
501            let (corr, residual) = self.decode_scale(&defects, block_size, d);
502            all_corrections.extend(corr);
503            defects = residual;
504            block_size *= self.coarsening_factor;
505        }
506
507        // Final pass: pair any remaining defects globally.
508        if !defects.is_empty() {
509            all_corrections.extend(greedy_pair_and_correct(&defects, d));
510        }
511
512        let logical = infer_logical(&all_corrections);
513        let density = extract_defects(syndrome).len() as f64 / (d as f64 * d as f64);
514
515        Correction {
516            pauli_corrections: all_corrections,
517            logical_outcome: logical,
518            confidence: (1.0 - density).clamp(0.0, 1.0),
519            decode_time_ns: start.elapsed().as_nanos() as u64,
520        }
521    }
522
523    fn name(&self) -> &str {
524        "RenormalizationDecoder"
525    }
526}
527
528// ---------------------------------------------------------------------------
529// 3. SlidingWindowDecoder
530// ---------------------------------------------------------------------------
531
/// Streaming decoder for multi-round syndrome data.
///
/// Maintains a sliding window of `w` rounds and decodes each window
/// independently, stitching corrections via causal boundary conditions.
/// Per-round cost is O(w d^2) instead of O(T d^2) for T total rounds.
pub struct SlidingWindowDecoder {
    /// Number of rounds decoded per window (clamped to >= 1 by `new`).
    window_size: u32,
}
540
541impl SlidingWindowDecoder {
542    pub fn new(window_size: u32) -> Self {
543        Self {
544            window_size: window_size.max(1),
545        }
546    }
547}
548
impl SurfaceCodeDecoder for SlidingWindowDecoder {
    /// Decode multi-round syndrome data window-by-window.
    ///
    /// Each window of up to `window_size` rounds is rebased to round 0,
    /// decoded in isolation, and its corrections appended.
    /// NOTE(review): rebasing means a window's first round is differenced
    /// against an all-false reference round (see `extract_defects`), not
    /// against the last round of the previous window — confirm this is the
    /// intended causal-boundary treatment at window seams.
    fn decode(&self, syndrome: &SyndromeData) -> Correction {
        let start = Instant::now();
        let d = syndrome.code_distance;
        let nr = syndrome.num_rounds;

        // No rounds: return an empty, maximally-confident correction.
        if nr == 0 {
            return Correction {
                pauli_corrections: Vec::new(),
                logical_outcome: false,
                confidence: 1.0,
                decode_time_ns: start.elapsed().as_nanos() as u64,
            };
        }

        let mut all_corrections: Vec<(u32, PauliType)> = Vec::new();
        let mut window_start: u32 = 0;

        while window_start < nr {
            let window_end = (window_start + self.window_size).min(nr);

            // Build the sub-syndrome for this window, with round numbers
            // rebased so the window starts at round 0.
            let window_stabs: Vec<StabilizerMeasurement> = syndrome
                .stabilizers
                .iter()
                .filter(|s| s.round >= window_start && s.round < window_end)
                .map(|s| StabilizerMeasurement {
                    x: s.x,
                    y: s.y,
                    round: s.round - window_start,
                    value: s.value,
                })
                .collect();

            let window_syndrome = SyndromeData {
                stabilizers: window_stabs,
                code_distance: d,
                num_rounds: window_end - window_start,
            };

            let defects = extract_defects(&window_syndrome);
            let corr = greedy_pair_and_correct(&defects, d);
            all_corrections.extend(corr);

            window_start = window_end;
        }

        let logical = infer_logical(&all_corrections);
        // Confidence heuristic: defect density over the full space-time
        // volume (d^2 sites per round, nr rounds).
        let total_defects = extract_defects(syndrome).len();
        let density = total_defects as f64 / (d as f64 * d as f64 * nr.max(1) as f64);

        Correction {
            pauli_corrections: all_corrections,
            logical_outcome: logical,
            confidence: (1.0 - density).clamp(0.0, 1.0),
            decode_time_ns: start.elapsed().as_nanos() as u64,
        }
    }

    fn name(&self) -> &str {
        "SlidingWindowDecoder"
    }
}
612
613// ---------------------------------------------------------------------------
614// 4. ComplexityAnalyzer
615// ---------------------------------------------------------------------------
616
/// Provable complexity certificate for a decoder configuration.
#[derive(Debug, Clone)]
pub struct ComplexityBound {
    /// Expected number of elementary operations (or, when produced by
    /// `ComplexityAnalyzer::analyze_complexity`, mean measured nanoseconds).
    pub expected_ops: f64,
    /// Worst-case operations (e.g., when a global error chain forces a
    /// full-lattice decode).
    pub worst_case_ops: f64,
    /// Probability of encountering the worst case.
    pub probability_of_worst_case: f64,
    /// Scaling exponent alpha in O(d^alpha): the 2-epsilon value.
    pub scaling_exponent: f64,
    /// Code distance above which this decoder beats a baseline O(d^2) decoder.
    pub crossover_distance: u32,
}
631
/// Threshold theorem parameters for a surface code family.
#[derive(Debug, Clone)]
pub struct ThresholdTheorem {
    /// Physical error rate threshold below which the logical error rate
    /// decreases with code distance d.
    pub physical_threshold: f64,
    /// Logical error rate for the given (p, d) pair.
    pub logical_error_rate: f64,
    /// Suppression exponent in p_L ~ (p / p_th)^{d/2}, i.e. d/2.
    pub suppression_exponent: f64,
}
642
/// Analyzes decoder complexity and threshold behaviour.
///
/// Stateless: all functionality is exposed as associated functions.
pub struct ComplexityAnalyzer;
645
646impl ComplexityAnalyzer {
647    /// Estimate the complexity bound of any decoder by timing it on
648    /// synthetic syndrome data.
649    pub fn analyze_complexity(
650        decoder: &dyn SurfaceCodeDecoder,
651        distance: u32,
652        error_rate: f64,
653    ) -> ComplexityBound {
654        let trials = 5u32;
655        let mut total_ns = 0u64;
656
657        for seed in 0..trials {
658            let syndrome = Self::synthetic_syndrome(distance, error_rate, seed);
659            let corr = decoder.decode(&syndrome);
660            total_ns += corr.decode_time_ns;
661        }
662
663        let avg_ns = total_ns as f64 / trials as f64;
664        let d = distance as f64;
665        // Estimate scaling exponent from a single distance (rough).
666        let alpha = if d > 1.0 {
667            avg_ns.ln() / d.ln()
668        } else {
669            2.0
670        };
671
672        ComplexityBound {
673            expected_ops: avg_ns,
674            worst_case_ops: avg_ns * 5.0,
675            probability_of_worst_case: error_rate.powf(distance as f64 / 2.0),
676            scaling_exponent: alpha.min(3.0),
677            crossover_distance: distance,
678        }
679    }
680
681    /// Estimate threshold and logical error suppression from Monte-Carlo runs.
682    pub fn threshold_analysis(
683        error_rates: &[f64],
684        distances: &[u32],
685    ) -> ThresholdTheorem {
686        // Standard surface code threshold estimate: ~1% for depolarizing noise.
687        let p_th = 0.01;
688
689        // Use the first (error_rate, distance) pair for the bound.
690        let p = error_rates.first().copied().unwrap_or(0.001);
691        let d = distances.first().copied().unwrap_or(3) as f64;
692
693        let ratio = p / p_th;
694        let suppression = d / 2.0;
695        let p_l = ratio.powf(suppression);
696
697        ThresholdTheorem {
698            physical_threshold: p_th,
699            logical_error_rate: p_l.min(1.0),
700            suppression_exponent: suppression,
701        }
702    }
703
704    /// Find the crossover code distance at which the hierarchical decoder
705    /// becomes faster than a baseline decoder.
706    pub fn crossover_point(
707        hierarchical: &HierarchicalTiledDecoder,
708        baseline: &dyn SurfaceCodeDecoder,
709    ) -> u32 {
710        let error_rate = 0.001;
711        for d in (3..=101).step_by(2) {
712            let syn = Self::synthetic_syndrome(d, error_rate, 42);
713            let t_hier = {
714                let c = hierarchical.decode(&syn);
715                c.decode_time_ns
716            };
717            let t_base = {
718                let c = baseline.decode(&syn);
719                c.decode_time_ns
720            };
721            if t_hier < t_base {
722                return d;
723            }
724        }
725        // Default: hierarchical wins at large enough d.
726        101
727    }
728
729    /// Generate a deterministic synthetic syndrome for benchmarking.
730    fn synthetic_syndrome(distance: u32, error_rate: f64, seed: u32) -> SyndromeData {
731        let grid_w = distance.saturating_sub(1).max(1);
732        let mut stabs = Vec::with_capacity((grid_w * grid_w) as usize);
733        let mut hash: u64 = 0x517c_c1b7_2722_0a95 ^ (seed as u64);
734
735        for y in 0..grid_w {
736            for x in 0..grid_w {
737                // Simple hash-based PRNG.
738                hash = hash.wrapping_mul(6364136223846793005).wrapping_add(1442695040888963407);
739                let r = (hash >> 33) as f64 / (u32::MAX as f64);
740                stabs.push(StabilizerMeasurement {
741                    x,
742                    y,
743                    round: 0,
744                    value: r < error_rate,
745                });
746            }
747        }
748
749        SyndromeData {
750            stabilizers: stabs,
751            code_distance: distance,
752            num_rounds: 1,
753        }
754    }
755}
756
757// ---------------------------------------------------------------------------
758// 5. DefectGraphBuilder
759// ---------------------------------------------------------------------------
760
/// Spatial-hash-accelerated defect graph for efficient neighbor queries.
///
/// Defects are binned into cells of side `cell_size`. Neighbor lookups
/// scan only the O(1) adjacent cells, giving expected O(1) query time
/// for sparse defect densities (which is the regime of interest below
/// threshold).
pub struct DefectGraphBuilder {
    /// Side length of a spatial-hash cell in lattice sites (clamped to
    /// >= 1 by `new`).
    cell_size: u32,
}
770
/// An edge in the defect graph.
#[derive(Debug, Clone)]
pub struct DefectEdge {
    /// Index of the first endpoint into the extracted defect list.
    pub src: usize,
    /// Index of the second endpoint (always greater than `src`).
    pub dst: usize,
    /// Manhattan distance between the endpoints in (x, y, round) space.
    pub weight: u32,
}
778
779impl DefectGraphBuilder {
780    pub fn new(cell_size: u32) -> Self {
781        Self {
782            cell_size: cell_size.max(1),
783        }
784    }
785
786    /// Build a defect graph using spatial hashing for O(1) neighbor lookup.
787    ///
788    /// Returns edges connecting each defect to its nearest neighbors
789    /// within `max_radius` Manhattan distance.
790    pub fn build(&self, syndrome: &SyndromeData, max_radius: u32) -> Vec<DefectEdge> {
791        let defects = extract_defects(syndrome);
792        if defects.len() < 2 {
793            return Vec::new();
794        }
795
796        // Spatial hash: key = (cell_x, cell_y, cell_r).
797        let cs = self.cell_size;
798        let mut cells: std::collections::HashMap<(u32, u32, u32), Vec<usize>> =
799            std::collections::HashMap::new();
800
801        for (i, d) in defects.iter().enumerate() {
802            let key = (d.x / cs, d.y / cs, d.round / cs.max(1));
803            cells.entry(key).or_default().push(i);
804        }
805
806        let mut edges = Vec::new();
807        let search_radius = (max_radius + cs - 1) / cs;
808
809        for (i, di) in defects.iter().enumerate() {
810            let cx = di.x / cs;
811            let cy = di.y / cs;
812            let cr = di.round / cs.max(1);
813
814            for dz in 0..=search_radius {
815                for dy in 0..=search_radius {
816                    for dx in 0..=search_radius {
817                        // Check all sign combinations.
818                        for &sx in &[0i64, -(dx as i64), dx as i64] {
819                            for &sy in &[0i64, -(dy as i64), dy as i64] {
820                                for &sr in &[0i64, -(dz as i64), dz as i64] {
821                                    let nx = cx as i64 + sx;
822                                    let ny = cy as i64 + sy;
823                                    let nr = cr as i64 + sr;
824                                    if nx < 0 || ny < 0 || nr < 0 {
825                                        continue;
826                                    }
827                                    let key = (nx as u32, ny as u32, nr as u32);
828                                    if let Some(bucket) = cells.get(&key) {
829                                        for &j in bucket {
830                                            if j <= i {
831                                                continue;
832                                            }
833                                            let w = manhattan(di, &defects[j]);
834                                            if w <= max_radius {
835                                                edges.push(DefectEdge {
836                                                    src: i,
837                                                    dst: j,
838                                                    weight: w,
839                                                });
840                                            }
841                                        }
842                                    }
843                                }
844                            }
845                        }
846                    }
847                }
848            }
849        }
850
851        // Deduplicate edges.
852        edges.sort_by_key(|e| (e.src, e.dst));
853        edges.dedup_by_key(|e| (e.src, e.dst));
854        edges
855    }
856}
857
858// ---------------------------------------------------------------------------
859// 6. Benchmark functions
860// ---------------------------------------------------------------------------
861
/// A single data point from empirical scaling measurements.
#[derive(Debug, Clone)]
pub struct ScalingDataPoint {
    /// Code distance at which the decode times were measured.
    pub distance: u32,
    /// Mean decode time across all samples, in nanoseconds.
    pub mean_decode_ns: f64,
    /// Population standard deviation of the decode times, in nanoseconds.
    pub std_decode_ns: f64,
    /// Number of decode runs averaged into `mean_decode_ns`.
    pub num_samples: u32,
}
870
/// Result of a statistical test for subpolynomial scaling.
#[derive(Debug, Clone)]
pub struct SubpolyVerification {
    /// Fitted exponent alpha in the power-law model T ~ d^alpha
    /// (NaN when the fit could not be performed).
    pub fitted_exponent: f64,
    /// Whether alpha < 2.0 (subquadratic scaling).
    pub is_subquadratic: bool,
    /// R-squared (coefficient of determination) of the power-law fit.
    pub r_squared: f64,
}
881
882/// Measure empirical decode time scaling across code distances.
883pub fn benchmark_scaling(
884    distances: &[u32],
885    error_rate: f64,
886) -> Vec<ScalingDataPoint> {
887    let samples_per_d = 20u32;
888    let decoder = HierarchicalTiledDecoder::new(4, 3);
889    let mut data = Vec::with_capacity(distances.len());
890
891    for &d in distances {
892        let mut times = Vec::with_capacity(samples_per_d as usize);
893        for seed in 0..samples_per_d {
894            let syn = ComplexityAnalyzer::synthetic_syndrome(d, error_rate, seed);
895            let corr = decoder.decode(&syn);
896            times.push(corr.decode_time_ns as f64);
897        }
898        let n = times.len() as f64;
899        let mean = times.iter().sum::<f64>() / n;
900        let var = times.iter().map(|t| (t - mean).powi(2)).sum::<f64>() / n;
901        data.push(ScalingDataPoint {
902            distance: d,
903            mean_decode_ns: mean,
904            std_decode_ns: var.sqrt(),
905            num_samples: samples_per_d,
906        });
907    }
908    data
909}
910
911/// Statistical test for subpolynomial (subquadratic) scaling.
912///
913/// Fits log(T) = alpha log(d) + beta via ordinary least squares and
914/// checks whether alpha < 2.
915pub fn verify_subpolynomial(data: &[ScalingDataPoint]) -> SubpolyVerification {
916    if data.len() < 2 {
917        return SubpolyVerification {
918            fitted_exponent: f64::NAN,
919            is_subquadratic: false,
920            r_squared: 0.0,
921        };
922    }
923
924    // OLS on (log d, log T).
925    let points: Vec<(f64, f64)> = data
926        .iter()
927        .filter(|p| p.distance > 1 && p.mean_decode_ns > 0.0)
928        .map(|p| ((p.distance as f64).ln(), p.mean_decode_ns.ln()))
929        .collect();
930
931    if points.len() < 2 {
932        return SubpolyVerification {
933            fitted_exponent: f64::NAN,
934            is_subquadratic: false,
935            r_squared: 0.0,
936        };
937    }
938
939    let n = points.len() as f64;
940    let sx: f64 = points.iter().map(|(x, _)| x).sum();
941    let sy: f64 = points.iter().map(|(_, y)| y).sum();
942    let sxx: f64 = points.iter().map(|(x, _)| x * x).sum();
943    let sxy: f64 = points.iter().map(|(x, y)| x * y).sum();
944
945    let denom = n * sxx - sx * sx;
946    let alpha = if denom.abs() > 1e-15 {
947        (n * sxy - sx * sy) / denom
948    } else {
949        f64::NAN
950    };
951
952    let beta = (sy - alpha * sx) / n;
953
954    // R-squared.
955    let y_mean = sy / n;
956    let ss_tot: f64 = points.iter().map(|(_, y)| (y - y_mean).powi(2)).sum();
957    let ss_res: f64 = points
958        .iter()
959        .map(|(x, y)| (y - (alpha * x + beta)).powi(2))
960        .sum();
961    let r2 = if ss_tot > 1e-15 {
962        1.0 - ss_res / ss_tot
963    } else {
964        0.0
965    };
966
967    SubpolyVerification {
968        fitted_exponent: alpha,
969        is_subquadratic: alpha < 2.0,
970        r_squared: r2,
971    }
972}
973
974// ---------------------------------------------------------------------------
975// Tests
976// ---------------------------------------------------------------------------
977
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a single-round syndrome for a distance-`d` code with the
    /// stabilizers at `defect_positions` set to true and all others false.
    /// The stabilizer grid is (d-1) x (d-1), clamped to at least 1x1.
    fn simple_syndrome(d: u32, defect_positions: &[(u32, u32)]) -> SyndromeData {
        let grid_w = d.saturating_sub(1).max(1);
        let mut stabs = Vec::new();
        for y in 0..grid_w {
            for x in 0..grid_w {
                let val = defect_positions.iter().any(|&(dx, dy)| dx == x && dy == y);
                stabs.push(StabilizerMeasurement {
                    x,
                    y,
                    round: 0,
                    value: val,
                });
            }
        }
        SyndromeData {
            stabilizers: stabs,
            code_distance: d,
            num_rounds: 1,
        }
    }

    // -- HierarchicalTiledDecoder --

    #[test]
    fn hierarchical_no_errors() {
        // A clean syndrome must decode to the identity correction.
        let dec = HierarchicalTiledDecoder::new(2, 2);
        let syn = simple_syndrome(5, &[]);
        let corr = dec.decode(&syn);
        assert!(corr.pauli_corrections.is_empty());
        assert_eq!(corr.confidence, 1.0);
    }

    #[test]
    fn hierarchical_single_defect() {
        let dec = HierarchicalTiledDecoder::new(2, 2);
        let syn = simple_syndrome(5, &[(1, 1)]);
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn hierarchical_paired_defects() {
        // Two adjacent defects: a single short error chain should be found.
        let dec = HierarchicalTiledDecoder::new(2, 2);
        let syn = simple_syndrome(5, &[(0, 0), (1, 0)]);
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn hierarchical_name() {
        let dec = HierarchicalTiledDecoder::new(4, 3);
        assert_eq!(dec.name(), "HierarchicalTiledDecoder");
    }

    #[test]
    fn hierarchical_complexity_bound() {
        // The certified exponent should be subquadratic (allowing slack).
        let dec = HierarchicalTiledDecoder::new(4, 3);
        let cb = dec.complexity_bound(21, 0.001);
        assert!(cb.scaling_exponent < 2.1);
        assert!(cb.expected_ops > 0.0);
        assert!(cb.crossover_distance >= 5);
    }

    #[test]
    fn hierarchical_trait_object() {
        // The decoder must remain usable behind dyn SurfaceCodeDecoder.
        let dec: Box<dyn SurfaceCodeDecoder> =
            Box::new(HierarchicalTiledDecoder::new(2, 2));
        let syn = simple_syndrome(3, &[(0, 0)]);
        let _ = dec.decode(&syn);
        assert_eq!(dec.name(), "HierarchicalTiledDecoder");
    }

    // -- RenormalizationDecoder --

    #[test]
    fn renorm_no_errors() {
        let dec = RenormalizationDecoder::new(2, 4);
        let syn = simple_syndrome(5, &[]);
        let corr = dec.decode(&syn);
        assert!(corr.pauli_corrections.is_empty());
    }

    #[test]
    fn renorm_single_defect() {
        let dec = RenormalizationDecoder::new(2, 4);
        let syn = simple_syndrome(5, &[(2, 2)]);
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn renorm_paired() {
        let dec = RenormalizationDecoder::new(2, 3);
        let syn = simple_syndrome(7, &[(1, 1), (2, 1)]);
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn renorm_name() {
        let dec = RenormalizationDecoder::new(2, 3);
        assert_eq!(dec.name(), "RenormalizationDecoder");
    }

    // -- SlidingWindowDecoder --

    #[test]
    fn sliding_no_errors() {
        let dec = SlidingWindowDecoder::new(2);
        let syn = simple_syndrome(5, &[]);
        let corr = dec.decode(&syn);
        assert!(corr.pauli_corrections.is_empty());
    }

    #[test]
    fn sliding_single_round() {
        let dec = SlidingWindowDecoder::new(1);
        let syn = simple_syndrome(5, &[(0, 0)]);
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn sliding_multi_round() {
        // Same stabilizer toggling true -> false -> true across rounds:
        // differencing consecutive rounds yields defects at each flip.
        let dec = SlidingWindowDecoder::new(2);
        let stabs = vec![
            StabilizerMeasurement { x: 0, y: 0, round: 0, value: true },
            StabilizerMeasurement { x: 0, y: 0, round: 1, value: false },
            StabilizerMeasurement { x: 0, y: 0, round: 2, value: true },
        ];
        let syn = SyndromeData {
            stabilizers: stabs,
            code_distance: 3,
            num_rounds: 3,
        };
        let corr = dec.decode(&syn);
        // Defects at round boundaries should produce corrections.
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn sliding_name() {
        let dec = SlidingWindowDecoder::new(3);
        assert_eq!(dec.name(), "SlidingWindowDecoder");
    }

    // -- ComplexityAnalyzer --

    #[test]
    fn analyze_complexity_runs() {
        let dec = HierarchicalTiledDecoder::new(2, 2);
        let cb = ComplexityAnalyzer::analyze_complexity(&dec, 5, 0.001);
        assert!(cb.expected_ops > 0.0);
        assert!(cb.worst_case_ops >= cb.expected_ops);
    }

    #[test]
    fn threshold_analysis_basic() {
        let tt = ComplexityAnalyzer::threshold_analysis(&[0.001], &[5]);
        assert!(tt.physical_threshold > 0.0);
        assert!(tt.logical_error_rate < 1.0);
        assert!(tt.suppression_exponent > 0.0);
    }

    #[test]
    fn crossover_point_returns_valid() {
        let hier = HierarchicalTiledDecoder::new(2, 2);
        let baseline = crate::decoder::UnionFindDecoder::new(0);
        let cp = ComplexityAnalyzer::crossover_point(&hier, &baseline);
        assert!(cp >= 3);
    }

    // -- DefectGraphBuilder --

    #[test]
    fn defect_graph_empty() {
        let builder = DefectGraphBuilder::new(4);
        let syn = simple_syndrome(5, &[]);
        let edges = builder.build(&syn, 10);
        assert!(edges.is_empty());
    }

    #[test]
    fn defect_graph_two_nearby() {
        // Adjacent defects at Manhattan distance 1 produce one unit edge.
        let builder = DefectGraphBuilder::new(4);
        let syn = simple_syndrome(5, &[(0, 0), (1, 0)]);
        let edges = builder.build(&syn, 10);
        assert!(!edges.is_empty());
        assert_eq!(edges[0].weight, 1);
    }

    #[test]
    fn defect_graph_far_apart() {
        let builder = DefectGraphBuilder::new(2);
        let syn = simple_syndrome(11, &[(0, 0), (9, 9)]);
        let edges = builder.build(&syn, 3);
        // Distance is 18 > 3, so no edge.
        assert!(edges.is_empty());
    }

    // -- Benchmarks --

    #[test]
    fn benchmark_scaling_runs() {
        let data = benchmark_scaling(&[3, 5, 7], 0.001);
        assert_eq!(data.len(), 3);
        for pt in &data {
            assert!(pt.mean_decode_ns >= 0.0);
        }
    }

    #[test]
    fn verify_subpolynomial_basic() {
        let data = benchmark_scaling(&[3, 5, 7, 9], 0.001);
        let result = verify_subpolynomial(&data);
        // Just verify it produces a valid result.
        assert!(!result.fitted_exponent.is_nan());
    }

    #[test]
    fn verify_subpolynomial_insufficient_data() {
        // Fewer than two data points must yield the degenerate result.
        let result = verify_subpolynomial(&[]);
        assert!(result.fitted_exponent.is_nan());
        assert!(!result.is_subquadratic);
    }
}