1use std::time::Instant;
31
32use crate::decoder::{Correction, PauliType, StabilizerMeasurement, SurfaceCodeDecoder, SyndromeData};
33
// A single detection event: a stabilizer whose measured value changed
// relative to the previous round (see `extract_defects`).
#[derive(Debug, Clone)]
struct Defect {
    // Column on the (d-1) x (d-1) stabilizer grid.
    x: u32,
    // Row on the stabilizer grid.
    y: u32,
    // Measurement round in which the change was observed.
    round: u32,
}
45
46fn extract_defects(syndrome: &SyndromeData) -> Vec<Defect> {
48 let d = syndrome.code_distance;
49 let grid_w = d.saturating_sub(1).max(1);
50 let grid_h = grid_w;
51 let nr = syndrome.num_rounds;
52 let sz = (grid_w as usize) * (grid_h as usize) * (nr as usize);
53 let mut grid = vec![false; sz];
54
55 for s in &syndrome.stabilizers {
56 if s.x < grid_w && s.y < grid_h && s.round < nr {
57 let idx = (s.round * grid_w * grid_h + s.y * grid_w + s.x) as usize;
58 if idx < grid.len() {
59 grid[idx] = s.value;
60 }
61 }
62 }
63
64 let mut defects = Vec::new();
65 for r in 0..nr {
66 for y in 0..grid_h {
67 for x in 0..grid_w {
68 let cur = grid[(r * grid_w * grid_h + y * grid_w + x) as usize];
69 let prev = if r > 0 {
70 grid[((r - 1) * grid_w * grid_h + y * grid_w + x) as usize]
71 } else {
72 false
73 };
74 if cur != prev {
75 defects.push(Defect { x, y, round: r });
76 }
77 }
78 }
79 }
80 defects
81}
82
83fn manhattan(a: &Defect, b: &Defect) -> u32 {
85 a.x.abs_diff(b.x) + a.y.abs_diff(b.y) + a.round.abs_diff(b.round)
86}
87
88fn greedy_pair_and_correct(defects: &[Defect], code_distance: u32) -> Vec<(u32, PauliType)> {
95 if defects.is_empty() {
96 return Vec::new();
97 }
98 let mut used = vec![false; defects.len()];
99 let mut corrections = Vec::new();
100
101 let mut order: Vec<usize> = (0..defects.len()).collect();
103 order.sort_by_key(|&i| (defects[i].round, defects[i].y, defects[i].x));
104
105 for &i in &order {
106 if used[i] {
107 continue;
108 }
109 let mut best_j: Option<usize> = None;
111 let mut best_dist = u32::MAX;
112 for &j in &order {
113 if j == i || used[j] {
114 continue;
115 }
116 let d = manhattan(&defects[i], &defects[j]);
117 if d < best_dist {
118 best_dist = d;
119 best_j = Some(j);
120 }
121 }
122
123 let grid_w = code_distance.saturating_sub(1).max(1);
124 let bdist = defects[i].x.min(grid_w.saturating_sub(defects[i].x + 1));
125
126 if let Some(j) = best_j {
127 if best_dist <= bdist {
128 used[i] = true;
130 used[j] = true;
131 corrections.extend(path_between(&defects[i], &defects[j], code_distance));
132 continue;
133 }
134 }
135 used[i] = true;
137 corrections.extend(path_to_boundary(&defects[i], code_distance));
138 }
139 corrections
140}
141
142fn path_between(a: &Defect, b: &Defect, d: u32) -> Vec<(u32, PauliType)> {
144 let mut out = Vec::new();
145 let (mut cx, mut cy) = (a.x as i64, a.y as i64);
146 let (tx, ty) = (b.x as i64, b.y as i64);
147 while cx != tx {
148 let step: i64 = if tx > cx { 1 } else { -1 };
149 let qx = if step > 0 { cx + 1 } else { cx };
150 out.push((cy as u32 * d + qx as u32, PauliType::X));
151 cx += step;
152 }
153 while cy != ty {
154 let step: i64 = if ty > cy { 1 } else { -1 };
155 let qy = if step > 0 { cy + 1 } else { cy };
156 out.push((qy as u32 * d + cx as u32, PauliType::Z));
157 cy += step;
158 }
159 out
160}
161
162fn path_to_boundary(defect: &Defect, d: u32) -> Vec<(u32, PauliType)> {
164 let grid_w = d.saturating_sub(1).max(1);
165 let dl = defect.x;
166 let dr = grid_w.saturating_sub(defect.x + 1);
167 let mut out = Vec::new();
168 if dl <= dr {
169 for step in 0..=defect.x {
170 out.push((defect.y * d + (defect.x - step), PauliType::X));
171 }
172 } else {
173 for step in 0..=(grid_w - defect.x - 1) {
174 out.push((defect.y * d + (defect.x + step + 1), PauliType::X));
175 }
176 }
177 out
178}
179
180fn infer_logical(corrections: &[(u32, PauliType)]) -> bool {
181 corrections.iter().filter(|(_, p)| *p == PauliType::X).count() % 2 == 1
182}
183
/// Hierarchical tiled surface-code decoder: decodes fixed-size tiles
/// independently, then re-matches tile-boundary defects at progressively
/// doubled tile sizes (see the `SurfaceCodeDecoder` impl).
pub struct HierarchicalTiledDecoder {
    // Edge length of a level-0 tile in stabilizer cells; clamped to >= 2 by `new`.
    tile_size: u32,
    // Number of hierarchy levels; clamped to >= 1 by `new`.
    num_levels: u32,
    // NOTE(review): set to 0.25 in `new` but not referenced anywhere in this
    // part of the file — confirm whether it is still needed.
    boundary_budget: f64,
    // NOTE(review): set to 0.01 in `new` but not referenced anywhere in this
    // part of the file — confirm whether it is still needed.
    error_rate_threshold: f64,
}
206
impl HierarchicalTiledDecoder {
    /// Creates a decoder with `tile_size` clamped to >= 2 and `num_levels`
    /// clamped to >= 1; the two threshold fields get fixed defaults.
    pub fn new(tile_size: u32, num_levels: u32) -> Self {
        let tile_size = tile_size.max(2);
        Self {
            tile_size,
            num_levels: num_levels.max(1),
            boundary_budget: 0.25,
            error_rate_threshold: 0.01,
        }
    }

    /// Decodes one tile's defects with the greedy matcher, using the
    /// tile-local distance `tile_d` as the qubit-index stride.
    fn decode_tile(&self, defects: &[Defect], tile_d: u32) -> Vec<(u32, PauliType)> {
        greedy_pair_and_correct(defects, tile_d)
    }

    /// Re-matches defects that sit on tile borders at this level
    /// (x or y congruent to 0 or ts-1 mod ts), using full-lattice
    /// coordinates so cross-tile pairs become possible.
    fn merge_boundaries(
        &self,
        all_defects: &[Defect],
        level_tile_size: u32,
        code_distance: u32,
    ) -> Vec<(u32, PauliType)> {
        let ts = level_tile_size;
        let boundary_defects: Vec<&Defect> = all_defects
            .iter()
            .filter(|d| {
                let bx = d.x % ts;
                let by = d.y % ts;
                bx == 0 || bx == ts - 1 || by == 0 || by == ts - 1
            })
            .collect();

        let owned: Vec<Defect> = boundary_defects.iter().map(|d| (*d).clone()).collect();
        greedy_pair_and_correct(&owned, code_distance)
    }

    /// Heuristic analytic cost model for this decoder configuration.
    ///
    /// NOTE(review): these formulas are a rough model, not measurements —
    /// confirm they match the intended analysis before relying on them.
    pub fn complexity_bound(&self, code_distance: u32, physical_error_rate: f64) -> ComplexityBound {
        let d = code_distance as f64;
        let s = self.tile_size as f64;
        let p = physical_error_rate;

        // Per-level tile work: (d/s)^2 tiles, each ~ s^2 log s.
        let num_tiles = (d / s).powi(2);
        let tile_cost = s * s * s.ln().max(1.0);
        let tile_total = num_tiles * tile_cost;

        // Merge work: one O(d^2 / s) boundary pass per level.
        let levels = self.num_levels as f64;
        let merge_total = levels * d * d / s;

        let expected = tile_total + merge_total;

        // Effective scaling exponent: 2 minus log(s)/log(d) when both logs
        // are defined, else exactly 2.
        let epsilon = if d > 1.0 && s > 1.0 {
            s.ln() / d.ln()
        } else {
            0.0
        };
        let alpha = 2.0 - epsilon;

        // Probability estimate for an error chain crossing a tile, decaying
        // exponentially in tile size; clamped to <= 1.
        let crossing_prob = (-0.5 * s * (1.0 - 2.0 * p).abs().ln().abs()).exp().min(1.0);

        // Worst case falls back to ~ d^2 log d; crossover grows with s^2 * levels.
        let worst_case = d * d * d.ln().max(1.0); let crossover = (s.powi(2) * levels).ceil() as u32;

        ComplexityBound {
            expected_ops: expected,
            worst_case_ops: worst_case,
            probability_of_worst_case: crossing_prob,
            scaling_exponent: alpha,
            crossover_distance: crossover.max(self.tile_size + 1),
        }
    }
}
296
impl SurfaceCodeDecoder for HierarchicalTiledDecoder {
    /// Tile the stabilizer grid, decode each tile locally, then run
    /// `num_levels - 1` boundary-merge passes at doubling tile sizes.
    /// Duplicate corrections cancel in pairs before the result is returned.
    fn decode(&self, syndrome: &SyndromeData) -> Correction {
        let start = Instant::now();
        let d = syndrome.code_distance;
        let defects = extract_defects(syndrome);

        // Clean syndrome: nothing to correct, full confidence.
        if defects.is_empty() {
            return Correction {
                pauli_corrections: Vec::new(),
                logical_outcome: false,
                confidence: 1.0,
                decode_time_ns: start.elapsed().as_nanos() as u64,
            };
        }

        let grid_w = d.saturating_sub(1).max(1);

        // Tile size cannot exceed the grid; ceil-divide to cover it fully.
        let ts = self.tile_size.min(grid_w);
        let tiles_x = (grid_w + ts - 1) / ts;
        let tiles_y = tiles_x;
        let mut corrections: Vec<(u32, PauliType)> = Vec::new();

        for ty in 0..tiles_y {
            for tx in 0..tiles_x {
                // Half-open tile bounds, clipped at the grid edge.
                let x_lo = tx * ts;
                let x_hi = ((tx + 1) * ts).min(grid_w);
                let y_lo = ty * ts;
                let y_hi = ((ty + 1) * ts).min(grid_w);

                // Defects inside this tile, shifted to tile-local coordinates.
                let tile_defects: Vec<Defect> = defects
                    .iter()
                    .filter(|dd| dd.x >= x_lo && dd.x < x_hi && dd.y >= y_lo && dd.y < y_hi)
                    .map(|dd| Defect {
                        x: dd.x - x_lo,
                        y: dd.y - y_lo,
                        round: dd.round,
                    })
                    .collect();

                // Local qubit-index stride for this (possibly clipped) tile.
                let tile_d = (x_hi - x_lo).max(y_hi - y_lo) + 1;
                let tile_corr = self.decode_tile(&tile_defects, tile_d);

                // Remap each tile-local qubit index (row stride tile_d) back
                // to the global index space (row stride d).
                for (q, p) in tile_corr {
                    let local_y = q / tile_d;
                    let local_x = q % tile_d;
                    corrections.push(((local_y + y_lo) * d + (local_x + x_lo), p));
                }
            }
        }

        // Merge passes: double the effective tile size each level and
        // re-match defects on the new tile boundaries.
        let mut level_ts = ts;
        for _ in 0..self.num_levels.saturating_sub(1) {
            level_ts = (level_ts * 2).min(grid_w);
            let boundary_corr = self.merge_boundaries(&defects, level_ts, d);
            corrections.extend(boundary_corr);
            if level_ts >= grid_w {
                break;
            }
        }

        // Cancel corrections appearing an even number of times (Paulis are
        // self-inverse): sort so equal entries are adjacent, then keep only
        // odd-multiplicity runs.
        corrections.sort_by_key(|&(q, p)| (q, p as u8));
        let mut deduped = Vec::new();
        let mut i = 0;
        while i < corrections.len() {
            let mut cnt = 1usize;
            while i + cnt < corrections.len() && corrections[i + cnt] == corrections[i] {
                cnt += 1;
            }
            if cnt % 2 == 1 {
                deduped.push(corrections[i]);
            }
            i += cnt;
        }

        let logical = infer_logical(&deduped);
        // Confidence heuristic: 1 minus defect density over a d x d area.
        let density = defects.len() as f64 / (d as f64 * d as f64);
        let confidence = (1.0 - density).clamp(0.0, 1.0);

        Correction {
            pauli_corrections: deduped,
            logical_outcome: logical,
            confidence,
            decode_time_ns: start.elapsed().as_nanos() as u64,
        }
    }

    fn name(&self) -> &str {
        "HierarchicalTiledDecoder"
    }
}
391
/// Renormalization-style decoder: pairs interior defects within small blocks
/// first, then carries boundary/residual defects to progressively coarser
/// block sizes (see `decode_scale` and the `SurfaceCodeDecoder` impl).
pub struct RenormalizationDecoder {
    // Factor by which the block size grows per level; clamped to >= 2 by `new`.
    coarsening_factor: u32,
    // Maximum number of coarsening levels; clamped to >= 1 by `new`.
    max_levels: u32,
}
409
impl RenormalizationDecoder {
    /// Creates a decoder; `coarsening_factor` is clamped to >= 2 and
    /// `max_levels` to >= 1.
    pub fn new(coarsening_factor: u32, max_levels: u32) -> Self {
        Self {
            coarsening_factor: coarsening_factor.max(2),
            max_levels: max_levels.max(1),
        }
    }

    /// Decodes one scale: partitions the grid into `block_size` blocks,
    /// greedily corrects blocks' interior defects, and returns
    /// (corrections, residual defects to pass to the next coarser level).
    /// Boundary defects (on a block edge) are always deferred to the residual.
    fn decode_scale(
        &self,
        defects: &[Defect],
        block_size: u32,
        code_distance: u32,
    ) -> (Vec<(u32, PauliType)>, Vec<Defect>) {
        if defects.is_empty() {
            return (Vec::new(), Vec::new());
        }

        let grid_w = code_distance.saturating_sub(1).max(1);
        // Ceil-divide so the blocks cover the whole grid.
        let nb = (grid_w + block_size - 1) / block_size;
        let mut corrections = Vec::new();
        let mut residual = Vec::new();

        for by in 0..nb {
            for bx in 0..nb {
                // Half-open block bounds, clipped at the grid edge.
                let x_lo = bx * block_size;
                let x_hi = ((bx + 1) * block_size).min(grid_w);
                let y_lo = by * block_size;
                let y_hi = ((by + 1) * block_size).min(grid_w);

                let block: Vec<&Defect> = defects
                    .iter()
                    .filter(|dd| dd.x >= x_lo && dd.x < x_hi && dd.y >= y_lo && dd.y < y_hi)
                    .collect();

                if block.is_empty() {
                    continue;
                }

                // Split block defects into interior vs. block-edge ones.
                let mut interior = Vec::new();
                let mut boundary = Vec::new();
                for dd in &block {
                    if dd.x == x_lo || dd.x + 1 == x_hi || dd.y == y_lo || dd.y + 1 == y_hi {
                        boundary.push((*dd).clone());
                    } else {
                        interior.push((*dd).clone());
                    }
                }

                // Pair interior defects now; a lone interior defect is
                // deferred to the residual instead of being forced to a
                // boundary path.
                // NOTE(review): the pairing uses full-grid coordinates
                // (`code_distance`), not block-local ones — confirm the
                // boundary-distance heuristic inside the matcher is intended
                // to see the full lattice here.
                if interior.len() >= 2 {
                    corrections.extend(greedy_pair_and_correct(&interior, code_distance));
                } else {
                    boundary.extend(interior);
                }

                residual.extend(boundary);
            }
        }
        (corrections, residual)
    }
}
477
478impl SurfaceCodeDecoder for RenormalizationDecoder {
479 fn decode(&self, syndrome: &SyndromeData) -> Correction {
480 let start = Instant::now();
481 let d = syndrome.code_distance;
482 let mut defects = extract_defects(syndrome);
483
484 if defects.is_empty() {
485 return Correction {
486 pauli_corrections: Vec::new(),
487 logical_outcome: false,
488 confidence: 1.0,
489 decode_time_ns: start.elapsed().as_nanos() as u64,
490 };
491 }
492
493 let grid_w = d.saturating_sub(1).max(1);
494 let mut all_corrections: Vec<(u32, PauliType)> = Vec::new();
495 let mut block_size = self.coarsening_factor;
496
497 for _ in 0..self.max_levels {
498 if block_size > grid_w || defects.is_empty() {
499 break;
500 }
501 let (corr, residual) = self.decode_scale(&defects, block_size, d);
502 all_corrections.extend(corr);
503 defects = residual;
504 block_size *= self.coarsening_factor;
505 }
506
507 if !defects.is_empty() {
509 all_corrections.extend(greedy_pair_and_correct(&defects, d));
510 }
511
512 let logical = infer_logical(&all_corrections);
513 let density = extract_defects(syndrome).len() as f64 / (d as f64 * d as f64);
514
515 Correction {
516 pauli_corrections: all_corrections,
517 logical_outcome: logical,
518 confidence: (1.0 - density).clamp(0.0, 1.0),
519 decode_time_ns: start.elapsed().as_nanos() as u64,
520 }
521 }
522
523 fn name(&self) -> &str {
524 "RenormalizationDecoder"
525 }
526}
527
/// Decoder that splits the syndrome history into fixed-length temporal
/// windows and decodes each window independently.
pub struct SlidingWindowDecoder {
    // Number of measurement rounds per window; clamped to >= 1 by `new`.
    window_size: u32,
}
540
541impl SlidingWindowDecoder {
542 pub fn new(window_size: u32) -> Self {
543 Self {
544 window_size: window_size.max(1),
545 }
546 }
547}
548
impl SurfaceCodeDecoder for SlidingWindowDecoder {
    /// Decodes the syndrome in consecutive non-overlapping windows of
    /// `window_size` rounds; each window is rebased to round 0 and decoded
    /// independently with the greedy matcher.
    fn decode(&self, syndrome: &SyndromeData) -> Correction {
        let start = Instant::now();
        let d = syndrome.code_distance;
        let nr = syndrome.num_rounds;

        // No rounds at all: trivially clean result.
        if nr == 0 {
            return Correction {
                pauli_corrections: Vec::new(),
                logical_outcome: false,
                confidence: 1.0,
                decode_time_ns: start.elapsed().as_nanos() as u64,
            };
        }

        let mut all_corrections: Vec<(u32, PauliType)> = Vec::new();
        let mut window_start: u32 = 0;

        while window_start < nr {
            let window_end = (window_start + self.window_size).min(nr);

            // Copy this window's measurements, shifting rounds so the window
            // starts at round 0.
            let window_stabs: Vec<StabilizerMeasurement> = syndrome
                .stabilizers
                .iter()
                .filter(|s| s.round >= window_start && s.round < window_end)
                .map(|s| StabilizerMeasurement {
                    x: s.x,
                    y: s.y,
                    round: s.round - window_start,
                    value: s.value,
                })
                .collect();

            let window_syndrome = SyndromeData {
                stabilizers: window_stabs,
                code_distance: d,
                num_rounds: window_end - window_start,
            };

            // NOTE(review): each window's round 0 is compared against an
            // implicit all-false reference, so a stabilizer value that
            // persists across a window boundary is reported again as a defect
            // in the next window — confirm this approximation is intended.
            let defects = extract_defects(&window_syndrome);
            let corr = greedy_pair_and_correct(&defects, d);
            all_corrections.extend(corr);

            window_start = window_end;
        }

        let logical = infer_logical(&all_corrections);
        // Density over the full space-time volume for the confidence heuristic.
        let total_defects = extract_defects(syndrome).len();
        let density = total_defects as f64 / (d as f64 * d as f64 * nr.max(1) as f64);

        Correction {
            pauli_corrections: all_corrections,
            logical_outcome: logical,
            confidence: (1.0 - density).clamp(0.0, 1.0),
            decode_time_ns: start.elapsed().as_nanos() as u64,
        }
    }

    fn name(&self) -> &str {
        "SlidingWindowDecoder"
    }
}
612
/// Estimate of a decoder's runtime behavior, either analytic
/// (`HierarchicalTiledDecoder::complexity_bound`) or empirical
/// (`ComplexityAnalyzer::analyze_complexity`).
#[derive(Debug, Clone)]
pub struct ComplexityBound {
    /// Expected cost of a typical decode (model operations or measured ns,
    /// depending on the producer).
    pub expected_ops: f64,
    /// Cost estimate for the worst case.
    pub worst_case_ops: f64,
    /// Estimated probability that the worst case occurs.
    pub probability_of_worst_case: f64,
    /// Fitted/derived exponent alpha in cost ~ d^alpha.
    pub scaling_exponent: f64,
    /// Code distance beyond which this decoder is expected to win.
    pub crossover_distance: u32,
}
631
/// Summary of a threshold-theorem style estimate
/// (see `ComplexityAnalyzer::threshold_analysis`).
#[derive(Debug, Clone)]
pub struct ThresholdTheorem {
    /// Assumed physical error-rate threshold.
    pub physical_threshold: f64,
    /// Estimated logical error rate, clamped to <= 1.
    pub logical_error_rate: f64,
    /// Exponent used for error suppression (d / 2 in this model).
    pub suppression_exponent: f64,
}
642
/// Stateless namespace for empirical complexity and threshold analysis.
pub struct ComplexityAnalyzer;
645
646impl ComplexityAnalyzer {
647 pub fn analyze_complexity(
650 decoder: &dyn SurfaceCodeDecoder,
651 distance: u32,
652 error_rate: f64,
653 ) -> ComplexityBound {
654 let trials = 5u32;
655 let mut total_ns = 0u64;
656
657 for seed in 0..trials {
658 let syndrome = Self::synthetic_syndrome(distance, error_rate, seed);
659 let corr = decoder.decode(&syndrome);
660 total_ns += corr.decode_time_ns;
661 }
662
663 let avg_ns = total_ns as f64 / trials as f64;
664 let d = distance as f64;
665 let alpha = if d > 1.0 {
667 avg_ns.ln() / d.ln()
668 } else {
669 2.0
670 };
671
672 ComplexityBound {
673 expected_ops: avg_ns,
674 worst_case_ops: avg_ns * 5.0,
675 probability_of_worst_case: error_rate.powf(distance as f64 / 2.0),
676 scaling_exponent: alpha.min(3.0),
677 crossover_distance: distance,
678 }
679 }
680
681 pub fn threshold_analysis(
683 error_rates: &[f64],
684 distances: &[u32],
685 ) -> ThresholdTheorem {
686 let p_th = 0.01;
688
689 let p = error_rates.first().copied().unwrap_or(0.001);
691 let d = distances.first().copied().unwrap_or(3) as f64;
692
693 let ratio = p / p_th;
694 let suppression = d / 2.0;
695 let p_l = ratio.powf(suppression);
696
697 ThresholdTheorem {
698 physical_threshold: p_th,
699 logical_error_rate: p_l.min(1.0),
700 suppression_exponent: suppression,
701 }
702 }
703
704 pub fn crossover_point(
707 hierarchical: &HierarchicalTiledDecoder,
708 baseline: &dyn SurfaceCodeDecoder,
709 ) -> u32 {
710 let error_rate = 0.001;
711 for d in (3..=101).step_by(2) {
712 let syn = Self::synthetic_syndrome(d, error_rate, 42);
713 let t_hier = {
714 let c = hierarchical.decode(&syn);
715 c.decode_time_ns
716 };
717 let t_base = {
718 let c = baseline.decode(&syn);
719 c.decode_time_ns
720 };
721 if t_hier < t_base {
722 return d;
723 }
724 }
725 101
727 }
728
729 fn synthetic_syndrome(distance: u32, error_rate: f64, seed: u32) -> SyndromeData {
731 let grid_w = distance.saturating_sub(1).max(1);
732 let mut stabs = Vec::with_capacity((grid_w * grid_w) as usize);
733 let mut hash: u64 = 0x517c_c1b7_2722_0a95 ^ (seed as u64);
734
735 for y in 0..grid_w {
736 for x in 0..grid_w {
737 hash = hash.wrapping_mul(6364136223846793005).wrapping_add(1442695040888963407);
739 let r = (hash >> 33) as f64 / (u32::MAX as f64);
740 stabs.push(StabilizerMeasurement {
741 x,
742 y,
743 round: 0,
744 value: r < error_rate,
745 });
746 }
747 }
748
749 SyndromeData {
750 stabilizers: stabs,
751 code_distance: distance,
752 num_rounds: 1,
753 }
754 }
755}
756
/// Builds a sparse defect-adjacency graph via spatial hashing: defects are
/// bucketed into cells of `cell_size` so only nearby buckets are scanned
/// for candidate edges (see `build`).
pub struct DefectGraphBuilder {
    // Edge length of a hash cell in stabilizer-grid units; clamped to >= 1 by `new`.
    cell_size: u32,
}
770
/// Weighted edge between two defects, referenced by index into the defect
/// list produced by `extract_defects`.
#[derive(Debug, Clone)]
pub struct DefectEdge {
    /// Index of the first defect (always less than `dst`, see `build`).
    pub src: usize,
    /// Index of the second defect.
    pub dst: usize,
    /// Manhattan (space + time) distance between the two defects.
    pub weight: u32,
}
778
779impl DefectGraphBuilder {
780 pub fn new(cell_size: u32) -> Self {
781 Self {
782 cell_size: cell_size.max(1),
783 }
784 }
785
786 pub fn build(&self, syndrome: &SyndromeData, max_radius: u32) -> Vec<DefectEdge> {
791 let defects = extract_defects(syndrome);
792 if defects.len() < 2 {
793 return Vec::new();
794 }
795
796 let cs = self.cell_size;
798 let mut cells: std::collections::HashMap<(u32, u32, u32), Vec<usize>> =
799 std::collections::HashMap::new();
800
801 for (i, d) in defects.iter().enumerate() {
802 let key = (d.x / cs, d.y / cs, d.round / cs.max(1));
803 cells.entry(key).or_default().push(i);
804 }
805
806 let mut edges = Vec::new();
807 let search_radius = (max_radius + cs - 1) / cs;
808
809 for (i, di) in defects.iter().enumerate() {
810 let cx = di.x / cs;
811 let cy = di.y / cs;
812 let cr = di.round / cs.max(1);
813
814 for dz in 0..=search_radius {
815 for dy in 0..=search_radius {
816 for dx in 0..=search_radius {
817 for &sx in &[0i64, -(dx as i64), dx as i64] {
819 for &sy in &[0i64, -(dy as i64), dy as i64] {
820 for &sr in &[0i64, -(dz as i64), dz as i64] {
821 let nx = cx as i64 + sx;
822 let ny = cy as i64 + sy;
823 let nr = cr as i64 + sr;
824 if nx < 0 || ny < 0 || nr < 0 {
825 continue;
826 }
827 let key = (nx as u32, ny as u32, nr as u32);
828 if let Some(bucket) = cells.get(&key) {
829 for &j in bucket {
830 if j <= i {
831 continue;
832 }
833 let w = manhattan(di, &defects[j]);
834 if w <= max_radius {
835 edges.push(DefectEdge {
836 src: i,
837 dst: j,
838 weight: w,
839 });
840 }
841 }
842 }
843 }
844 }
845 }
846 }
847 }
848 }
849 }
850
851 edges.sort_by_key(|e| (e.src, e.dst));
853 edges.dedup_by_key(|e| (e.src, e.dst));
854 edges
855 }
856}
857
/// One measured point of the decode-time-vs-distance scaling curve
/// (produced by `benchmark_scaling`).
#[derive(Debug, Clone)]
pub struct ScalingDataPoint {
    /// Code distance for this measurement.
    pub distance: u32,
    /// Mean decode time over the samples, in nanoseconds.
    pub mean_decode_ns: f64,
    /// Population standard deviation of the decode times, in nanoseconds.
    pub std_decode_ns: f64,
    /// Number of decode samples averaged.
    pub num_samples: u32,
}
870
/// Result of the log-log power-law fit in `verify_subpolynomial`.
#[derive(Debug, Clone)]
pub struct SubpolyVerification {
    /// Fitted slope alpha of ln(time) vs ln(distance); NaN when the fit is
    /// degenerate.
    pub fitted_exponent: f64,
    /// True when the fitted exponent is strictly below 2.
    pub is_subquadratic: bool,
    /// Coefficient of determination of the linear fit.
    pub r_squared: f64,
}
881
882pub fn benchmark_scaling(
884 distances: &[u32],
885 error_rate: f64,
886) -> Vec<ScalingDataPoint> {
887 let samples_per_d = 20u32;
888 let decoder = HierarchicalTiledDecoder::new(4, 3);
889 let mut data = Vec::with_capacity(distances.len());
890
891 for &d in distances {
892 let mut times = Vec::with_capacity(samples_per_d as usize);
893 for seed in 0..samples_per_d {
894 let syn = ComplexityAnalyzer::synthetic_syndrome(d, error_rate, seed);
895 let corr = decoder.decode(&syn);
896 times.push(corr.decode_time_ns as f64);
897 }
898 let n = times.len() as f64;
899 let mean = times.iter().sum::<f64>() / n;
900 let var = times.iter().map(|t| (t - mean).powi(2)).sum::<f64>() / n;
901 data.push(ScalingDataPoint {
902 distance: d,
903 mean_decode_ns: mean,
904 std_decode_ns: var.sqrt(),
905 num_samples: samples_per_d,
906 });
907 }
908 data
909}
910
911pub fn verify_subpolynomial(data: &[ScalingDataPoint]) -> SubpolyVerification {
916 if data.len() < 2 {
917 return SubpolyVerification {
918 fitted_exponent: f64::NAN,
919 is_subquadratic: false,
920 r_squared: 0.0,
921 };
922 }
923
924 let points: Vec<(f64, f64)> = data
926 .iter()
927 .filter(|p| p.distance > 1 && p.mean_decode_ns > 0.0)
928 .map(|p| ((p.distance as f64).ln(), p.mean_decode_ns.ln()))
929 .collect();
930
931 if points.len() < 2 {
932 return SubpolyVerification {
933 fitted_exponent: f64::NAN,
934 is_subquadratic: false,
935 r_squared: 0.0,
936 };
937 }
938
939 let n = points.len() as f64;
940 let sx: f64 = points.iter().map(|(x, _)| x).sum();
941 let sy: f64 = points.iter().map(|(_, y)| y).sum();
942 let sxx: f64 = points.iter().map(|(x, _)| x * x).sum();
943 let sxy: f64 = points.iter().map(|(x, y)| x * y).sum();
944
945 let denom = n * sxx - sx * sx;
946 let alpha = if denom.abs() > 1e-15 {
947 (n * sxy - sx * sy) / denom
948 } else {
949 f64::NAN
950 };
951
952 let beta = (sy - alpha * sx) / n;
953
954 let y_mean = sy / n;
956 let ss_tot: f64 = points.iter().map(|(_, y)| (y - y_mean).powi(2)).sum();
957 let ss_res: f64 = points
958 .iter()
959 .map(|(x, y)| (y - (alpha * x + beta)).powi(2))
960 .sum();
961 let r2 = if ss_tot > 1e-15 {
962 1.0 - ss_res / ss_tot
963 } else {
964 0.0
965 };
966
967 SubpolyVerification {
968 fitted_exponent: alpha,
969 is_subquadratic: alpha < 2.0,
970 r_squared: r2,
971 }
972}
973
#[cfg(test)]
mod tests {
    use super::*;

    // Builds a single-round syndrome on a d-code grid where exactly the
    // listed (x, y) stabilizers read true.
    fn simple_syndrome(d: u32, defect_positions: &[(u32, u32)]) -> SyndromeData {
        let grid_w = d.saturating_sub(1).max(1);
        let mut stabs = Vec::new();
        for y in 0..grid_w {
            for x in 0..grid_w {
                let val = defect_positions.iter().any(|&(dx, dy)| dx == x && dy == y);
                stabs.push(StabilizerMeasurement {
                    x,
                    y,
                    round: 0,
                    value: val,
                });
            }
        }
        SyndromeData {
            stabilizers: stabs,
            code_distance: d,
            num_rounds: 1,
        }
    }

    // --- HierarchicalTiledDecoder ---

    #[test]
    fn hierarchical_no_errors() {
        // Clean syndrome: no corrections, full confidence.
        let dec = HierarchicalTiledDecoder::new(2, 2);
        let syn = simple_syndrome(5, &[]);
        let corr = dec.decode(&syn);
        assert!(corr.pauli_corrections.is_empty());
        assert_eq!(corr.confidence, 1.0);
    }

    #[test]
    fn hierarchical_single_defect() {
        // A lone defect must still be routed somewhere (boundary path).
        let dec = HierarchicalTiledDecoder::new(2, 2);
        let syn = simple_syndrome(5, &[(1, 1)]);
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn hierarchical_paired_defects() {
        // Two adjacent defects should produce a non-empty correction.
        let dec = HierarchicalTiledDecoder::new(2, 2);
        let syn = simple_syndrome(5, &[(0, 0), (1, 0)]);
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn hierarchical_name() {
        let dec = HierarchicalTiledDecoder::new(4, 3);
        assert_eq!(dec.name(), "HierarchicalTiledDecoder");
    }

    #[test]
    fn hierarchical_complexity_bound() {
        // Sanity-check the analytic model's outputs for a typical config.
        let dec = HierarchicalTiledDecoder::new(4, 3);
        let cb = dec.complexity_bound(21, 0.001);
        assert!(cb.scaling_exponent < 2.1);
        assert!(cb.expected_ops > 0.0);
        assert!(cb.crossover_distance >= 5);
    }

    #[test]
    fn hierarchical_trait_object() {
        // The decoder must be usable behind a dyn trait object.
        let dec: Box<dyn SurfaceCodeDecoder> =
            Box::new(HierarchicalTiledDecoder::new(2, 2));
        let syn = simple_syndrome(3, &[(0, 0)]);
        let _ = dec.decode(&syn);
        assert_eq!(dec.name(), "HierarchicalTiledDecoder");
    }

    // --- RenormalizationDecoder ---

    #[test]
    fn renorm_no_errors() {
        let dec = RenormalizationDecoder::new(2, 4);
        let syn = simple_syndrome(5, &[]);
        let corr = dec.decode(&syn);
        assert!(corr.pauli_corrections.is_empty());
    }

    #[test]
    fn renorm_single_defect() {
        let dec = RenormalizationDecoder::new(2, 4);
        let syn = simple_syndrome(5, &[(2, 2)]);
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn renorm_paired() {
        let dec = RenormalizationDecoder::new(2, 3);
        let syn = simple_syndrome(7, &[(1, 1), (2, 1)]);
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn renorm_name() {
        let dec = RenormalizationDecoder::new(2, 3);
        assert_eq!(dec.name(), "RenormalizationDecoder");
    }

    // --- SlidingWindowDecoder ---

    #[test]
    fn sliding_no_errors() {
        let dec = SlidingWindowDecoder::new(2);
        let syn = simple_syndrome(5, &[]);
        let corr = dec.decode(&syn);
        assert!(corr.pauli_corrections.is_empty());
    }

    #[test]
    fn sliding_single_round() {
        let dec = SlidingWindowDecoder::new(1);
        let syn = simple_syndrome(5, &[(0, 0)]);
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn sliding_multi_round() {
        // One stabilizer toggling across three rounds with a 2-round window.
        let dec = SlidingWindowDecoder::new(2);
        let stabs = vec![
            StabilizerMeasurement { x: 0, y: 0, round: 0, value: true },
            StabilizerMeasurement { x: 0, y: 0, round: 1, value: false },
            StabilizerMeasurement { x: 0, y: 0, round: 2, value: true },
        ];
        let syn = SyndromeData {
            stabilizers: stabs,
            code_distance: 3,
            num_rounds: 3,
        };
        let corr = dec.decode(&syn);
        assert!(!corr.pauli_corrections.is_empty());
    }

    #[test]
    fn sliding_name() {
        let dec = SlidingWindowDecoder::new(3);
        assert_eq!(dec.name(), "SlidingWindowDecoder");
    }

    // --- ComplexityAnalyzer ---

    #[test]
    fn analyze_complexity_runs() {
        let dec = HierarchicalTiledDecoder::new(2, 2);
        let cb = ComplexityAnalyzer::analyze_complexity(&dec, 5, 0.001);
        assert!(cb.expected_ops > 0.0);
        assert!(cb.worst_case_ops >= cb.expected_ops);
    }

    #[test]
    fn threshold_analysis_basic() {
        let tt = ComplexityAnalyzer::threshold_analysis(&[0.001], &[5]);
        assert!(tt.physical_threshold > 0.0);
        assert!(tt.logical_error_rate < 1.0);
        assert!(tt.suppression_exponent > 0.0);
    }

    #[test]
    fn crossover_point_returns_valid() {
        let hier = HierarchicalTiledDecoder::new(2, 2);
        let baseline = crate::decoder::UnionFindDecoder::new(0);
        let cp = ComplexityAnalyzer::crossover_point(&hier, &baseline);
        assert!(cp >= 3);
    }

    // --- DefectGraphBuilder ---

    #[test]
    fn defect_graph_empty() {
        let builder = DefectGraphBuilder::new(4);
        let syn = simple_syndrome(5, &[]);
        let edges = builder.build(&syn, 10);
        assert!(edges.is_empty());
    }

    #[test]
    fn defect_graph_two_nearby() {
        // Adjacent defects at Manhattan distance 1 produce a weight-1 edge.
        let builder = DefectGraphBuilder::new(4);
        let syn = simple_syndrome(5, &[(0, 0), (1, 0)]);
        let edges = builder.build(&syn, 10);
        assert!(!edges.is_empty());
        assert_eq!(edges[0].weight, 1);
    }

    #[test]
    fn defect_graph_far_apart() {
        // Defects beyond max_radius must not be connected.
        let builder = DefectGraphBuilder::new(2);
        let syn = simple_syndrome(11, &[(0, 0), (9, 9)]);
        let edges = builder.build(&syn, 3);
        assert!(edges.is_empty());
    }

    // --- Scaling benchmark & fit ---

    #[test]
    fn benchmark_scaling_runs() {
        let data = benchmark_scaling(&[3, 5, 7], 0.001);
        assert_eq!(data.len(), 3);
        for pt in &data {
            assert!(pt.mean_decode_ns >= 0.0);
        }
    }

    #[test]
    fn verify_subpolynomial_basic() {
        let data = benchmark_scaling(&[3, 5, 7, 9], 0.001);
        let result = verify_subpolynomial(&data);
        assert!(!result.fitted_exponent.is_nan());
    }

    #[test]
    fn verify_subpolynomial_insufficient_data() {
        // Fewer than two points must yield the degenerate NaN result.
        let result = verify_subpolynomial(&[]);
        assert!(result.fitted_exponent.is_nan());
        assert!(!result.is_subquadratic);
    }
}
1207}