quantrs2_device/process_tomography/analysis/structural.rs

use scirs2_core::ndarray::{Array2, Array4};
use scirs2_core::Complex64;
use std::collections::HashMap;

use super::super::core::SciRS2ProcessTomographer;
use super::super::results::*;
use crate::DeviceResult;

#[cfg(feature = "scirs2")]
use scirs2_linalg::svd;

#[cfg(not(feature = "scirs2"))]
use super::super::fallback::svd;

impl SciRS2ProcessTomographer {
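    /// Extract a Kraus-operator decomposition of the reconstructed process.
    ///
    /// With the `scirs2` backend, the operators are built from an SVD of the
    /// real part of the Choi matrix; otherwise a trivial identity
    /// decomposition with a nominal fidelity is returned.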
    pub(crate) fn perform_kraus_decomposition(
        &self,
        process_matrix: &Array4<Complex64>,
    ) -> DeviceResult<KrausDecomposition> {
        let choi_matrix = self.convert_to_choi_matrix(process_matrix)?;

        #[cfg(feature = "scirs2")]
        {
            // Work on the real part of the Choi matrix; the SVD backend
            // operates on real-valued arrays.
            let real_choi = choi_matrix.mapv(|x| x.re);

            if let Ok((u, s, vt)) = svd(&real_choi.view(), true, None) {
                let mut kraus_operators = Vec::new();
                let tolerance = 1e-12;
                let mut rank = 0;

                for (i, &singular_value) in s.iter().enumerate() {
                    if singular_value > tolerance {
                        rank += 1;

                        // Each retained singular triplet yields one Kraus
                        // operator, scaled by the square root of the
                        // singular value.
                        let sqrt_s = singular_value.sqrt();
                        let dim = (choi_matrix.dim().0 as f64).sqrt() as usize;
                        let mut kraus_op = Array2::zeros((dim, dim));

                        for j in 0..dim.min(u.ncols()) {
                            for k in 0..dim.min(vt.nrows()) {
                                kraus_op[[j, k]] =
                                    Complex64::new(sqrt_s * u[[j, i]] * vt[[i, k]], 0.0);
                            }
                        }

                        kraus_operators.push(kraus_op);
                    }
                }

                let decomposition_fidelity =
                    calculate_kraus_fidelity(&kraus_operators, process_matrix)?;

                return Ok(KrausDecomposition {
                    kraus_operators,
                    decomposition_fidelity,
                    rank,
                });
            }
        }

        // Fallback: a single identity Kraus operator with a nominal fidelity.
        let dim = (process_matrix.dim().0 as f64).sqrt() as usize;
        let identity = Array2::eye(dim);

        Ok(KrausDecomposition {
            kraus_operators: vec![identity],
            decomposition_fidelity: 0.9,
            rank: 1,
        })
    }

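    /// Separate the reconstructed process into coherent and incoherent
    /// error contributions and report their combined strength.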
    pub(crate) fn analyze_noise_decomposition(
        &self,
        process_matrix: &Array4<Complex64>,
    ) -> DeviceResult<NoiseDecomposition> {
        let dim = process_matrix.dim().0;

        let mut coherent_error = Array2::zeros((dim, dim));
        let mut incoherent_errors = HashMap::new();

        // Estimate the coherent error as the average of the process-matrix
        // elements within each off-diagonal (i, j) block.
        for i in 0..dim {
            for j in 0..dim {
                if i != j {
                    let mut coherent_sum = Complex64::new(0.0, 0.0);
                    for k in 0..dim {
                        for l in 0..dim {
                            coherent_sum += process_matrix[[i, j, k, l]];
                        }
                    }
                    coherent_error[[i, j]] = coherent_sum / (dim * dim) as f64;
                }
            }
        }

        // Placeholder per-channel incoherent error rates.
        let pauli_names = ["dephasing", "bit_flip", "phase_flip", "depolarizing"];
        for (idx, name) in pauli_names.iter().enumerate() {
            let error_rate = 0.01 * (idx + 1) as f64;
            incoherent_errors.insert(name.to_string(), error_rate);
        }

        let coherent_strength = coherent_error
            .iter()
            .map(|x| x.norm_sqr())
            .sum::<f64>()
            .sqrt();
        let incoherent_strength: f64 = incoherent_errors.values().sum();
        let total_error_strength = coherent_strength + incoherent_strength;

        Ok(NoiseDecomposition {
            coherent_error,
            incoherent_errors,
            total_error_strength,
        })
    }

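    /// Compute scalar coherence measures (l1, relative entropy, robustness)
    /// together with a coherence matrix for the process.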
    pub(crate) fn analyze_coherence(
        &self,
        process_matrix: &Array4<Complex64>,
    ) -> DeviceResult<CoherenceAnalysis> {
        let mut coherence_measures = HashMap::new();
        let mut decoherence_times = HashMap::new();

        let l1_coherence = self.calculate_l1_coherence(process_matrix);
        let relative_entropy_coherence = self.calculate_relative_entropy_coherence(process_matrix);
        let robustness_coherence = self.calculate_robustness_coherence(process_matrix);

        coherence_measures.insert("l1_coherence".to_string(), l1_coherence);
        coherence_measures.insert("relative_entropy".to_string(), relative_entropy_coherence);
        coherence_measures.insert("robustness".to_string(), robustness_coherence);

        // Placeholder decoherence times.
        decoherence_times.insert("t1".to_string(), 50.0);
        decoherence_times.insert("t2".to_string(), 25.0);
        decoherence_times.insert("t2_echo".to_string(), 75.0);

        let coherence_matrix = self.construct_coherence_matrix(process_matrix)?;

        Ok(CoherenceAnalysis {
            coherence_measures,
            decoherence_times,
            coherence_matrix,
        })
    }

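    /// Score how well the process preserves hermiticity, trace, and
    /// unitarity, and record which symmetries hold above threshold.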
    pub(crate) fn analyze_symmetries(
        &self,
        process_matrix: &Array4<Complex64>,
    ) -> DeviceResult<SymmetryAnalysis> {
        let mut symmetries = Vec::new();
        let mut symmetry_violations = HashMap::new();
        let mut preservation_scores = HashMap::new();

        let hermiticity_preservation = self.check_hermiticity_preservation(process_matrix);
        let trace_preservation = self.check_trace_preservation(process_matrix);
        let unitarity_preservation = self.check_unitarity_preservation(process_matrix);

        if hermiticity_preservation > 0.9 {
            symmetries.push("hermiticity_preserving".to_string());
        }
        if trace_preservation > 0.9 {
            symmetries.push("trace_preserving".to_string());
        }
        if unitarity_preservation > 0.9 {
            symmetries.push("unitary".to_string());
        }

        symmetry_violations.insert("hermiticity".to_string(), 1.0 - hermiticity_preservation);
        symmetry_violations.insert("trace".to_string(), 1.0 - trace_preservation);
        symmetry_violations.insert("unitarity".to_string(), 1.0 - unitarity_preservation);

        preservation_scores.insert("hermiticity".to_string(), hermiticity_preservation);
        preservation_scores.insert("trace".to_string(), trace_preservation);
        preservation_scores.insert("unitarity".to_string(), unitarity_preservation);

        Ok(SymmetryAnalysis {
            symmetries,
            symmetry_violations,
            preservation_scores,
        })
    }

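    /// Build a weighted graph over process-matrix index pairs and compute
    /// basic per-node and whole-graph metrics.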
    pub(crate) fn construct_process_graph(
        &self,
        process_matrix: &Array4<Complex64>,
    ) -> DeviceResult<ProcessGraph> {
        let dim = process_matrix.dim().0;
        let graph_size = dim * dim;
        let mut adjacency_matrix = Array2::zeros((graph_size, graph_size));

        // Each (i, j) index pair becomes a graph node; edge weights are the
        // magnitudes of the corresponding process-matrix elements.
        for i in 0..dim {
            for j in 0..dim {
                for k in 0..dim {
                    for l in 0..dim {
                        let node1 = i * dim + j;
                        let node2 = k * dim + l;

                        let coupling_strength = process_matrix[[i, j, k, l]].norm();
                        adjacency_matrix[[node1, node2]] = coupling_strength;
                    }
                }
            }
        }

        let mut node_properties = Vec::new();
        for node_idx in 0..graph_size {
            let strength = adjacency_matrix.row(node_idx).sum();
            let clustering_coefficient =
                self.calculate_clustering_coefficient(&adjacency_matrix, node_idx);
            let betweenness_centrality =
                self.calculate_betweenness_centrality(&adjacency_matrix, node_idx);

            node_properties.push(NodeProperties {
                index: node_idx,
                strength,
                clustering_coefficient,
                betweenness_centrality,
            });
        }

        let num_nodes = graph_size;
        // Approximate undirected edge count (assumes a symmetric adjacency matrix).
        let num_edges = adjacency_matrix.iter().filter(|&&x| x > 1e-12).count() / 2;
        let density = if num_nodes > 1 {
            2.0 * num_edges as f64 / (num_nodes * (num_nodes - 1)) as f64
        } else {
            0.0
        };
        let average_clustering = node_properties
            .iter()
            .map(|n| n.clustering_coefficient)
            .sum::<f64>()
            / num_nodes as f64;
        let average_path_length = self.calculate_average_path_length(&adjacency_matrix);

        let graph_metrics = GraphMetrics {
            num_nodes,
            num_edges,
            density,
            average_clustering,
            average_path_length,
        };

        Ok(ProcessGraph {
            adjacency_matrix,
            node_properties,
            graph_metrics,
        })
    }

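    /// L1 coherence: the summed magnitude of all off-diagonal
    /// process-matrix elements.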
    fn calculate_l1_coherence(&self, process_matrix: &Array4<Complex64>) -> f64 {
        let dim = process_matrix.dim().0;
        let mut l1_sum = 0.0;

        for i in 0..dim {
            for j in 0..dim {
                for k in 0..dim {
                    for l in 0..dim {
                        if i != k || j != l {
                            l1_sum += process_matrix[[i, j, k, l]].norm();
                        }
                    }
                }
            }
        }

        l1_sum
    }

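    /// Entropy-style coherence measure over the squared magnitudes of the
    /// process-matrix elements.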
    fn calculate_relative_entropy_coherence(&self, process_matrix: &Array4<Complex64>) -> f64 {
        let dim = process_matrix.dim().0;
        let mut entropy = 0.0;

        for i in 0..dim {
            for j in 0..dim {
                for k in 0..dim {
                    for l in 0..dim {
                        let prob = process_matrix[[i, j, k, l]].norm_sqr();
                        if prob > 1e-12 {
                            entropy -= prob * prob.ln();
                        }
                    }
                }
            }
        }

        entropy
    }

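    /// Robustness-style measure: the smallest off-diagonal element
    /// magnitude in the process matrix.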
    fn calculate_robustness_coherence(&self, process_matrix: &Array4<Complex64>) -> f64 {
        let dim = process_matrix.dim().0;
        let mut min_distance = f64::INFINITY;

        for i in 0..dim {
            for j in 0..dim {
                for k in 0..dim {
                    for l in 0..dim {
                        if i != k || j != l {
                            let distance = process_matrix[[i, j, k, l]].norm();
                            min_distance = min_distance.min(distance);
                        }
                    }
                }
            }
        }

        // Guard against the degenerate case with no off-diagonal elements.
        if min_distance.is_finite() {
            min_distance
        } else {
            0.0
        }
    }

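    /// Flatten the off-diagonal structure of the process matrix into a
    /// (dim^2 x dim^2) matrix of element magnitudes.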
    fn construct_coherence_matrix(
        &self,
        process_matrix: &Array4<Complex64>,
    ) -> DeviceResult<Array2<f64>> {
        let dim = process_matrix.dim().0;
        let coherence_dim = dim * dim;
        let mut coherence_matrix = Array2::zeros((coherence_dim, coherence_dim));

        for i in 0..dim {
            for j in 0..dim {
                for k in 0..dim {
                    for l in 0..dim {
                        let row = i * dim + j;
                        let col = k * dim + l;

                        if i != k || j != l {
                            coherence_matrix[[row, col]] = process_matrix[[i, j, k, l]].norm();
                        }
                    }
                }
            }
        }

        Ok(coherence_matrix)
    }

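    /// Score hermiticity preservation from the average deviation between
    /// each element and the conjugate of its transpose partner.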
    fn check_hermiticity_preservation(&self, process_matrix: &Array4<Complex64>) -> f64 {
        let dim = process_matrix.dim().0;
        let mut deviation = 0.0;
        let mut total_elements = 0;

        for i in 0..dim {
            for j in 0..dim {
                for k in 0..dim {
                    for l in 0..dim {
                        let element1 = process_matrix[[i, j, k, l]];
                        let element2 = process_matrix[[k, l, i, j]].conj();
                        deviation += (element1 - element2).norm();
                        total_elements += 1;
                    }
                }
            }
        }

        1.0 - (deviation / total_elements as f64).min(1.0)
    }

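    /// Score trace preservation from the deviation of the process trace
    /// from unity.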
    fn check_trace_preservation(&self, process_matrix: &Array4<Complex64>) -> f64 {
        let dim = process_matrix.dim().0;
        let mut trace = Complex64::new(0.0, 0.0);

        for i in 0..dim {
            for j in 0..dim {
                trace += process_matrix[[i, j, i, j]];
            }
        }

        let trace_deviation = (trace.re - 1.0).abs() + trace.im.abs();
        1.0 - trace_deviation.min(1.0)
    }

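    /// Score unitarity preservation from the deviation of the diagonal
    /// block sums from unity.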
    fn check_unitarity_preservation(&self, process_matrix: &Array4<Complex64>) -> f64 {
        let dim = process_matrix.dim().0;
        let mut unitarity_measure = 0.0;

        // Accumulate, for each basis state, how far the diagonal block sum
        // deviates from unity, then average over the states.
        for i in 0..dim {
            let mut diagonal_sum = Complex64::new(0.0, 0.0);
            for k in 0..dim {
                diagonal_sum += process_matrix[[i, i, k, k]];
            }
            unitarity_measure += (diagonal_sum.re - 1.0).abs();
        }

        1.0 - (unitarity_measure / dim as f64).min(1.0)
    }

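    /// Fraction of possible edges among a node's neighbors that are
    /// actually present.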
    fn calculate_clustering_coefficient(
        &self,
        adjacency_matrix: &Array2<f64>,
        node_idx: usize,
    ) -> f64 {
        let num_nodes = adjacency_matrix.nrows();
        let threshold = 1e-12;

        let neighbors: Vec<usize> = (0..num_nodes)
            .filter(|&i| i != node_idx && adjacency_matrix[[node_idx, i]] > threshold)
            .collect();

        if neighbors.len() < 2 {
            return 0.0;
        }

        // Count edges among the node's neighbors (closed triangles).
        let mut triangles = 0;
        for i in 0..neighbors.len() {
            for j in (i + 1)..neighbors.len() {
                if adjacency_matrix[[neighbors[i], neighbors[j]]] > threshold {
                    triangles += 1;
                }
            }
        }

        // With at least two neighbors, the denominator is always nonzero.
        let possible_triangles = neighbors.len() * (neighbors.len() - 1) / 2;
        triangles as f64 / possible_triangles as f64
    }

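    /// Approximate betweenness centrality via the node's normalized degree.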
    fn calculate_betweenness_centrality(
        &self,
        adjacency_matrix: &Array2<f64>,
        node_idx: usize,
    ) -> f64 {
        // Degree-based proxy: exact betweenness centrality would require
        // shortest-path counting; the normalized degree is used instead.
        let num_nodes = adjacency_matrix.nrows();
        let degree = adjacency_matrix
            .row(node_idx)
            .iter()
            .filter(|&&x| x > 1e-12)
            .count();

        degree as f64 / num_nodes as f64
    }

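    /// Mean pairwise path length, approximating every non-adjacent pair as
    /// two hops apart.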
    fn calculate_average_path_length(&self, adjacency_matrix: &Array2<f64>) -> f64 {
        let num_nodes = adjacency_matrix.nrows();
        let mut total_path_length = 0.0;
        let mut path_count = 0;

        for i in 0..num_nodes {
            for j in (i + 1)..num_nodes {
                if adjacency_matrix[[i, j]] > 1e-12 {
                    // Directly connected pair.
                    total_path_length += 1.0;
                } else {
                    // Approximation: assume non-adjacent pairs are two hops apart.
                    total_path_length += 2.0;
                }
                path_count += 1;
            }
        }

        if path_count > 0 {
            total_path_length / path_count as f64
        } else {
            0.0
        }
    }
}

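/// Fidelity between the original process matrix and the one reconstructed
/// from the given Kraus operators (normalized Hilbert-Schmidt overlap).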
fn calculate_kraus_fidelity(
    kraus_operators: &[Array2<Complex64>],
    original_process: &Array4<Complex64>,
) -> DeviceResult<f64> {
    let dim = original_process.dim().0;
    let mut reconstructed_process = Array4::zeros((dim, dim, dim, dim));

    // Rebuild the process matrix from the Kraus operators:
    // chi[i, j, k, l] += K[i, k] * conj(K[j, l]) for each operator K.
    for kraus_op in kraus_operators {
        for i in 0..dim {
            for j in 0..dim {
                for k in 0..dim {
                    for l in 0..dim {
                        if i < kraus_op.nrows()
                            && k < kraus_op.ncols()
                            && j < kraus_op.nrows()
                            && l < kraus_op.ncols()
                        {
                            reconstructed_process[[i, j, k, l]] +=
                                kraus_op[[i, k]] * kraus_op[[j, l]].conj();
                        }
                    }
                }
            }
        }
    }

    // Normalized Hilbert-Schmidt overlap between the original and the
    // reconstructed process matrices.
    let mut fidelity = 0.0;
    let mut norm_original = 0.0;
    let mut norm_reconstructed = 0.0;

    for i in 0..dim {
        for j in 0..dim {
            for k in 0..dim {
                for l in 0..dim {
                    let orig: Complex64 = original_process[[i, j, k, l]];
                    let recon: Complex64 = reconstructed_process[[i, j, k, l]];

                    fidelity += (orig.conj() * recon).re;
                    norm_original += orig.norm_sqr();
                    norm_reconstructed += recon.norm_sqr();
                }
            }
        }
    }

    if norm_original > 1e-12 && norm_reconstructed > 1e-12 {
        Ok(fidelity / (norm_original * norm_reconstructed).sqrt())
    } else {
        Ok(0.0)
    }
}