use scirs2_core::ndarray::Array2;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet, VecDeque};
use crate::circuit_interfaces::{
CircuitInterface, InterfaceCircuit, InterfaceGate, InterfaceGateType,
};
use crate::error::Result;
/// Strategy used to build the LDPC Tanner graph / parity-check matrix.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LDPCConstructionMethod {
    /// Random (dv, dc)-regular graph via shuffled edge stubs.
    RandomRegular,
    /// Progressive edge growth: greedily maximizes local girth.
    ProgressiveEdgeGrowth,
    /// Gallager-style ensemble (currently delegates to `RandomRegular`).
    Gallager,
    /// MacKay-style construction (currently delegates to `ProgressiveEdgeGrowth`).
    MacKay,
    /// Bicycle-code-inspired structured construction for quantum codes.
    QuantumBicycle,
    /// Nearest-neighbour grid construction resembling a surface code.
    SurfaceCode,
}
/// Message-passing variant used by the belief-propagation decoder.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BeliefPropagationAlgorithm {
    /// Exact tanh-rule sum-product updates.
    SumProduct,
    /// Min-sum approximation (sign product times minimum magnitude).
    MinSum,
    /// Min-sum scaled by a fixed normalization factor.
    NormalizedMinSum,
    /// Min-sum with a fixed offset subtracted from the magnitude.
    OffsetMinSum,
    /// Layered scheduling (currently falls back to sum-product updates).
    LayeredBP,
}
/// Parameters of an LDPC code and its belief-propagation decoder.
#[derive(Debug, Clone)]
pub struct LDPCConfig {
    /// Number of logical (information) bits.
    pub k: usize,
    /// Block length: number of code bits / variable nodes.
    pub n: usize,
    /// Number of parity checks (check nodes).
    pub m: usize,
    /// Variable-node degree (checks per variable).
    pub dv: usize,
    /// Check-node degree (variables per check); a perfectly regular
    /// graph needs n * dv == m * dc.
    pub dc: usize,
    /// Tanner-graph construction strategy.
    pub construction_method: LDPCConstructionMethod,
    /// Message-passing variant used when decoding.
    pub bp_algorithm: BeliefPropagationAlgorithm,
    /// Hard cap on BP iterations per decode.
    pub max_bp_iterations: usize,
    /// Convergence tolerance (currently unused by the decoder loop,
    /// which tests syndrome satisfaction instead).
    pub convergence_threshold: f64,
    /// Damping factor `a` in `a * new + (1 - a) * old` message updates.
    pub damping_factor: f64,
    /// Stop as soon as the hard decision satisfies all checks.
    pub early_termination: bool,
    /// Channel noise variance used to scale LLRs in threshold estimation.
    pub noise_variance: f64,
}
impl Default for LDPCConfig {
    /// A small rate-1/2 regular code: n = 20 code bits, k = 10 information
    /// bits, m = 10 checks, with degrees (dv, dc) = (3, 6) so that
    /// n * dv == m * dc. Decoder defaults: exact sum-product over a
    /// PEG-built graph, up to 50 damped iterations with early termination.
    fn default() -> Self {
        Self {
            n: 20,
            k: 10,
            m: 10,
            dv: 3,
            dc: 6,
            construction_method: LDPCConstructionMethod::ProgressiveEdgeGrowth,
            bp_algorithm: BeliefPropagationAlgorithm::SumProduct,
            max_bp_iterations: 50,
            early_termination: true,
            damping_factor: 0.8,
            convergence_threshold: 1e-6,
            noise_variance: 0.1,
        }
    }
}
/// Bipartite Tanner-graph representation of an LDPC code.
#[derive(Debug, Clone)]
pub struct TannerGraph {
    /// One node per code bit (column of H).
    pub variable_nodes: Vec<VariableNode>,
    /// One node per parity check (row of H).
    pub check_nodes: Vec<CheckNode>,
    /// Edge indicator matrix indexed as [variable, check].
    pub adjacency_matrix: Array2<bool>,
    /// Binary parity-check matrix H, indexed as [check, variable].
    pub parity_check_matrix: Array2<u8>,
}
/// Variable (bit) node state for belief propagation.
#[derive(Debug, Clone)]
pub struct VariableNode {
    /// Index of this variable node (code-bit position).
    pub id: usize,
    /// Ids of the check nodes adjacent to this variable.
    pub connected_checks: Vec<usize>,
    /// Current total belief: channel LLR plus all incoming messages.
    /// Positive favours bit = 0 (hard decision is `belief < 0.0`).
    pub belief: f64,
    /// Channel log-likelihood ratio for this bit.
    pub channel_llr: f64,
    /// Latest check-to-variable message, keyed by check id.
    pub incoming_messages: HashMap<usize, f64>,
}
/// Check (parity) node state for belief propagation.
#[derive(Debug, Clone)]
pub struct CheckNode {
    /// Index of this check node (row of H).
    pub id: usize,
    /// Ids of the variable nodes adjacent to this check.
    pub connected_variables: Vec<usize>,
    /// Latest variable-to-check message, keyed by variable id.
    pub incoming_messages: HashMap<usize, f64>,
    /// Target syndrome bit for this check, set before decoding.
    pub syndrome: bool,
}
impl VariableNode {
    /// Creates a disconnected variable node with zero belief and LLR.
    #[must_use]
    pub fn new(id: usize) -> Self {
        Self {
            id,
            belief: 0.0,
            channel_llr: 0.0,
            connected_checks: Vec::new(),
            incoming_messages: HashMap::new(),
        }
    }
    /// Recomputes the total belief: channel LLR plus every incoming
    /// check-to-variable message.
    pub fn update_belief(&mut self) {
        let extrinsic: f64 = self.incoming_messages.values().sum();
        self.belief = self.channel_llr + extrinsic;
    }
    /// Extrinsic message sent to `check_id`: channel LLR plus all incoming
    /// messages except the one that came from `check_id` itself.
    #[must_use]
    pub fn compute_outgoing_message(&self, check_id: usize) -> f64 {
        let mut total = self.channel_llr;
        for (&source, &msg) in &self.incoming_messages {
            if source != check_id {
                total += msg;
            }
        }
        total
    }
}
impl CheckNode {
    /// Creates a disconnected check node with a clear (satisfied) syndrome.
    #[must_use]
    pub fn new(id: usize) -> Self {
        Self {
            id,
            connected_variables: Vec::new(),
            incoming_messages: HashMap::new(),
            syndrome: false,
        }
    }
    /// Exact sum-product (tanh-rule) check-to-variable message:
    /// `L(c->v) = 2 * atanh( prod_{v' != v} tanh(L(v'->c) / 2) )`.
    ///
    /// Bug fix: tanh must be applied to *half the message*
    /// (`(msg / 2).tanh()`), not `msg.tanh() / 2.0` as before.
    /// Returns 0.0 when there are no other incoming messages (the empty
    /// product would otherwise give `atanh(1.0) = +inf`), and the product
    /// is clamped so `atanh` stays finite when messages saturate.
    #[must_use]
    pub fn compute_outgoing_message_sum_product(&self, var_id: usize) -> f64 {
        let mut product = 1.0_f64;
        let mut others = 0_usize;
        for (&id, &msg) in &self.incoming_messages {
            if id != var_id {
                product *= (msg / 2.0).tanh();
                others += 1;
            }
        }
        if others == 0 {
            return 0.0;
        }
        // Keep atanh's argument strictly inside (-1, 1).
        const SAT: f64 = 1.0 - 1e-12;
        2.0 * product.clamp(-SAT, SAT).atanh()
    }
    /// Min-sum approximation of the check-to-variable message: product of
    /// the signs of all other incoming messages times their minimum
    /// magnitude. Returns 0.0 when there are no other messages.
    #[must_use]
    pub fn compute_outgoing_message_min_sum(&self, var_id: usize) -> f64 {
        let other_messages: Vec<f64> = self
            .incoming_messages
            .iter()
            .filter(|(&id, _)| id != var_id)
            .map(|(_, &msg)| msg)
            .collect();
        if other_messages.is_empty() {
            return 0.0;
        }
        // Sign: product of the signs of the other messages (0.0 counts as +).
        let sign_product: f64 = other_messages
            .iter()
            .map(|&msg| if msg >= 0.0 { 1.0 } else { -1.0 })
            .product();
        // Magnitude: smallest |message| among the others.
        let min_magnitude = other_messages
            .iter()
            .map(|&msg| msg.abs())
            .fold(f64::INFINITY, f64::min);
        sign_product * min_magnitude
    }
}
/// A quantum LDPC code: Tanner graph, stabilizer/logical operator
/// matrices, and an embedded belief-propagation decoder with statistics.
pub struct QuantumLDPCCode {
    /// Code and decoder parameters.
    config: LDPCConfig,
    /// Bipartite decoding graph; holds all per-node BP state.
    tanner_graph: TannerGraph,
    /// X-type stabilizer generators (currently a copy of H).
    #[allow(dead_code)]
    x_stabilizers: Array2<u8>,
    /// Z-type stabilizer generators (currently a copy of H).
    #[allow(dead_code)]
    z_stabilizers: Array2<u8>,
    /// Logical X operators, one row per encoded qubit.
    #[allow(dead_code)]
    logical_x_ops: Array2<u8>,
    /// Logical Z operators, one row per encoded qubit.
    #[allow(dead_code)]
    logical_z_ops: Array2<u8>,
    /// Circuit backend handle; currently unused (kept for future use,
    /// hence the dead_code allowance).
    #[allow(dead_code)]
    circuit_interface: CircuitInterface,
    /// Running decoding statistics.
    stats: LDPCStats,
}
/// Aggregate statistics across decoding runs.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct LDPCStats {
    /// Total decode attempts observed.
    pub total_decodings: usize,
    /// Attempts whose final syndrome was all-zero.
    pub successful_decodings: usize,
    /// Running mean of BP iterations per decode.
    pub avg_bp_iterations: f64,
    /// 1 - successful / total.
    pub block_error_rate: f64,
    /// Per-bit error rate (currently never written by this module).
    pub bit_error_rate: f64,
    /// Running mean wall-clock decode time, milliseconds.
    pub avg_decoding_time_ms: f64,
    /// Most recent estimate produced by `estimate_threshold`.
    pub threshold_estimate: f64,
    /// successful / total (complement of `block_error_rate`).
    pub convergence_rate: f64,
}
impl LDPCStats {
    /// Folds one decoding attempt into the running aggregates.
    ///
    /// Running means are updated incrementally as
    /// `new_avg = (old_avg * (n - 1) + x) / n`, so no history is stored.
    pub fn update_after_decoding(&mut self, success: bool, iterations: usize, time_ms: f64) {
        self.total_decodings += 1;
        self.successful_decodings += usize::from(success);
        let n = self.total_decodings as f64;
        self.avg_bp_iterations = self.avg_bp_iterations.mul_add(n - 1.0, iterations as f64) / n;
        self.avg_decoding_time_ms = self.avg_decoding_time_ms.mul_add(n - 1.0, time_ms) / n;
        let success_ratio = self.successful_decodings as f64 / n;
        self.convergence_rate = success_ratio;
        self.block_error_rate = 1.0 - success_ratio;
    }
}
/// Outcome of one belief-propagation decode.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BPDecodingResult {
    /// Hard-decision bits (true = 1), one per variable node.
    pub decoded_bits: Vec<bool>,
    /// Syndrome of the decoded bits (all false on success).
    pub final_syndrome: Vec<bool>,
    /// Number of BP iterations executed.
    pub iterations: usize,
    /// Whether the hard decision satisfied every parity check.
    pub converged: bool,
    /// Final per-bit beliefs (LLRs) after message passing.
    pub final_llrs: Vec<f64>,
    /// Wall-clock decoding time in milliseconds.
    pub decoding_time_ms: f64,
    /// True when `final_syndrome` is all-zero.
    pub success: bool,
}
impl QuantumLDPCCode {
    /// Builds a quantum LDPC code from `config`: constructs the Tanner
    /// graph, derives (placeholder) X/Z stabilizers from its parity-check
    /// matrix, and seeds diagonal logical operators.
    ///
    /// # Errors
    /// Propagates failures from the circuit interface or graph construction.
    pub fn new(config: LDPCConfig) -> Result<Self> {
        let circuit_interface = CircuitInterface::new(Default::default())?;
        let tanner_graph = Self::construct_tanner_graph(&config)?;
        let (x_stabilizers, z_stabilizers) =
            Self::generate_stabilizer_matrices(&config, &tanner_graph)?;
        let (logical_x_ops, logical_z_ops) =
            Self::generate_logical_operators(&config, &x_stabilizers, &z_stabilizers)?;
        Ok(Self {
            config,
            tanner_graph,
            x_stabilizers,
            z_stabilizers,
            logical_x_ops,
            logical_z_ops,
            circuit_interface,
            stats: LDPCStats::default(),
        })
    }
    /// Dispatches to the Tanner-graph builder selected in `config`.
    ///
    /// # Errors
    /// Propagates any failure from the chosen builder.
    fn construct_tanner_graph(config: &LDPCConfig) -> Result<TannerGraph> {
        match config.construction_method {
            LDPCConstructionMethod::RandomRegular => Self::construct_random_regular(config),
            LDPCConstructionMethod::ProgressiveEdgeGrowth => Self::construct_peg(config),
            LDPCConstructionMethod::Gallager => Self::construct_gallager(config),
            LDPCConstructionMethod::MacKay => Self::construct_mackay(config),
            LDPCConstructionMethod::QuantumBicycle => Self::construct_quantum_bicycle(config),
            LDPCConstructionMethod::SurfaceCode => Self::construct_surface_code(config),
        }
    }
fn construct_random_regular(config: &LDPCConfig) -> Result<TannerGraph> {
let mut variable_nodes = Vec::with_capacity(config.n);
let mut check_nodes = Vec::with_capacity(config.m);
let mut adjacency_matrix = Array2::from_elem((config.n, config.m), false);
for i in 0..config.n {
variable_nodes.push(VariableNode::new(i));
}
for i in 0..config.m {
check_nodes.push(CheckNode::new(i));
}
let mut edges = Vec::new();
for var_id in 0..config.n {
for _ in 0..config.dv {
edges.push(var_id);
}
}
for i in 0..edges.len() {
let j = fastrand::usize(i..edges.len());
edges.swap(i, j);
}
let mut edge_idx = 0;
for check_id in 0..config.m {
for _ in 0..config.dc {
if edge_idx < edges.len() {
let var_id = edges[edge_idx];
variable_nodes[var_id].connected_checks.push(check_id);
check_nodes[check_id].connected_variables.push(var_id);
adjacency_matrix[[var_id, check_id]] = true;
edge_idx += 1;
}
}
}
let mut parity_check_matrix = Array2::zeros((config.m, config.n));
for (var_id, check_id) in adjacency_matrix.indexed_iter() {
if *check_id {
parity_check_matrix[[var_id.1, var_id.0]] = 1;
}
}
Ok(TannerGraph {
variable_nodes,
check_nodes,
adjacency_matrix,
parity_check_matrix,
})
}
    /// Progressive edge growth (PEG): for each variable node in turn, add
    /// `dv` edges one at a time, each time picking the check whose
    /// attachment maximizes the estimated local girth. The per-variable
    /// `connected_checks` set prevents duplicate edges.
    ///
    /// # Errors
    /// Currently infallible, but kept fallible for interface consistency.
    fn construct_peg(config: &LDPCConfig) -> Result<TannerGraph> {
        let mut variable_nodes = Vec::with_capacity(config.n);
        let mut check_nodes = Vec::with_capacity(config.m);
        let mut adjacency_matrix = Array2::from_elem((config.n, config.m), false);
        for i in 0..config.n {
            variable_nodes.push(VariableNode::new(i));
        }
        for i in 0..config.m {
            check_nodes.push(CheckNode::new(i));
        }
        for var_id in 0..config.n {
            // Checks already attached to this variable (duplicate guard).
            let mut connected_checks = HashSet::new();
            for _ in 0..config.dv {
                let best_check = Self::find_best_check_for_peg(
                    var_id,
                    &connected_checks,
                    &variable_nodes,
                    &check_nodes,
                    &adjacency_matrix,
                    config.m,
                );
                // `None` means no candidate improved the girth (girth 0).
                if let Some(check_id) = best_check {
                    variable_nodes[var_id].connected_checks.push(check_id);
                    check_nodes[check_id].connected_variables.push(var_id);
                    adjacency_matrix[[var_id, check_id]] = true;
                    connected_checks.insert(check_id);
                }
            }
        }
        // H[check, var] = 1 wherever the adjacency matrix records an edge.
        let mut parity_check_matrix = Array2::zeros((config.m, config.n));
        for (var_id, check_id) in adjacency_matrix.indexed_iter() {
            if *check_id {
                parity_check_matrix[[var_id.1, var_id.0]] = 1;
            }
        }
        Ok(TannerGraph {
            variable_nodes,
            check_nodes,
            adjacency_matrix,
            parity_check_matrix,
        })
    }
fn find_best_check_for_peg(
var_id: usize,
connected_checks: &HashSet<usize>,
_variable_nodes: &[VariableNode],
_check_nodes: &[CheckNode],
adjacency_matrix: &Array2<bool>,
num_checks: usize,
) -> Option<usize> {
let mut best_check = None;
let mut best_girth = 0;
for check_id in 0..num_checks {
if connected_checks.contains(&check_id) {
continue;
}
let girth = Self::calculate_local_girth(var_id, check_id, adjacency_matrix);
if girth > best_girth {
best_girth = girth;
best_check = Some(check_id);
}
}
best_check
}
    /// Estimates the length of the shortest cycle that adding edge
    /// (`var_id`, `check_id`) would create, via a truncated BFS over the
    /// bipartite graph starting from `var_id` and ignoring the candidate
    /// edge itself. Larger return values indicate a longer local girth.
    ///
    /// NOTE(review): the search depth is capped at 6 and the returned
    /// `dist * 2` / `dist * 2 + 1` values are heuristic cycle-length
    /// scores, not exact girths — confirm against the PEG literature if
    /// exactness matters.
    fn calculate_local_girth(
        var_id: usize,
        check_id: usize,
        adjacency_matrix: &Array2<bool>,
    ) -> usize {
        let mut visited_vars = HashSet::new();
        let mut visited_checks = HashSet::new();
        // Queue entries: (node id, BFS depth, is-variable-node flag).
        let mut queue = VecDeque::new();
        queue.push_back((var_id, 0, true));
        visited_vars.insert(var_id);
        while let Some((node_id, dist, is_var)) = queue.pop_front() {
            if dist > 6 {
                break;
            }
            if is_var {
                // Expand to neighbouring checks, skipping the candidate edge.
                for (check_idx, &connected) in adjacency_matrix.row(node_id).indexed_iter() {
                    if connected && check_idx != check_id {
                        if visited_checks.contains(&check_idx) {
                            // Re-reached a check: cycle closes here.
                            return dist * 2;
                        }
                        visited_checks.insert(check_idx);
                        queue.push_back((check_idx, dist + 1, false));
                    }
                }
            } else {
                // Expand to neighbouring variables, skipping the start node.
                for (var_idx, &connected) in adjacency_matrix.column(node_id).indexed_iter() {
                    if connected && var_idx != var_id {
                        if visited_vars.contains(&var_idx) {
                            // Re-reached a variable: cycle closes here.
                            return dist * 2 + 1;
                        }
                        visited_vars.insert(var_idx);
                        queue.push_back((var_idx, dist + 1, true));
                    }
                }
            }
        }
        // No short cycle within the search horizon: report the cap value.
        12
    }
    /// Gallager-style construction; currently delegates to the random
    /// regular builder.
    fn construct_gallager(config: &LDPCConfig) -> Result<TannerGraph> {
        Self::construct_random_regular(config)
    }
    /// MacKay-style construction; currently delegates to progressive edge
    /// growth.
    fn construct_mackay(config: &LDPCConfig) -> Result<TannerGraph> {
        Self::construct_peg(config)
    }
fn construct_quantum_bicycle(config: &LDPCConfig) -> Result<TannerGraph> {
let mut variable_nodes = Vec::with_capacity(config.n);
let mut check_nodes = Vec::with_capacity(config.m);
let mut adjacency_matrix = Array2::from_elem((config.n, config.m), false);
for i in 0..config.n {
variable_nodes.push(VariableNode::new(i));
}
for i in 0..config.m {
check_nodes.push(CheckNode::new(i));
}
let l = config.n / 2;
for i in 0..l {
for j in 0..config.dv {
let check_id = (i + j * l / config.dv) % config.m;
variable_nodes[i].connected_checks.push(check_id);
check_nodes[check_id].connected_variables.push(i);
adjacency_matrix[[i, check_id]] = true;
if i + l < config.n {
variable_nodes[i + l].connected_checks.push(check_id);
check_nodes[check_id].connected_variables.push(i + l);
adjacency_matrix[[i + l, check_id]] = true;
}
}
}
let mut parity_check_matrix = Array2::zeros((config.m, config.n));
for (var_id, check_id) in adjacency_matrix.indexed_iter() {
if *check_id {
parity_check_matrix[[var_id.1, var_id.0]] = 1;
}
}
Ok(TannerGraph {
variable_nodes,
check_nodes,
adjacency_matrix,
parity_check_matrix,
})
}
    /// Builds a surface-code-like Tanner graph on a d x d grid with
    /// d = floor(sqrt(n)): each check couples to its (up to) four nearest
    /// grid neighbours.
    ///
    /// Underflow note: `wrapping_sub(1)` at row/col 0 produces usize::MAX,
    /// which the `*r < d` / `*c < d` bounds test then rejects.
    ///
    /// # Errors
    /// Currently infallible, but kept fallible for interface consistency.
    fn construct_surface_code(config: &LDPCConfig) -> Result<TannerGraph> {
        let d = (config.n as f64).sqrt() as usize;
        let mut variable_nodes = Vec::with_capacity(config.n);
        let mut check_nodes = Vec::with_capacity(config.m);
        let mut adjacency_matrix = Array2::from_elem((config.n, config.m), false);
        for i in 0..config.n {
            variable_nodes.push(VariableNode::new(i));
        }
        for i in 0..config.m {
            check_nodes.push(CheckNode::new(i));
        }
        for check_id in 0..config.m {
            // Check position on the d x d grid.
            let row = check_id / d;
            let col = check_id % d;
            let neighbors = [
                (row.wrapping_sub(1), col),
                (row + 1, col),
                (row, col.wrapping_sub(1)),
                (row, col + 1),
            ];
            for (r, c) in &neighbors {
                if *r < d && *c < d {
                    let var_id = r * d + c;
                    if var_id < config.n {
                        variable_nodes[var_id].connected_checks.push(check_id);
                        check_nodes[check_id].connected_variables.push(var_id);
                        adjacency_matrix[[var_id, check_id]] = true;
                    }
                }
            }
        }
        // H[check, var] = 1 wherever the adjacency matrix records an edge.
        let mut parity_check_matrix = Array2::zeros((config.m, config.n));
        for (var_id, check_id) in adjacency_matrix.indexed_iter() {
            if *check_id {
                parity_check_matrix[[var_id.1, var_id.0]] = 1;
            }
        }
        Ok(TannerGraph {
            variable_nodes,
            check_nodes,
            adjacency_matrix,
            parity_check_matrix,
        })
    }
    /// Derives X/Z stabilizer matrices from the Tanner graph.
    ///
    /// NOTE(review): both families are plain copies of the classical
    /// parity-check matrix; a true CSS construction requires
    /// H_x * H_z^T = 0 — confirm before using these for stabilizer
    /// simulation.
    fn generate_stabilizer_matrices(
        _config: &LDPCConfig,
        tanner_graph: &TannerGraph,
    ) -> Result<(Array2<u8>, Array2<u8>)> {
        let x_stabilizers = tanner_graph.parity_check_matrix.clone();
        let z_stabilizers = tanner_graph.parity_check_matrix.clone();
        Ok((x_stabilizers, z_stabilizers))
    }
fn generate_logical_operators(
config: &LDPCConfig,
_x_stabilizers: &Array2<u8>,
_z_stabilizers: &Array2<u8>,
) -> Result<(Array2<u8>, Array2<u8>)> {
let k = config.k;
let n = config.n;
let mut logical_x_ops = Array2::zeros((k, n));
let mut logical_z_ops = Array2::zeros((k, n));
for i in 0..k.min(n) {
logical_x_ops[[i, i]] = 1;
logical_z_ops[[i, i]] = 1;
}
Ok((logical_x_ops, logical_z_ops))
}
pub fn decode_belief_propagation(
&mut self,
received_llrs: &[f64],
syndrome: &[bool],
) -> Result<BPDecodingResult> {
let start_time = std::time::Instant::now();
for (i, &llr) in received_llrs.iter().enumerate() {
if i < self.tanner_graph.variable_nodes.len() {
self.tanner_graph.variable_nodes[i].channel_llr = llr;
}
}
for (i, &syn) in syndrome.iter().enumerate() {
if i < self.tanner_graph.check_nodes.len() {
self.tanner_graph.check_nodes[i].syndrome = syn;
}
}
let mut converged = false;
let mut iteration = 0;
while iteration < self.config.max_bp_iterations && !converged {
self.update_variable_to_check_messages();
self.update_check_to_variable_messages();
self.update_variable_beliefs();
converged = self.check_convergence();
if self.config.early_termination && converged {
break;
}
iteration += 1;
}
let decoded_bits = self.extract_decoded_bits();
let final_syndrome = self.calculate_syndrome(&decoded_bits);
let success = final_syndrome.iter().all(|&s| !s);
let decoding_time_ms = start_time.elapsed().as_secs_f64() * 1000.0;
self.stats
.update_after_decoding(success, iteration, decoding_time_ms);
let final_llrs: Vec<f64> = self
.tanner_graph
.variable_nodes
.iter()
.map(|node| node.belief)
.collect();
Ok(BPDecodingResult {
decoded_bits,
final_syndrome,
iterations: iteration,
converged,
final_llrs,
decoding_time_ms,
success,
})
}
    /// Variable-to-check half-iteration: each variable sends every
    /// neighbouring check its extrinsic LLR (channel LLR plus all incoming
    /// messages except the one from that check), damped against the
    /// previous message.
    fn update_variable_to_check_messages(&mut self) {
        for var_node in &mut self.tanner_graph.variable_nodes {
            // Clone the neighbour list so the check nodes' message maps
            // can be mutated inside the loop.
            for &check_id in &var_node.connected_checks.clone() {
                let message = var_node.compute_outgoing_message(check_id);
                // Previous message defaults to 0.0 on the first pass.
                let old_message = self.tanner_graph.check_nodes[check_id]
                    .incoming_messages
                    .get(&var_node.id)
                    .unwrap_or(&0.0);
                // damped = a * new + (1 - a) * old, a = damping_factor.
                let damped_message = self
                    .config
                    .damping_factor
                    .mul_add(message, (1.0 - self.config.damping_factor) * old_message);
                self.tanner_graph.check_nodes[check_id]
                    .incoming_messages
                    .insert(var_node.id, damped_message);
            }
        }
    }
    /// Check-to-variable half-iteration: each check sends every
    /// neighbouring variable a message computed per the configured BP
    /// variant, damped against the previous message.
    fn update_check_to_variable_messages(&mut self) {
        for check_node in &mut self.tanner_graph.check_nodes {
            // Clone the neighbour list so the variable nodes' message maps
            // can be mutated inside the loop.
            for &var_id in &check_node.connected_variables.clone() {
                let message = match self.config.bp_algorithm {
                    BeliefPropagationAlgorithm::SumProduct => {
                        check_node.compute_outgoing_message_sum_product(var_id)
                    }
                    BeliefPropagationAlgorithm::MinSum => {
                        check_node.compute_outgoing_message_min_sum(var_id)
                    }
                    BeliefPropagationAlgorithm::NormalizedMinSum => {
                        let min_sum_msg = check_node.compute_outgoing_message_min_sum(var_id);
                        // Fixed normalization factor of 0.75.
                        min_sum_msg * 0.75
                    }
                    BeliefPropagationAlgorithm::OffsetMinSum => {
                        let min_sum_msg = check_node.compute_outgoing_message_min_sum(var_id);
                        // Fixed offset of 0.1 subtracted from the magnitude,
                        // clipping to zero when the magnitude is smaller.
                        let offset = 0.1;
                        if min_sum_msg.abs() > offset {
                            min_sum_msg.signum() * (min_sum_msg.abs() - offset)
                        } else {
                            0.0
                        }
                    }
                    BeliefPropagationAlgorithm::LayeredBP => {
                        // Layered scheduling not implemented; falls back to
                        // flooding sum-product updates.
                        check_node.compute_outgoing_message_sum_product(var_id)
                    }
                };
                // Previous message defaults to 0.0 on the first pass.
                let old_message = self.tanner_graph.variable_nodes[var_id]
                    .incoming_messages
                    .get(&check_node.id)
                    .unwrap_or(&0.0);
                // damped = a * new + (1 - a) * old, a = damping_factor.
                let damped_message = self
                    .config
                    .damping_factor
                    .mul_add(message, (1.0 - self.config.damping_factor) * old_message);
                self.tanner_graph.variable_nodes[var_id]
                    .incoming_messages
                    .insert(check_node.id, damped_message);
            }
        }
    }
fn update_variable_beliefs(&mut self) {
for var_node in &mut self.tanner_graph.variable_nodes {
var_node.update_belief();
}
}
fn check_convergence(&self) -> bool {
let decoded_bits = self.extract_decoded_bits();
let syndrome = self.calculate_syndrome(&decoded_bits);
syndrome.iter().all(|&s| !s)
}
fn extract_decoded_bits(&self) -> Vec<bool> {
self.tanner_graph
.variable_nodes
.iter()
.map(|node| node.belief < 0.0)
.collect()
}
fn calculate_syndrome(&self, codeword: &[bool]) -> Vec<bool> {
let mut syndrome = vec![false; self.tanner_graph.check_nodes.len()];
for (check_id, check_node) in self.tanner_graph.check_nodes.iter().enumerate() {
let mut parity = false;
for &var_id in &check_node.connected_variables {
if var_id < codeword.len() && codeword[var_id] {
parity = !parity;
}
}
syndrome[check_id] = parity;
}
syndrome
}
    /// Builds a syndrome-extraction circuit: one ancilla qubit per check,
    /// CNOT-coupled to every data qubit in that check's support. Data
    /// qubits occupy indices `0..n`; ancillas occupy `n..n + m`.
    ///
    /// # Errors
    /// Currently infallible, but kept fallible for interface consistency.
    pub fn syndrome_circuit(&self) -> Result<InterfaceCircuit> {
        let num_data_qubits = self.config.n;
        let num_syndrome_qubits = self.config.m;
        let total_qubits = num_data_qubits + num_syndrome_qubits;
        let mut circuit = InterfaceCircuit::new(total_qubits, num_syndrome_qubits);
        for (check_id, check_node) in self.tanner_graph.check_nodes.iter().enumerate() {
            let syndrome_qubit = num_data_qubits + check_id;
            for &var_id in &check_node.connected_variables {
                // Guard against stale variable ids beyond the data register.
                if var_id < num_data_qubits {
                    circuit.add_gate(InterfaceGate::new(
                        InterfaceGateType::CNOT,
                        vec![var_id, syndrome_qubit],
                    ));
                }
            }
        }
        Ok(circuit)
    }
    /// Estimates the error-correction threshold by a halving search over
    /// `noise_range`: each probe runs `num_trials` randomized decodings
    /// and moves the probe toward higher noise while more than half the
    /// trials succeed. The result is cached in `stats.threshold_estimate`.
    ///
    /// NOTE(review): midpoints are always taken against the ORIGINAL
    /// `min_noise` / `max_noise` bounds (the bounds themselves are never
    /// narrowed), so this is not a true bisection — confirm intent.
    /// Side effect: overwrites `self.config.noise_variance` at each probe.
    ///
    /// # Errors
    /// Propagates decoder failures.
    pub fn estimate_threshold(
        &mut self,
        noise_range: (f64, f64),
        num_trials: usize,
    ) -> Result<f64> {
        let (min_noise, max_noise) = noise_range;
        let mut threshold = f64::midpoint(min_noise, max_noise);
        let mut search_range = max_noise - min_noise;
        while search_range > 0.001 {
            self.config.noise_variance = threshold;
            let mut successes = 0;
            for _ in 0..num_trials {
                // Random error pattern at the probe error rate.
                let errors: Vec<bool> = (0..self.config.n)
                    .map(|_| fastrand::f64() < threshold)
                    .collect();
                // Ideal-channel LLRs: +/- 2 / variance per bit.
                let llrs: Vec<f64> = errors
                    .iter()
                    .map(|&error| {
                        if error {
                            -2.0 / self.config.noise_variance
                        } else {
                            2.0 / self.config.noise_variance
                        }
                    })
                    .collect();
                let syndrome = self.calculate_syndrome(&errors);
                if let Ok(result) = self.decode_belief_propagation(&llrs, &syndrome) {
                    if result.success {
                        successes += 1;
                    }
                }
            }
            let success_rate = f64::from(successes) / num_trials as f64;
            // Move toward higher noise while decoding mostly succeeds.
            if success_rate > 0.5 {
                threshold = f64::midpoint(threshold, max_noise);
            } else {
                threshold = f64::midpoint(min_noise, threshold);
            }
            search_range /= 2.0;
        }
        self.stats.threshold_estimate = threshold;
        Ok(threshold)
    }
    /// Returns the accumulated decoding statistics (read-only).
    #[must_use]
    pub const fn get_stats(&self) -> &LDPCStats {
        &self.stats
    }
    /// Clears all accumulated decoding statistics.
    pub fn reset_stats(&mut self) {
        self.stats = LDPCStats::default();
    }
    /// Returns the code parameters as `(n, k, m)`:
    /// (block length, information bits, number of checks).
    #[must_use]
    pub const fn get_parameters(&self) -> (usize, usize, usize) {
        (self.config.n, self.config.k, self.config.m)
    }
    /// Returns the underlying Tanner graph (read-only).
    #[must_use]
    pub const fn get_tanner_graph(&self) -> &TannerGraph {
        &self.tanner_graph
    }
}
/// Benchmarks three code configurations (random regular / PEG / bicycle)
/// with 50 randomized decodings each at a 5% bit-error rate. Returns a map
/// with total wall time (ms), success rate, and mean BP iterations per
/// configuration, keyed `config_{i}...`.
///
/// # Errors
/// Propagates code-construction or decoding failures.
pub fn benchmark_quantum_ldpc_codes() -> Result<HashMap<String, f64>> {
    let mut results = HashMap::new();
    let configs = vec![
        LDPCConfig {
            k: 10,
            n: 20,
            m: 10,
            construction_method: LDPCConstructionMethod::RandomRegular,
            bp_algorithm: BeliefPropagationAlgorithm::SumProduct,
            ..Default::default()
        },
        LDPCConfig {
            k: 15,
            n: 30,
            m: 15,
            construction_method: LDPCConstructionMethod::ProgressiveEdgeGrowth,
            bp_algorithm: BeliefPropagationAlgorithm::MinSum,
            ..Default::default()
        },
        LDPCConfig {
            k: 20,
            n: 40,
            m: 20,
            construction_method: LDPCConstructionMethod::QuantumBicycle,
            bp_algorithm: BeliefPropagationAlgorithm::NormalizedMinSum,
            ..Default::default()
        },
    ];
    for (i, config) in configs.into_iter().enumerate() {
        let start = std::time::Instant::now();
        let mut ldpc_code = QuantumLDPCCode::new(config)?;
        for _ in 0..50 {
            // Random error pattern at a fixed 5% bit-error rate.
            let errors: Vec<bool> = (0..ldpc_code.config.n)
                .map(|_| fastrand::f64() < 0.05)
                .collect();
            // Unit-magnitude LLRs: -1 for flipped bits, +1 otherwise.
            let llrs: Vec<f64> = errors
                .iter()
                .map(|&error| if error { -1.0 } else { 1.0 })
                .collect();
            let syndrome = ldpc_code.calculate_syndrome(&errors);
            let _result = ldpc_code.decode_belief_propagation(&llrs, &syndrome)?;
        }
        let time = start.elapsed().as_secs_f64() * 1000.0;
        results.insert(format!("config_{i}"), time);
        let stats = ldpc_code.get_stats();
        results.insert(format!("config_{i}_success_rate"), stats.convergence_rate);
        results.insert(
            format!("config_{i}_avg_iterations"),
            stats.avg_bp_iterations,
        );
    }
    Ok(results)
}
#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_abs_diff_eq;
    // Construction with the default configuration succeeds.
    #[test]
    fn test_ldpc_code_creation() {
        let config = LDPCConfig::default();
        let ldpc_code = QuantumLDPCCode::new(config);
        assert!(ldpc_code.is_ok());
    }
    // Random-regular builder produces the configured node counts.
    #[test]
    fn test_tanner_graph_construction() {
        let config = LDPCConfig {
            k: 5,
            n: 10,
            m: 5,
            dv: 2,
            dc: 4,
            ..Default::default()
        };
        let tanner_graph = QuantumLDPCCode::construct_random_regular(&config);
        assert!(tanner_graph.is_ok());
        let graph = tanner_graph.expect("tanner_graph construction should succeed");
        assert_eq!(graph.variable_nodes.len(), 10);
        assert_eq!(graph.check_nodes.len(), 5);
    }
    // Belief = channel LLR + sum of messages; outgoing excludes the
    // target check's own message.
    #[test]
    fn test_variable_node_operations() {
        let mut var_node = VariableNode::new(0);
        var_node.channel_llr = 1.0;
        var_node.incoming_messages.insert(1, 0.5);
        var_node.incoming_messages.insert(2, -0.3);
        var_node.update_belief();
        assert_abs_diff_eq!(var_node.belief, 1.2, epsilon = 1e-10);
        let outgoing = var_node.compute_outgoing_message(1);
        assert_abs_diff_eq!(outgoing, 0.7, epsilon = 1e-10);
    }
    // Sum-product stays finite; min-sum magnitude is the smallest |msg|
    // among the other inputs (0.6 here).
    #[test]
    fn test_check_node_operations() {
        let mut check_node = CheckNode::new(0);
        check_node.incoming_messages.insert(1, 0.8);
        check_node.incoming_messages.insert(2, -0.6);
        check_node.incoming_messages.insert(3, 1.2);
        let sum_product_msg = check_node.compute_outgoing_message_sum_product(1);
        assert!(sum_product_msg.is_finite());
        let min_sum_msg = check_node.compute_outgoing_message_min_sum(1);
        assert_abs_diff_eq!(min_sum_msg.abs(), 0.6, epsilon = 1e-10);
    }
    // A full decode returns n hard bits within the iteration budget.
    #[test]
    fn test_belief_propagation_decoding() {
        let config = LDPCConfig {
            k: 3,
            n: 6,
            m: 3,
            max_bp_iterations: 10,
            ..Default::default()
        };
        let mut ldpc_code =
            QuantumLDPCCode::new(config).expect("LDPC code creation should succeed");
        let llrs = vec![1.0, -1.0, 1.0, 1.0, -1.0, 1.0];
        let syndrome = vec![false, true, false];
        let result = ldpc_code.decode_belief_propagation(&llrs, &syndrome);
        assert!(result.is_ok());
        let bp_result = result.expect("decode_belief_propagation should succeed");
        assert_eq!(bp_result.decoded_bits.len(), 6);
        assert!(bp_result.iterations <= 10);
    }
    // Syndrome length equals the number of checks.
    #[test]
    fn test_syndrome_calculation() {
        let config = LDPCConfig {
            k: 2,
            n: 4,
            m: 2,
            ..Default::default()
        };
        let ldpc_code = QuantumLDPCCode::new(config).expect("LDPC code creation should succeed");
        let codeword = vec![false, true, false, true];
        let syndrome = ldpc_code.calculate_syndrome(&codeword);
        assert_eq!(syndrome.len(), 2);
    }
    // Circuit width is data (n) plus ancilla (m) qubits.
    #[test]
    fn test_syndrome_circuit_generation() {
        let config = LDPCConfig {
            k: 3,
            n: 6,
            m: 3,
            ..Default::default()
        };
        let ldpc_code = QuantumLDPCCode::new(config).expect("LDPC code creation should succeed");
        let circuit = ldpc_code.syndrome_circuit();
        assert!(circuit.is_ok());
        let syndrome_circuit = circuit.expect("syndrome_circuit should succeed");
        assert_eq!(syndrome_circuit.num_qubits, 9);
    }
    // Every construction method builds without error.
    #[test]
    fn test_different_construction_methods() {
        let base_config = LDPCConfig {
            k: 3,
            n: 6,
            m: 3,
            ..Default::default()
        };
        let methods = vec![
            LDPCConstructionMethod::RandomRegular,
            LDPCConstructionMethod::ProgressiveEdgeGrowth,
            LDPCConstructionMethod::QuantumBicycle,
            LDPCConstructionMethod::SurfaceCode,
        ];
        for method in methods {
            let mut config = base_config.clone();
            config.construction_method = method;
            let ldpc_code = QuantumLDPCCode::new(config);
            assert!(ldpc_code.is_ok(), "Failed for method: {method:?}");
        }
    }
    // Every BP variant decodes without error.
    #[test]
    fn test_different_bp_algorithms() {
        let base_config = LDPCConfig {
            k: 3,
            n: 6,
            m: 3,
            ..Default::default()
        };
        let algorithms = vec![
            BeliefPropagationAlgorithm::SumProduct,
            BeliefPropagationAlgorithm::MinSum,
            BeliefPropagationAlgorithm::NormalizedMinSum,
            BeliefPropagationAlgorithm::OffsetMinSum,
        ];
        for algorithm in algorithms {
            let mut config = base_config.clone();
            config.bp_algorithm = algorithm;
            let mut ldpc_code =
                QuantumLDPCCode::new(config).expect("LDPC code creation should succeed");
            let llrs = vec![1.0, -1.0, 1.0, 1.0, -1.0, 1.0];
            let syndrome = vec![false, true, false];
            let result = ldpc_code.decode_belief_propagation(&llrs, &syndrome);
            assert!(result.is_ok(), "Failed for algorithm: {algorithm:?}");
        }
    }
    // Running means and error rates track two successive decodes.
    #[test]
    fn test_stats_updates() {
        let mut stats = LDPCStats::default();
        stats.update_after_decoding(true, 5, 10.0);
        assert_eq!(stats.total_decodings, 1);
        assert_eq!(stats.successful_decodings, 1);
        assert_abs_diff_eq!(stats.avg_bp_iterations, 5.0, epsilon = 1e-10);
        assert_abs_diff_eq!(stats.block_error_rate, 0.0, epsilon = 1e-10);
        stats.update_after_decoding(false, 8, 15.0);
        assert_eq!(stats.total_decodings, 2);
        assert_eq!(stats.successful_decodings, 1);
        assert_abs_diff_eq!(stats.avg_bp_iterations, 6.5, epsilon = 1e-10);
        assert_abs_diff_eq!(stats.block_error_rate, 0.5, epsilon = 1e-10);
    }
}