#![allow(non_snake_case)]
use crate::cli::*;
use crate::code_builder::*;
use crate::complete_model_graph::*;
#[cfg(feature = "fusion_blossom")]
use crate::decoder_fusion::*;
#[cfg(feature = "fusion_blossom")]
use crate::decoder_parallel_fusion::*;
#[cfg(feature = "hyperion")]
use crate::decoder_hyper_union_find::*;
#[cfg(feature = "hyperion")]
use crate::decoder_hyperion::*;
use crate::decoder_mwpm::*;
use crate::decoder_tailored_mwpm::*;
use crate::decoder_union_find::*;
use crate::erasure_graph::*;
use crate::model_graph::*;
use crate::model_hypergraph::*;
use crate::noise_model::*;
use crate::noise_model_builder::*;
use crate::reproducible_rand::Xoroshiro128StarStar;
use crate::simulator::*;
use crate::simulator_compact::*;
use crate::simulator_file::*;
use crate::tailored_complete_model_graph::*;
use crate::tailored_model_graph::*;
use crate::util::local_get_temporary_store;
use crate::visualize::*;
use clap;
use clap::ValueEnum;
use num_cpus;
use pbr::ProgressBar;
#[cfg(feature = "python_binding")]
use pyo3::prelude::*;
use rand_core::SeedableRng;
use serde::{Deserialize, Serialize};
use serde_json;
use serde_json::json;
use std::fs;
use std::fs::File;
use std::io::prelude::*;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex};
use std::time::Instant;
impl ToolCommands {
pub fn run(self) -> Result<String, String> {
match self {
Self::Benchmark(benchmark_parameters) => benchmark_parameters.run(),
}
}
}
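/// Debug-print targets for `benchmark`: most variants are rendered once by `execute_debug_print`,
/// while `AllErrorPattern`/`FailedErrorPattern` print per shot and `FusionBlossomSyndromeFile`
/// exports syndromes during simulation.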
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum, Serialize, Deserialize, Debug)]
#[cfg_attr(feature = "python_binding", cfg_eval)]
#[cfg_attr(feature = "python_binding", pyclass)]
pub enum BenchmarkDebugPrint {
NoiseModel,
FullNoiseModel,
ModelGraph,
CompleteModelGraph,
TailoredModelGraph,
TailoredCompleteModelGraph,
AllErrorPattern,
FailedErrorPattern,
ErasureGraph,
FusionBlossomSyndromeFile,
}
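/// The subset of the decoder configuration understood by the debug-print and visualizer paths;
/// the aliases (`pcmg`, `wf`, `ucp`) and defaults mirror the MWPM/tailored-MWPM decoder configs.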
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "python_binding", cfg_eval)]
#[cfg_attr(feature = "python_binding", pyclass)]
pub struct BenchmarkDebugPrintDecoderConfig {
#[serde(alias = "pcmg")] #[serde(default = "mwpm_default_configs::precompute_complete_model_graph")]
pub precompute_complete_model_graph: bool,
#[serde(alias = "wf")] #[serde(default = "mwpm_default_configs::weight_function")]
pub weight_function: WeightFunction,
#[serde(alias = "ucp")] #[serde(default = "mwpm_default_configs::use_combined_probability")]
pub use_combined_probability: bool,
#[serde(default = "tailored_mwpm_default_configs::use_unfixed_stabilizer_edges")]
pub use_unfixed_stabilizer_edges: bool,
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum, Serialize, Deserialize, Debug)]
#[cfg_attr(feature = "python_binding", cfg_eval)]
#[cfg_attr(feature = "python_binding", pyclass)]
pub enum BenchmarkDecoder {
None,
MWPM,
Fusion,
TailoredMWPM,
UnionFind,
HyperUnionFind,
Hyperion,
ParallelFusion,
}
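/// Shot counters shared across worker threads; termination is requested once `max_repeats` shots
/// have run, `min_failed_cases` logical failures have accumulated, or an external event (such as
/// an exhausted time budget) flags `external_termination`.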
#[derive(Clone, Debug, Serialize, Deserialize)]
#[cfg_attr(feature = "python_binding", cfg_eval)]
#[cfg_attr(feature = "python_binding", pyclass)]
pub struct BenchmarkControl {
pub total_repeats: usize,
pub qec_failed: usize,
pub external_termination: bool,
}
impl BenchmarkControl {
fn new() -> Self {
Self {
total_repeats: 0,
qec_failed: 0,
external_termination: false,
}
}
fn update_data_should_terminate(&mut self, is_qec_failed: bool, max_repeats: usize, min_failed_cases: usize) -> bool {
self.total_repeats += 1;
if is_qec_failed {
self.qec_failed += 1;
}
self.should_terminate(max_repeats, min_failed_cases)
}
fn should_terminate(&self, max_repeats: usize, min_failed_cases: usize) -> bool {
self.external_termination || self.total_repeats >= max_repeats || self.qec_failed >= min_failed_cases
}
fn set_external_terminate(&mut self) {
self.external_termination = true;
}
}
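// A minimal sanity check of the termination logic above: with `max_repeats = 100` and
// `min_failed_cases = 10`, termination is requested exactly when the tenth failure is recorded.
#[cfg(test)]
mod benchmark_control_tests {
    use super::*;
    #[test]
    fn terminates_once_min_failed_cases_is_reached() {
        let mut control = BenchmarkControl::new();
        for _ in 0..9 {
            // nine failed shots: neither `max_repeats` nor `min_failed_cases` is reached
            assert!(!control.update_data_should_terminate(true, 100, 10));
        }
        // the tenth failure reaches `min_failed_cases` and requests termination
        assert!(control.update_data_should_terminate(true, 100, 10));
    }
}
/// Per-thread snapshot of the most recent shot (error pattern, measurement, detected erasures,
/// correction), populated only when `thread_timeout >= 0` so that a stuck thread can be
/// diagnosed after the timeout fires.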
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BenchmarkThreadDebugger {
thread_counter: usize,
error_pattern: Option<SparseErrorPattern>,
measurement: Option<SparseMeasurement>,
detected_erasures: Option<SparseErasures>,
correction: Option<SparseCorrection>,
}
impl BenchmarkThreadDebugger {
fn new() -> Self {
Self {
thread_counter: 0,
error_pattern: None,
measurement: None,
detected_erasures: None,
correction: None,
}
}
fn update_thread_counter(&mut self, thread_counter: usize) -> &mut Self {
self.thread_counter = thread_counter;
self.error_pattern = None;
self.measurement = None;
self.detected_erasures = None;
self.correction = None;
self
}
#[allow(dead_code)]
pub fn load_errors(&self, simulator: &mut Simulator, noise_model: &NoiseModel) {
        if let Some(error_pattern) = self.error_pattern.as_ref() {
            simulator
                .load_sparse_error_pattern(error_pattern, noise_model)
                .expect("loading the sparse error pattern should succeed");
        }
        if let Some(detected_erasures) = self.detected_erasures.as_ref() {
            simulator
                .load_sparse_detected_erasures(detected_erasures, noise_model)
                .expect("loading the sparse detected erasures should succeed");
        }
simulator.propagate_errors();
}
}
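/// One point in the benchmark sweep: code distances `di`/`dj`, the number of noisy measurement
/// rounds, the sampling error rates `p`/`pe`, and the (possibly different) rates
/// `p_graph`/`pe_graph` used to build the decoding graphs.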
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SingleSimulationConfig {
di: usize,
dj: usize,
noisy_measurements: usize,
p: f64,
pe: f64,
p_graph: f64,
pe_graph: f64,
}
impl SingleSimulationConfig {
pub fn new(di: usize, dj: usize, noisy_measurements: usize, p: f64, pe: f64, p_graph: f64, pe_graph: f64) -> Self {
Self {
di,
dj,
noisy_measurements,
p,
pe,
p_graph,
pe_graph,
}
}
}
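/// The fully resolved benchmark configuration after `fill_in_default_parameters`: `dis`, `djs`
/// and `nms` are paired element-wise, and each entry of `ps` (with matching `pes`, `ps_graph`,
/// `pes_graph`) is run against every code size.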
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SimulationConfigs {
dis: Vec<usize>,
djs: Vec<usize>,
nms: Vec<usize>,
ps: Vec<f64>,
pes: Vec<f64>,
ps_graph: Vec<f64>,
pes_graph: Vec<f64>,
max_repeats: usize,
min_failed_cases: usize,
parallel: usize,
parallel_init: usize,
noise_model_modifier: Option<serde_json::Value>,
deterministic_seed: Option<u64>,
}
impl BenchmarkParameters {
pub fn run(&self) -> Result<String, String> {
let configs = self.fill_in_default_parameters()?;
let log_runtime_statistics_file = self
.log_runtime_statistics
.clone()
.map(|filename| Arc::new(Mutex::new(File::create(filename.as_str()).expect("cannot create file"))));
let simulation_configuration = json!({
"configs": configs,
"parameters": self,
});
if let Some(log_runtime_statistics_file) = &log_runtime_statistics_file {
let mut log_runtime_statistics_file = log_runtime_statistics_file.lock().unwrap();
log_runtime_statistics_file.write_all(b"#f ").unwrap();
log_runtime_statistics_file
.write_all(simulation_configuration.to_string().as_bytes())
.unwrap();
log_runtime_statistics_file.write_all(b"\n").unwrap();
log_runtime_statistics_file.sync_data().unwrap();
}
let mut output = "".to_string();
let titles = "format: <p> <di> <nm> <shots> <failed> <pL> <dj> <pL_dev> <pe>".to_string();
eprintln!("{}", titles); if self.debug_print.is_none() {
output = titles + "\n";
}
if self.enable_visualizer {
self.assert_single_configuration(&configs)?;
}
let configurations = self.extract_simulation_configurations(&configs);
for config in configurations.iter() {
if let Some(log_runtime_statistics_file) = &log_runtime_statistics_file {
let mut log_runtime_statistics_file = log_runtime_statistics_file.lock().unwrap();
log_runtime_statistics_file.write_all(b"# ").unwrap();
log_runtime_statistics_file
.write_all(json!(config).to_string().as_bytes())
.unwrap();
log_runtime_statistics_file.write_all(b"\n").unwrap();
log_runtime_statistics_file.sync_data().unwrap();
}
output += &(self.run_single(&configs, config, &log_runtime_statistics_file)? + "\n");
}
Ok(output)
}
pub fn fill_in_default_parameters(&self) -> Result<SimulationConfigs, String> {
let dis = self.dis.clone();
let djs = self.djs.clone().unwrap_or(dis.clone());
let nms = self.nms.clone();
assert!(nms.len() == dis.len(), "nms and dis should be paired");
assert!(dis.len() == djs.len(), "dis and djs should be paired");
let ps = self.ps.clone();
let ps_graph = self.ps_graph.clone().unwrap_or(ps.clone());
        let pes = self.pes.clone().unwrap_or(vec![0.; ps.len()]);
        let pes_graph = self.pes_graph.clone().unwrap_or(pes.clone());
        assert_eq!(pes.len(), ps.len(), "pes and ps should be paired");
        assert_eq!(ps_graph.len(), ps.len(), "ps_graph and ps should be paired");
        assert_eq!(pes_graph.len(), ps.len(), "pes_graph and ps should be paired");
let mut max_repeats: usize = self.max_repeats;
if max_repeats == 0 {
max_repeats = usize::MAX;
}
let mut min_failed_cases: usize = self.min_failed_cases;
if min_failed_cases == 0 {
min_failed_cases = usize::MAX;
}
let parallel = if self.parallel == 0 {
std::cmp::max(num_cpus::get() - 1, 1)
} else {
self.parallel
};
        // default to the resolved `parallel` so that `parallel == 0` (auto-detect) also applies here
        let parallel_init: usize = self.parallel_init.unwrap_or(parallel);
let mut noise_model_modifier_str: Option<String> = None;
if let Some(noise_model_temporary_id) = self.load_noise_model_from_temporary_store {
match local_get_temporary_store(noise_model_temporary_id) {
Some(value) => {
noise_model_modifier_str = Some(value);
}
None => {
return Err(format!(
"[error] temporary id not found (may expire): {}",
noise_model_temporary_id
))
}
}
}
match &self.load_noise_model_from_file {
Some(noise_model_filepath) => match fs::read_to_string(noise_model_filepath.clone()) {
Ok(value) => {
noise_model_modifier_str = Some(value);
}
Err(_) => return Err(format!("[error] noise model file cannot open: {}", noise_model_filepath)),
},
None => {}
}
let noise_model_modifier: Option<serde_json::Value> = match noise_model_modifier_str {
Some(value) => match serde_json::from_str(&value) {
Ok(noise_model_modifier) => Some(noise_model_modifier),
Err(_) => return Err("[error] noise model cannot recognize, please check file format".to_string()),
},
None => None,
};
Ok(SimulationConfigs {
dis,
djs,
nms,
ps,
pes,
ps_graph,
pes_graph,
max_repeats,
min_failed_cases,
parallel,
parallel_init,
noise_model_modifier,
deterministic_seed: self.deterministic_seed,
})
}
pub fn assert_single_configuration(&self, configs: &SimulationConfigs) -> Result<(), String> {
if configs.dis.len() != 1 || configs.ps.len() != 1 {
return Err("only single configuration is allowed".to_string());
}
Ok(())
}
pub fn extract_simulation_configurations(&self, configs: &SimulationConfigs) -> Vec<SingleSimulationConfig> {
let mut configurations = Vec::new();
for (di_idx, &di) in configs.dis.iter().enumerate() {
let noisy_measurements = configs.nms[di_idx];
let dj = configs.djs[di_idx];
for (p_idx, p) in configs.ps.iter().enumerate() {
let p = *p;
let pe = configs.pes[p_idx];
let p_graph = configs.ps_graph[p_idx];
let pe_graph = configs.pes_graph[p_idx];
assert!((0. ..=1.0).contains(&p), "invalid probability value");
assert!((0. ..=1.0).contains(&p_graph), "invalid probability value");
assert!((0. ..=1.0).contains(&pe), "invalid probability value");
assert!((0. ..=1.0).contains(&pe_graph), "invalid probability value");
configurations.push(SingleSimulationConfig::new(
di,
dj,
noisy_measurements,
p,
pe,
p_graph,
pe_graph,
));
}
}
configurations
}
pub fn construct_noise_model(
&self,
simulator: &mut Simulator,
configs: &SimulationConfigs,
config: &SingleSimulationConfig,
use_p_graph: bool,
) -> Result<Arc<NoiseModel>, String> {
let mut noise_model: NoiseModel = NoiseModel::new(simulator);
let p = if use_p_graph { config.p_graph } else { config.p };
let pe = if use_p_graph { config.pe_graph } else { config.pe };
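        // Biased Pauli noise with bias eta = pz / (px + py): choosing px = py = p / (2 * (1 + eta))
        // and pz = p - 2 * px keeps the total single-qubit error rate at px + py + pz = p.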
let px = p / (1. + self.bias_eta) / 2.;
let py = px;
let pz = p - 2. * px;
simulator.set_error_rates(&mut noise_model, px, py, pz, pe);
if let Some(noise_model_builder) = &self.noise_model_builder {
noise_model_builder.apply(
simulator,
&mut noise_model,
&self.noise_model_configuration,
p,
self.bias_eta,
pe,
);
}
match &configs.noise_model_modifier {
Some(modifier) => {
NoiseModelBuilder::apply_noise_model_modifier(simulator, &mut noise_model, modifier)
.map_err(|e| format!("apply noise model failed: {e}"))?;
}
None => {}
}
debug_assert!({
let sanity_check_result = code_builder_sanity_check(simulator);
if let Err(message) = &sanity_check_result {
eprintln!("\n[error] code_builder_sanity_check: {}", message)
}
sanity_check_result.is_ok()
});
assert!({
let sanity_check_result = noise_model_sanity_check(simulator, &noise_model);
if let Err(message) = &sanity_check_result {
eprintln!("\n[error] noise_model_sanity_check: {}", message)
}
sanity_check_result.is_ok()
});
        simulator.compress_error_rates(&mut noise_model);
        Ok(Arc::new(noise_model))
}
pub fn execute_debug_print(
&self,
configs: &SimulationConfigs,
simulator: &mut Simulator,
noise_model: &Arc<NoiseModel>,
) -> Result<Option<String>, String> {
match self.debug_print {
Some(BenchmarkDebugPrint::NoiseModel) => {
return Ok(Some(format!(
"{}\n",
serde_json::to_string(&simulator.to_json(noise_model)).unwrap()
)));
}
Some(BenchmarkDebugPrint::FullNoiseModel) => {
let mut noise_model = (**noise_model).clone();
                simulator.expand_error_rates(&mut noise_model);
                return Ok(Some(format!(
"{}\n",
serde_json::to_string(&simulator.to_json(&noise_model)).unwrap()
)));
}
Some(BenchmarkDebugPrint::ModelGraph) => {
let config: BenchmarkDebugPrintDecoderConfig =
serde_json::from_value(self.decoder_config.clone()).map_err(|x| x.to_string())?;
let mut model_graph = ModelGraph::new(simulator);
model_graph.build(
simulator,
noise_model.clone(),
&config.weight_function,
configs.parallel_init,
config.use_combined_probability,
self.use_brief_edge,
);
return Ok(Some(format!(
"{}\n",
serde_json::to_string(&model_graph.to_json(simulator)).unwrap()
)));
}
Some(BenchmarkDebugPrint::CompleteModelGraph) => {
let config: BenchmarkDebugPrintDecoderConfig =
serde_json::from_value(self.decoder_config.clone()).map_err(|x| x.to_string())?;
let mut model_graph = ModelGraph::new(simulator);
model_graph.build(
simulator,
noise_model.clone(),
&config.weight_function,
configs.parallel_init,
config.use_combined_probability,
self.use_brief_edge,
);
let model_graph = Arc::new(model_graph);
let mut complete_model_graph = CompleteModelGraph::new(simulator, Arc::clone(&model_graph));
complete_model_graph.precompute(simulator, config.precompute_complete_model_graph, configs.parallel_init);
return Ok(Some(format!(
"{}\n",
serde_json::to_string(&complete_model_graph.to_json(simulator)).unwrap()
)));
}
Some(BenchmarkDebugPrint::TailoredModelGraph) => {
let config: BenchmarkDebugPrintDecoderConfig =
serde_json::from_value(self.decoder_config.clone()).map_err(|x| x.to_string())?;
let mut tailored_model_graph = TailoredModelGraph::new(simulator);
tailored_model_graph.build(
simulator,
noise_model,
&config.weight_function,
config.use_combined_probability,
config.use_unfixed_stabilizer_edges,
);
return Ok(Some(format!(
"{}\n",
serde_json::to_string(&tailored_model_graph.to_json(simulator)).unwrap()
)));
}
Some(BenchmarkDebugPrint::TailoredCompleteModelGraph) => {
let config: BenchmarkDebugPrintDecoderConfig =
serde_json::from_value(self.decoder_config.clone()).map_err(|x| x.to_string())?;
let mut tailored_model_graph = TailoredModelGraph::new(simulator);
tailored_model_graph.build(
simulator,
noise_model,
&config.weight_function,
config.use_combined_probability,
config.use_unfixed_stabilizer_edges,
);
let tailored_model_graph = Arc::new(tailored_model_graph);
let mut complete_tailored_model_graph =
TailoredCompleteModelGraph::new(simulator, Arc::clone(&tailored_model_graph));
complete_tailored_model_graph.precompute(
simulator,
config.precompute_complete_model_graph,
configs.parallel_init,
);
return Ok(Some(format!(
"{}\n",
serde_json::to_string(&complete_tailored_model_graph.to_json(simulator)).unwrap()
)));
}
Some(BenchmarkDebugPrint::ErasureGraph) => {
let mut erasure_graph = ErasureGraph::new(simulator);
erasure_graph.build(simulator, noise_model.clone(), configs.parallel_init);
return Ok(Some(format!(
"{}\n",
serde_json::to_string(&erasure_graph.to_json(simulator)).unwrap()
)));
}
_ => {}
}
Ok(None)
}
pub fn prepare_visualizer(
&self,
simulator: &mut Simulator,
noise_model: &Arc<NoiseModel>,
noise_model_graph: &Arc<NoiseModel>,
configs: &SimulationConfigs,
) -> Result<Option<Arc<Mutex<Visualizer>>>, String> {
let mut visualizer = None;
if self.enable_visualizer {
print_visualize_link(self.visualizer_filename.clone());
let mut new_visualizer: Visualizer =
Visualizer::new(Some(visualize_data_folder() + self.visualizer_filename.as_str()))
.map_err(|x| x.to_string())?;
new_visualizer.add_component(simulator).map_err(|x| x.to_string())?;
new_visualizer
.add_component(noise_model.as_ref())
.map_err(|x| x.to_string())?;
if self.visualizer_model_graph {
let config: BenchmarkDebugPrintDecoderConfig =
serde_json::from_value(self.decoder_config.clone()).map_err(|x| x.to_string())?;
let mut model_graph = ModelGraph::new(simulator);
model_graph.build(
simulator,
noise_model_graph.clone(),
&config.weight_function,
configs.parallel_init,
config.use_combined_probability,
self.use_brief_edge,
);
new_visualizer.add_component(&model_graph).map_err(|x| x.to_string())?;
}
if self.visualizer_model_hypergraph {
let config: BenchmarkDebugPrintDecoderConfig =
serde_json::from_value(self.decoder_config.clone()).map_err(|x| x.to_string())?;
let mut model_hypergraph = ModelHypergraph::new(simulator);
model_hypergraph.build(
simulator,
noise_model_graph.clone(),
&config.weight_function,
configs.parallel_init,
config.use_combined_probability,
self.use_brief_edge,
);
new_visualizer.add_component(&model_hypergraph).map_err(|x| x.to_string())?;
}
if self.visualizer_tailored_model_graph {
let config: BenchmarkDebugPrintDecoderConfig =
serde_json::from_value(self.decoder_config.clone()).map_err(|x| x.to_string())?;
let mut tailored_model_graph = TailoredModelGraph::new(simulator);
tailored_model_graph.build(
simulator,
noise_model_graph.as_ref(),
&config.weight_function,
config.use_combined_probability,
config.use_unfixed_stabilizer_edges,
);
new_visualizer
.add_component(&tailored_model_graph)
.map_err(|x| x.to_string())?;
}
            new_visualizer.end_component().map_err(|x| x.to_string())?;
            visualizer = Some(Arc::new(Mutex::new(new_visualizer)));
}
Ok(visualizer)
}
pub fn run_single(
&self,
configs: &SimulationConfigs,
config: &SingleSimulationConfig,
log_runtime_statistics_file: &Option<Arc<Mutex<File>>>,
) -> Result<String, String> {
let mut simulator = Simulator::new(self.code_type, CodeSize::new(config.noisy_measurements, config.di, config.dj));
let noise_model_graph = self.construct_noise_model(&mut simulator, configs, config, true)?;
if let Some(terminate_message) = self.execute_debug_print(configs, &mut simulator, &noise_model_graph)? {
            return Ok(terminate_message);
        }
let general_decoder = GeneralDecoder::from_parameters(self, configs, config, &simulator, &noise_model_graph)?;
cfg_if::cfg_if! { if #[cfg(feature="fusion_blossom")] {
let mut fusion_blossom_syndrome_exporter = None;
if matches!(self.debug_print, Some(BenchmarkDebugPrint::FusionBlossomSyndromeFile)) {
if let GeneralDecoder::Fusion(fusion_decoder) = &general_decoder {
fusion_blossom_syndrome_exporter = Some(FusionBlossomSyndromeExporter::new(fusion_decoder, self.fusion_blossom_syndrome_export_filename.clone()));
} else {
return Err("need `fusion` decoder to export".to_string())
}
}
let fusion_blossom_syndrome_exporter = Arc::new(fusion_blossom_syndrome_exporter);
            }
        }
let noise_model = self.construct_noise_model(&mut simulator, configs, config, false)?;
let visualizer = self.prepare_visualizer(&mut simulator, &noise_model, &noise_model_graph, configs)?;
let benchmark_control = Arc::new(Mutex::new(BenchmarkControl::new()));
let mut pb = ProgressBar::on(std::io::stderr(), configs.max_repeats as u64);
pb.set(0);
let mut handlers = Vec::new();
let mut threads_debugger: Vec<Arc<Mutex<BenchmarkThreadDebugger>>> = Vec::new();
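        // Choose the simulation backend: a compact pre-sampled simulator (optionally extended to
        // more measurement rounds, possibly in compressed form), a replay simulator for a fixed
        // error pattern, or the full dynamic simulator.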
        let mut threads_ended = Vec::new();
        let general_simulator: GeneralSimulator = if self.use_compact_simulator {
let first = SimulatorCompact::from_simulator(simulator, noise_model.clone(), configs.parallel_init);
if let Some(simulator_compact_extender_noisy_measurements) = self.simulator_compact_extender_noisy_measurements {
self.assert_single_configuration(configs)?;
if simulator_compact_extender_noisy_measurements < config.noisy_measurements {
return Err(format!("extender only works for larger noisy_measurement than nms[0], now {simulator_compact_extender_noisy_measurements} < {}", config.noisy_measurements));
} else {
let mut second_simulator = Simulator::new(
self.code_type,
CodeSize::new(config.noisy_measurements + 1, config.di, config.dj),
);
let second_noise_model = self.construct_noise_model(&mut second_simulator, configs, config, false)?;
let second =
SimulatorCompact::from_simulator(second_simulator, second_noise_model, configs.parallel_init);
let extender = SimulatorCompactExtender::new(first, second, config.noisy_measurements);
if self.use_compact_simulator_compressed {
GeneralSimulator::SimulatorCompactCompressed(SimulatorCompactCompressed::new(
extender,
simulator_compact_extender_noisy_measurements,
))
} else {
let generated = extender.generate(simulator_compact_extender_noisy_measurements);
GeneralSimulator::SimulatorCompact(generated)
}
}
} else {
GeneralSimulator::SimulatorCompact(first)
}
} else if let Some(error_pattern) = self.error_pattern.as_ref() {
let sparse_error_pattern: SparseErrorPattern = serde_json::from_value(error_pattern.clone()).unwrap();
let simulator_vec = SimulatorVec::from_simulator(simulator, vec![sparse_error_pattern]);
GeneralSimulator::SimulatorVec(simulator_vec)
} else {
GeneralSimulator::Simulator(simulator)
};
for parallel_idx in 0..configs.parallel {
let thread_debugger = Arc::new(Mutex::new(BenchmarkThreadDebugger::new()));
threads_debugger.push(thread_debugger.clone());
let thread_ended = Arc::new(AtomicBool::new(false));
threads_ended.push(Arc::clone(&thread_ended));
let mut thread_general_simulator = general_simulator.clone();
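            // Derive a distinct yet reproducible RNG stream per worker by offsetting the base
            // seed with the thread index.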
if let Some(deterministic_seed) = configs.deterministic_seed {
let seed: u64 = deterministic_seed + parallel_idx as u64;
thread_general_simulator.set_rng(Xoroshiro128StarStar::seed_from_u64(seed));
}
let mut worker_state = SimulationWorker {
benchmark_control: benchmark_control.clone(),
general_simulator: thread_general_simulator,
noise_model: noise_model.clone(),
log_runtime_statistics_file: log_runtime_statistics_file.clone(),
visualizer: visualizer.clone(),
general_decoder: general_decoder.clone(),
#[cfg(feature = "fusion_blossom")]
fusion_blossom_syndrome_exporter: fusion_blossom_syndrome_exporter.clone(),
thread_debugger,
thread_ended,
parameters: self.clone(),
};
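            // Spawn each worker with an enlarged 128 MiB stack; the default thread stack is
            // assumed too small for deep recursion in some decoders at large code distances.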
handlers.push(
std::thread::Builder::new()
.stack_size(128 * 1024 * 1024)
.spawn(move || {
worker_state.run();
})
.unwrap(),
);
}
let repeat_begin = Instant::now();
let progress_information = || -> String {
let benchmark_control = benchmark_control.lock().unwrap().clone();
let total_repeats = benchmark_control.total_repeats;
let qec_failed = benchmark_control.qec_failed;
let error_rate = qec_failed as f64 / total_repeats as f64;
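            // Relative half-width of the 95% Wald confidence interval on the logical error rate:
            // 1.96 * sqrt(r * (1 - r) / n) / r, reported as <pL_dev> in the output format.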
let confidence_interval_95_percent =
1.96 * (error_rate * (1. - error_rate) / (total_repeats as f64)).sqrt() / error_rate;
format!(
"{} {} {} {} {} {} {} {:.1e} {} ",
config.p,
config.di,
config.noisy_measurements,
total_repeats,
qec_failed,
error_rate,
config.dj,
confidence_interval_95_percent,
config.pe
)
};
loop {
let time_elapsed = repeat_begin.elapsed().as_secs_f64();
if let Some(time_budget) = self.time_budget {
if time_elapsed > time_budget {
benchmark_control.lock().unwrap().set_external_terminate();
}
}
pb.message(progress_information().as_str());
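            // Scale the progress bar by whichever termination criterion (total shots, failed
            // cases, or the optional time budget) is currently closest to being met.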
{
let benchmark_control = benchmark_control.lock().unwrap().clone();
let total_repeats = benchmark_control.total_repeats;
let qec_failed = benchmark_control.qec_failed;
let ratio_total_rounds = (total_repeats as f64) / (configs.max_repeats as f64);
let ratio_qec_failed = (qec_failed as f64) / (configs.min_failed_cases as f64);
let (mut pb_total, mut set_progress) = if ratio_total_rounds >= ratio_qec_failed {
let progress = total_repeats as u64;
(
if configs.max_repeats as u64 > progress {
configs.max_repeats as u64
} else {
progress
},
progress,
)
} else {
let progress = qec_failed as u64;
(
if configs.min_failed_cases as u64 > progress {
configs.min_failed_cases as u64
} else {
progress
},
progress,
)
};
if let Some(time_budget) = self.time_budget {
let ratio_time = time_elapsed / time_budget;
if ratio_time >= ratio_total_rounds && ratio_time >= ratio_qec_failed {
let progress = total_repeats as u64;
pb_total = ((progress as f64) / ratio_time) as u64;
set_progress = progress;
}
}
pb.total = pb_total;
pb.set(set_progress);
}
if let Some(log_runtime_statistics_file) = &log_runtime_statistics_file {
let log_runtime_statistics_file = log_runtime_statistics_file.lock().unwrap();
log_runtime_statistics_file.sync_data().unwrap();
}
if benchmark_control
.lock()
.unwrap()
.should_terminate(configs.max_repeats, configs.min_failed_cases)
{
break;
}
std::thread::sleep(std::time::Duration::from_millis(250));
}
let begin = Instant::now();
std::thread::sleep(std::time::Duration::from_millis(500));
loop {
let time_elapsed = begin.elapsed().as_secs_f64();
if self.thread_timeout >= 0. && time_elapsed >= self.thread_timeout {
eprintln!("[error] some threads don't terminate properly within timeout, here are the details:");
for parallel_idx in (0..configs.parallel).rev() {
let thread_ended = threads_ended.swap_remove(parallel_idx);
let handler = handlers.swap_remove(parallel_idx);
let thread_debugger = threads_debugger.swap_remove(parallel_idx);
if !thread_ended.load(Ordering::SeqCst) {
eprintln!("[error] thread {} doesn't terminate within timeout", parallel_idx);
eprintln!("{}", json!(thread_debugger.lock().unwrap().clone()));
} else {
eprintln!("[info] thread {} normally exit", parallel_idx);
handler.join().unwrap();
}
}
break;
}
            let all_threads_ended = threads_ended
                .iter()
                .all(|thread_ended| thread_ended.load(Ordering::SeqCst));
if all_threads_ended {
for handler in handlers.drain(..) {
handler.join().unwrap();
}
break;
}
eprintln!("[info] waiting for all threads to end, time elapsed: {:.3}s", time_elapsed);
std::thread::sleep(std::time::Duration::from_millis(1000));
}
pb.finish();
eprintln!("{}", progress_information());
Ok(progress_information())
}
}
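/// Type-erased dispatch over all supported decoders so each worker thread can hold a single
/// clonable decoder value; feature-gated variants exist only when their cargo feature is enabled.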
#[derive(Clone)]
pub enum GeneralDecoder {
None,
MWPM(MWPMDecoder),
#[cfg(feature = "fusion_blossom")]
Fusion(FusionDecoder),
#[cfg(feature = "fusion_blossom")]
ParallelFusion(ParallelFusionDecoder),
TailoredMWPM(TailoredMWPMDecoder),
UnionFind(UnionFindDecoder),
#[cfg(feature = "hyperion")]
HyperUnionFind(HyperUnionFindDecoder),
#[cfg(feature = "hyperion")]
Hyperion(HyperionDecoder),
}
impl GeneralDecoder {
pub fn from_parameters(
parameters: &BenchmarkParameters,
configs: &SimulationConfigs,
config: &SingleSimulationConfig,
simulator: &Simulator,
noise_model_graph: &Arc<NoiseModel>,
) -> Result<Self, String> {
Ok(match parameters.decoder {
BenchmarkDecoder::None => {
GeneralDecoder::None
}
BenchmarkDecoder::MWPM => GeneralDecoder::MWPM(MWPMDecoder::new(
simulator,
noise_model_graph.clone(),
¶meters.decoder_config,
configs.parallel_init,
parameters.use_brief_edge,
)),
#[cfg(feature = "fusion_blossom")]
BenchmarkDecoder::Fusion => {
let first = FusionDecoder::new(
simulator,
noise_model_graph.clone(),
¶meters.decoder_config,
configs.parallel_init,
parameters.use_brief_edge,
);
if let Some(simulator_compact_extender_noisy_measurements) =
parameters.simulator_compact_extender_noisy_measurements
{
parameters.assert_single_configuration(configs)?;
if simulator_compact_extender_noisy_measurements < config.noisy_measurements {
return Err(format!("extender only works for larger noisy_measurement than nms[0], now {simulator_compact_extender_noisy_measurements} < {}", config.noisy_measurements));
} else {
let mut second_simulator = Simulator::new(
parameters.code_type,
CodeSize::new(config.noisy_measurements + 1, config.di, config.dj),
);
let mut second_config = config.clone();
second_config.noisy_measurements += 1;
let second_noise_model_graph =
parameters.construct_noise_model(&mut second_simulator, configs, &second_config, true)?;
let second = FusionDecoder::new(
&second_simulator,
second_noise_model_graph.clone(),
¶meters.decoder_config,
configs.parallel_init,
parameters.use_brief_edge,
);
let skip_decoding = first.config.skip_decoding;
let extender = FusionBlossomAdaptorExtender::new(
Arc::try_unwrap(first.adaptor).unwrap(),
Arc::try_unwrap(second.adaptor).unwrap(),
config.noisy_measurements,
);
let generated = extender.generate(simulator_compact_extender_noisy_measurements, skip_decoding);
                        let fusion_solver = if skip_decoding {
fusion_blossom::mwpm_solver::SolverSerial::new(&extender.base.initializer)
} else {
fusion_blossom::mwpm_solver::SolverSerial::new(&generated.initializer)
};
GeneralDecoder::Fusion(FusionDecoder {
adaptor: Arc::new(generated),
fusion_solver,
config: first.config,
})
}
} else {
GeneralDecoder::Fusion(first)
}
}
#[cfg(not(feature = "fusion_blossom"))]
BenchmarkDecoder::Fusion => {
return Err("decoder is not available; try enable feature `fusion_blossom`".to_string())
}
#[cfg(feature = "fusion_blossom")]
BenchmarkDecoder::ParallelFusion => {
GeneralDecoder::ParallelFusion(ParallelFusionDecoder::new(
simulator,
noise_model_graph.clone(),
¶meters.decoder_config,
configs.parallel_init,
parameters.use_brief_edge,
))
}
#[cfg(not(feature = "fusion_blossom"))]
BenchmarkDecoder::ParallelFusion => {
return Err("decoder is not available; try enable feature `fusion_blossom`".to_string())
}
BenchmarkDecoder::TailoredMWPM => GeneralDecoder::TailoredMWPM(TailoredMWPMDecoder::new(
simulator,
noise_model_graph.clone(),
¶meters.decoder_config,
configs.parallel_init,
parameters.use_brief_edge,
)),
BenchmarkDecoder::UnionFind => GeneralDecoder::UnionFind(UnionFindDecoder::new(
simulator,
noise_model_graph.clone(),
¶meters.decoder_config,
configs.parallel_init,
parameters.use_brief_edge,
)),
#[cfg(feature = "hyperion")]
BenchmarkDecoder::HyperUnionFind => GeneralDecoder::HyperUnionFind(HyperUnionFindDecoder::new(
simulator,
noise_model_graph.clone(),
¶meters.decoder_config,
configs.parallel_init,
parameters.use_brief_edge,
)),
#[cfg(not(feature = "hyperion"))]
BenchmarkDecoder::HyperUnionFind => {
return Err("decoder is not available; try enable feature `hyperion`".to_string())
}
#[cfg(feature = "hyperion")]
BenchmarkDecoder::Hyperion => GeneralDecoder::Hyperion(HyperionDecoder::new(
simulator,
noise_model_graph.clone(),
¶meters.decoder_config,
configs.parallel_init,
parameters.use_brief_edge,
)),
#[cfg(not(feature = "hyperion"))]
BenchmarkDecoder::Hyperion => return Err("decoder is not available; try enable feature `hyperion`".to_string()),
})
}
pub fn decode_with_erasure(
&mut self,
sparse_measurement: &SparseMeasurement,
sparse_detected_erasures: &SparseErasures,
) -> (SparseCorrection, serde_json::Value) {
match self {
Self::None => (SparseCorrection::new(), json!({})),
Self::MWPM(mwpm_decoder) => mwpm_decoder.decode_with_erasure(sparse_measurement, sparse_detected_erasures),
#[cfg(feature = "fusion_blossom")]
Self::Fusion(fusion_decoder) => fusion_decoder.decode_with_erasure(sparse_measurement, sparse_detected_erasures),
#[cfg(feature = "fusion_blossom")]
Self::ParallelFusion(fusion_decoder) => fusion_decoder.decode_with_erasure(sparse_measurement, sparse_detected_erasures),
Self::TailoredMWPM(tailored_mwpm_decoder) => {
assert!(
sparse_detected_erasures.is_empty(),
"tailored MWPM decoder doesn't support erasures"
);
tailored_mwpm_decoder.decode(sparse_measurement)
}
Self::UnionFind(union_find_decoder) => {
union_find_decoder.decode_with_erasure(sparse_measurement, sparse_detected_erasures)
}
#[cfg(feature = "hyperion")]
Self::HyperUnionFind(hyper_union_find_decoder) => {
hyper_union_find_decoder.decode_with_erasure(sparse_measurement, sparse_detected_erasures)
}
#[cfg(feature = "hyperion")]
Self::Hyperion(hyperion_decoder) => {
hyperion_decoder.decode_with_erasure(sparse_measurement, sparse_detected_erasures)
}
}
}
}
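// A minimal sketch of the dispatch above: the `None` decoder performs no decoding and returns an
// empty correction with empty runtime statistics, useful for measuring pure simulation throughput.
#[cfg(test)]
mod general_decoder_tests {
    use super::*;
    #[test]
    fn none_decoder_returns_empty_correction() {
        let mut decoder = GeneralDecoder::None;
        let (_correction, runtime_statistics) =
            decoder.decode_with_erasure(&SparseMeasurement::new(), &SparseErasures::new());
        assert_eq!(runtime_statistics, json!({}));
    }
}
/// Everything one worker thread needs to run shots independently: its own clone of the simulator
/// and decoder, plus shared handles for control, logging, visualization and timeout debugging.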
pub struct SimulationWorker {
pub benchmark_control: Arc<Mutex<BenchmarkControl>>,
pub general_simulator: GeneralSimulator,
pub noise_model: Arc<NoiseModel>,
pub log_runtime_statistics_file: Option<Arc<Mutex<File>>>,
pub visualizer: Option<Arc<Mutex<Visualizer>>>,
pub general_decoder: GeneralDecoder,
#[cfg(feature = "fusion_blossom")]
pub fusion_blossom_syndrome_exporter: Arc<Option<FusionBlossomSyndromeExporter>>,
pub thread_debugger: Arc<Mutex<BenchmarkThreadDebugger>>,
pub thread_ended: Arc<AtomicBool>,
pub parameters: BenchmarkParameters,
}
impl SimulationWorker {
pub fn run(&mut self) {
for thread_counter in 0..usize::MAX {
let parameters = &self.parameters;
if parameters.thread_timeout >= 0. {
self.thread_debugger.lock().unwrap().update_thread_counter(thread_counter);
}
let begin = Instant::now();
let (error_count, erasure_count) = self.general_simulator.generate_random_errors(&self.noise_model);
let sparse_detected_erasures = if erasure_count != 0 {
self.general_simulator.generate_sparse_detected_erasures()
} else {
SparseErasures::new()
};
if parameters.thread_timeout >= 0. {
let mut thread_debugger = self.thread_debugger.lock().unwrap();
thread_debugger.error_pattern = Some(self.general_simulator.generate_sparse_error_pattern());
thread_debugger.detected_erasures = Some(sparse_detected_erasures.clone());
            }
            if matches!(parameters.debug_print, Some(BenchmarkDebugPrint::AllErrorPattern)) {
let sparse_error_pattern = self.general_simulator.generate_sparse_error_pattern();
eprint!(
"{}",
serde_json::to_string(&sparse_error_pattern).expect("serialize should success")
);
if !sparse_detected_erasures.is_empty() {
eprintln!(
", {}",
serde_json::to_string(&sparse_detected_erasures).expect("serialize should success")
);
} else {
eprintln!();
}
}
let sparse_measurement = if error_count != 0 {
self.general_simulator.generate_sparse_measurement()
} else {
SparseMeasurement::new()
};
if parameters.thread_timeout >= 0. {
self.thread_debugger.lock().unwrap().measurement = Some(sparse_measurement.clone());
            }
            let simulate_elapsed = begin.elapsed().as_secs_f64();
cfg_if::cfg_if! { if #[cfg(feature="fusion_blossom")] {
if let Some(fusion_blossom_syndrome_exporter) = self.fusion_blossom_syndrome_exporter.as_ref() {
fusion_blossom_syndrome_exporter.add_syndrome(&sparse_measurement, &sparse_detected_erasures);
}
                }
            }
let begin = Instant::now();
let (correction, mut runtime_statistics) = self
.general_decoder
.decode_with_erasure(&sparse_measurement, &sparse_detected_erasures);
if parameters.thread_timeout >= 0. {
self.thread_debugger.lock().unwrap().correction = Some(correction.clone());
            }
            let decode_elapsed = begin.elapsed().as_secs_f64();
let begin = Instant::now();
let mut is_qec_failed = false;
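            // `validate_correction` reports logical flips along the i and j axes separately;
            // either counts as a failure unless explicitly ignored.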
let (logical_i, logical_j) = self.general_simulator.validate_correction(&correction);
if logical_i && !parameters.ignore_logical_i {
is_qec_failed = true;
}
if logical_j && !parameters.ignore_logical_j {
is_qec_failed = true;
}
let validate_elapsed = begin.elapsed().as_secs_f64();
if is_qec_failed && matches!(parameters.debug_print, Some(BenchmarkDebugPrint::FailedErrorPattern)) {
let sparse_error_pattern = self.general_simulator.generate_sparse_error_pattern();
eprint!(
"{}",
serde_json::to_string(&sparse_error_pattern).expect("serialize should success")
);
if !sparse_detected_erasures.is_empty() {
eprintln!(
", {}",
serde_json::to_string(&sparse_detected_erasures).expect("serialize should success")
);
} else {
eprintln!();
}
}
if let Some(log_runtime_statistics_file) = &self.log_runtime_statistics_file {
runtime_statistics["qec_failed"] = json!(is_qec_failed);
if parameters.log_error_pattern_when_logical_error && is_qec_failed {
runtime_statistics["error_pattern"] = json!(self.general_simulator.generate_sparse_error_pattern());
}
runtime_statistics["elapsed"] = json!({
"simulate": simulate_elapsed,
"decode": decode_elapsed,
"validate": validate_elapsed,
});
let to_be_written = format!("{}\n", runtime_statistics);
let mut log_runtime_statistics_file = log_runtime_statistics_file.lock().unwrap();
log_runtime_statistics_file.write_all(to_be_written.as_bytes()).unwrap();
}
if let Some(visualizer) = &self.visualizer {
if !parameters.visualizer_skip_success_cases || is_qec_failed {
let case = json!({
"error_pattern": self.general_simulator.generate_sparse_error_pattern(),
"measurement": sparse_measurement,
"detected_erasures": sparse_detected_erasures,
"correction": correction,
"qec_failed": is_qec_failed,
"elapsed": {
"simulate": simulate_elapsed,
"decode": decode_elapsed,
"validate": validate_elapsed,
},
"runtime_statistics": runtime_statistics,
});
let mut visualizer = visualizer.lock().unwrap();
visualizer.add_case(case).unwrap();
}
}
if self.benchmark_control.lock().unwrap().update_data_should_terminate(
is_qec_failed,
parameters.max_repeats,
parameters.min_failed_cases,
) {
break;
}
}
self.thread_ended.store(true, Ordering::SeqCst);
}
}