use super::either::Either;
use super::float_cmp;
use super::noise_model::*;
use super::simulator::*;
use super::types::*;
use super::util_macros::*;
use super::visualize::*;
#[cfg(feature = "python_binding")]
use pyo3::prelude::*;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::sync::{Arc, Mutex};
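/// Decoding (model) graph built from a `Simulator` and a `NoiseModel`: each real measurement
/// node carries candidate edges to other measurement nodes and an optional boundary edge,
/// from which a single elected edge per pair is later chosen by `elect_edges`.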
#[derive(Debug, Clone, Serialize)]
#[cfg_attr(feature = "python_binding", pyclass)]
pub struct ModelGraph {
pub nodes: Vec<Vec<Vec<Option<Box<ModelGraphNode>>>>>,
}
impl QecpVisualizer for ModelGraph {
fn component_info(&self, abbrev: bool) -> (String, serde_json::Value) {
let name = "model_graph";
let info = json!({
"nodes": (0..self.nodes.len()).map(|t| {
(0..self.nodes[t].len()).map(|i| {
(0..self.nodes[t][i].len()).map(|j| {
let position = &pos!(t, i, j);
if self.is_node_exist(position) {
let node = self.get_node_unwrap(position);
let mut edges = serde_json::Map::with_capacity(node.edges.len());
for (peer_position, edge) in node.edges.iter() {
edges.insert(peer_position.to_string(), edge.component_edge_info(abbrev));
}
let mut all_edges = serde_json::Map::with_capacity(node.all_edges.len());
for (peer_position, all_edge) in node.all_edges.iter() {
let (edges, _) = all_edge;
let components: Vec<_> = edges.iter().map(|edge| edge.component_edge_info(abbrev)).collect();
all_edges.insert(peer_position.to_string(), json!(components));
}
Some(json!({
if abbrev { "p" } else { "position" }: position,
"all_edges": all_edges,
"edges": edges,
"all_boundaries": node.all_boundaries.iter().map(|boundary| boundary.component_edge_info(abbrev)).collect::<Vec<_>>(),
"boundary": node.boundary.as_ref().map(|boundary| boundary.component_edge_info(abbrev)),
}))
} else {
None
}
}).collect::<Vec<Option<serde_json::Value>>>()
}).collect::<Vec<Vec<Option<serde_json::Value>>>>()
}).collect::<Vec<Vec<Vec<Option<serde_json::Value>>>>>(),
});
(name.to_string(), info)
}
}
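/// A single vertex of the model graph: it accumulates every candidate edge found during
/// `build` and stores the per-target elected results produced by `elect_edges`.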
#[derive(Debug, Clone, Serialize)]
pub struct ModelGraphNode {
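/// all candidate edges to each peer position: full edges plus, when `use_brief_edge` is enabled, brief (probability/weight only) records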
pub all_edges: BTreeMap<Position, (Vec<ModelGraphEdge>, Vec<BriefModelGraphEdge>)>,
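/// the single elected edge per peer position, filled in by `elect_edges`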
pub edges: BTreeMap<Position, ModelGraphEdge>,
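/// all candidate boundary edges (error sources that trigger exactly one real measurement)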
pub all_boundaries: Vec<ModelGraphBoundary>,
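/// the elected boundary edge, if this node has any boundary candidates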
pub boundary: Option<Box<ModelGraphBoundary>>,
}
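/// A memory-saving record of a candidate edge that keeps only its probability and weight,
/// so that non-elected candidates do not retain their full error pattern and correction.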
#[derive(Debug, Clone, Serialize)]
pub struct BriefModelGraphEdge {
pub probability: f64,
pub weight: f64,
}
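/// A candidate edge between two measurement nodes: the probability of the underlying error
/// source, its weight, and the error pattern / correction that produced the pair of defects.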
#[derive(Debug, Clone, Serialize)]
pub struct ModelGraphEdge {
pub probability: f64,
pub weight: f64,
pub error_pattern: Arc<SparseErrorPattern>,
pub correction: Arc<SparseCorrection>,
}
impl ModelGraphEdge {
fn component_edge_info(&self, abbrev: bool) -> serde_json::Value {
json!({
if abbrev { "p" } else { "probability" }: self.probability,
if abbrev { "w" } else { "weight" }: self.weight,
if abbrev { "e" } else { "error_pattern" }: self.error_pattern,
if abbrev { "c" } else { "correction" }: self.correction,
})
}
}
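/// A candidate boundary edge: an error source that triggers exactly one real measurement node,
/// optionally remembering the single virtual measurement it also triggers.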
#[derive(Debug, Clone, Serialize)]
pub struct ModelGraphBoundary {
pub probability: f64,
pub weight: f64,
pub error_pattern: Arc<SparseErrorPattern>,
pub correction: Arc<SparseCorrection>,
pub virtual_node: Option<Position>,
}
impl ModelGraphBoundary {
fn component_edge_info(&self, abbrev: bool) -> serde_json::Value {
json!({
if abbrev { "p" } else { "probability" }: self.probability,
if abbrev { "w" } else { "weight" }: self.weight,
if abbrev { "e" } else { "error_pattern" }: self.error_pattern,
if abbrev { "c" } else { "correction" }: self.correction,
if abbrev { "v" } else { "virtual_node" }: self.virtual_node,
})
}
}
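/// Weight functions mapping an error probability p to an edge weight (see the `weight_function`
/// module): `Autotune` uses -ln(p), `AutotuneImproved` uses ln(1-p) - ln(p) = ln((1-p)/p),
/// and `Unweighted` assigns 1 to every edge; all of them return a large finite value
/// (`f32::MAX` as f64) when p is not positive.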
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum WeightFunction {
Autotune,
AutotuneImproved,
Unweighted,
}
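/// Free-standing implementations of the [`WeightFunction`] variants, passed as
/// `Fn(f64) -> f64` to `build_with_weight_function`.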
pub mod weight_function {
pub fn autotune(p: f64) -> f64 {
if p > 0. {
-p.ln()
} else {
f64::from(f32::MAX)
}
}
pub fn autotune_improved(p: f64) -> f64 {
if p > 0. {
(1. - p).ln() - p.ln()
} else {
f64::from(f32::MAX)
}
}
pub fn unweighted(p: f64) -> f64 {
if p > 0. {
1.
} else {
f64::from(f32::MAX)
}
}
}
impl ModelGraph {
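/// Create an empty model graph with the same dimensions as the simulator; only real
/// measurement nodes at t = k * measurement_cycles (k >= 1) receive a `Some(_)` entry.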
pub fn new(simulator: &Simulator) -> Self {
assert!(simulator.volume() > 0, "cannot build model graph out of zero-sized simulator");
Self {
nodes: (0..simulator.height)
.map(|t| {
(0..simulator.vertical)
.map(|i| {
(0..simulator.horizontal)
.map(|j| {
let position = &pos!(t, i, j);
if t != 0 && t % simulator.measurement_cycles == 0 && simulator.is_node_real(position) {
let node = simulator.get_node_unwrap(position);
if node.gate_type.is_measurement() {
return Some(Box::new(ModelGraphNode {
all_edges: BTreeMap::new(),
edges: BTreeMap::new(),
all_boundaries: Vec::with_capacity(0),
boundary: None,
}));
}
}
None
})
.collect()
})
.collect()
})
.collect(),
}
}
pub fn get_node(&'_ self, position: &Position) -> &'_ Option<Box<ModelGraphNode>> {
&self.nodes[position.t][position.i][position.j]
}
pub fn is_node_exist(&self, position: &Position) -> bool {
self.get_node(position).is_some()
}
pub fn get_node_unwrap(&'_ self, position: &Position) -> &'_ ModelGraphNode {
self.get_node(position).as_ref().unwrap()
}
pub fn get_node_mut_unwrap(&'_ mut self, position: &Position) -> &'_ mut ModelGraphNode {
self.nodes[position.t][position.i][position.j].as_mut().unwrap()
}
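/// Build the model graph by dispatching to `build_with_weight_function` with the chosen
/// weight function; `parallel > 1` splits the work along the t axis across threads.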
pub fn build(
&mut self,
simulator: &mut Simulator,
noise_model: Arc<NoiseModel>,
weight_function: &WeightFunction,
parallel: usize,
use_combined_probability: bool,
use_brief_edge: bool,
) {
match weight_function {
WeightFunction::Autotune => self.build_with_weight_function(
simulator,
noise_model,
weight_function::autotune,
parallel,
use_combined_probability,
use_brief_edge,
),
WeightFunction::AutotuneImproved => self.build_with_weight_function(
simulator,
noise_model,
weight_function::autotune_improved,
parallel,
use_combined_probability,
use_brief_edge,
),
WeightFunction::Unweighted => self.build_with_weight_function(
simulator,
noise_model,
weight_function::unweighted,
parallel,
use_combined_probability,
use_brief_edge,
),
}
}
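/// Enumerate every possible single and correlated Pauli error (and possible erasure) within
/// `t_start <= t < t_end`, measure its syndrome with `fast_measurement_given_few_errors`,
/// and record a boundary edge (one real defect) or a normal edge (two real defects) for
/// each error source with nonzero probability.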
fn build_with_weight_function_region<F>(
&mut self,
simulator: &mut Simulator,
noise_model: Arc<NoiseModel>,
weight_of: F,
t_start: usize,
t_end: usize,
use_brief_edge: bool,
) where
F: Fn(f64) -> f64 + Copy,
{
let mut all_possible_errors: Vec<Either<ErrorType, CorrelatedPauliErrorType>> = Vec::new();
for error_type in ErrorType::all_possible_errors().drain(..) {
all_possible_errors.push(Either::Left(error_type));
}
for correlated_error_type in CorrelatedPauliErrorType::all_possible_errors().drain(..) {
all_possible_errors.push(Either::Right(correlated_error_type));
}
simulator.clear_all_errors();
simulator_iter!(simulator, position, {
if position.t < t_start || position.t >= t_end {
continue;
}
let noise_model_node = noise_model.get_node_unwrap(position);
let possible_erasure_error =
noise_model_node.erasure_error_rate > 0. || noise_model_node.correlated_erasure_error_rates.is_some() || {
let node = simulator.get_node_unwrap(position);
if let Some(gate_peer) = node.gate_peer.as_ref() {
let peer_noise_model_node = noise_model.get_node_unwrap(gate_peer);
if let Some(correlated_erasure_error_rates) = &peer_noise_model_node.correlated_erasure_error_rates {
correlated_erasure_error_rates.error_probability() > 0.
} else {
false
}
} else {
false
}
};
for error in all_possible_errors.iter() {
let p = match error {
Either::Left(error_type) => noise_model_node.pauli_error_rates.error_rate(error_type),
Either::Right(error_type) => match &noise_model_node.correlated_pauli_error_rates {
Some(correlated_pauli_error_rates) => correlated_pauli_error_rates.error_rate(error_type),
None => 0.,
},
};
let is_erasure = possible_erasure_error && error.is_left();
if p > 0. || is_erasure {
let mut sparse_errors = SparseErrorPattern::new();
match error {
Either::Left(error_type) => {
sparse_errors.add(position.clone(), *error_type);
}
Either::Right(error_type) => {
sparse_errors.add(position.clone(), error_type.my_error());
let node = simulator.get_node_unwrap(position);
let gate_peer = node
.gate_peer
.as_ref()
.expect("correlated error must correspond to a two-qubit gate");
sparse_errors.add((**gate_peer).clone(), error_type.peer_error());
}
}
let sparse_errors = Arc::new(sparse_errors);
let (sparse_correction, sparse_measurement_real, sparse_measurement_virtual) =
simulator.fast_measurement_given_few_errors(&sparse_errors);
let sparse_correction = Arc::new(sparse_correction);
let sparse_measurement_real = sparse_measurement_real.to_vec();
let sparse_measurement_virtual = sparse_measurement_virtual.to_vec();
if sparse_measurement_real.is_empty() {
continue;
}
if sparse_measurement_real.len() == 1 {
let position = &sparse_measurement_real[0];
if p > 0. || is_erasure {
let model_graph_node = self.get_node_mut_unwrap(position);
model_graph_node.all_boundaries.push(ModelGraphBoundary {
probability: p,
weight: weight_of(p),
error_pattern: sparse_errors.clone(),
correction: sparse_correction.clone(),
virtual_node: if sparse_measurement_virtual.len() == 1 {
Some(sparse_measurement_virtual[0].clone())
} else {
None
},
});
}
}
if sparse_measurement_real.len() == 2 {
let position1 = &sparse_measurement_real[0];
let position2 = &sparse_measurement_real[1];
let node1 = simulator.get_node_unwrap(position1);
let node2 = simulator.get_node_unwrap(position2);
let is_same_type = if cfg!(feature = "include_different_type_edges") {
true
} else {
node1.qubit_type == node2.qubit_type
};
if is_same_type && (p > 0. || is_erasure) {
self.add_edge_between(
(position1, position2),
p,
weight_of(p),
sparse_errors.clone(),
sparse_correction.clone(),
use_brief_edge,
);
}
}
}
}
});
}
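/// Build the full model graph. A debug assertion first checks that the graph has not been
/// built before. With `parallel <= 1` the whole t range is processed in one pass; otherwise
/// each thread builds a clone restricted to a slice of the t axis and the partial
/// `all_edges` / `all_boundaries` are merged back afterwards. Finally `elect_edges` picks
/// one representative edge per node pair.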
pub fn build_with_weight_function<F>(
&mut self,
simulator: &mut Simulator,
noise_model: Arc<NoiseModel>,
weight_of: F,
parallel: usize,
use_combined_probability: bool,
use_brief_edge: bool,
) where
F: Fn(f64) -> f64 + Copy + Send + Sync + 'static,
{
debug_assert!({
let mut state_clean = true;
simulator_iter!(simulator, position, node, {
if position.t != 0 && node.gate_type.is_measurement() && simulator.is_node_real(position) {
let model_graph_node = self.get_node_unwrap(position);
if !model_graph_node.all_edges.is_empty() || !model_graph_node.edges.is_empty() {
state_clean = false;
}
}
});
if !state_clean {
println!("[warning] state must be clean before calling `build`; make sure you don't call this function twice");
}
state_clean
});
if parallel <= 1 {
self.build_with_weight_function_region(simulator, noise_model, weight_of, 0, simulator.height, use_brief_edge);
} else {
let mut handlers = Vec::new();
let mut instances = Vec::new();
let interval = simulator.height / parallel;
for parallel_idx in 0..parallel {
let instance = Arc::new(Mutex::new(self.clone()));
let mut simulator = simulator.clone();
instances.push(Arc::clone(&instance));
let t_start = parallel_idx * interval;
let mut t_end = (parallel_idx + 1) * interval;
if parallel_idx == parallel - 1 {
t_end = simulator.height;
}
let noise_model = Arc::clone(&noise_model);
handlers.push(std::thread::spawn(move || {
let mut instance = instance.lock().unwrap();
instance.build_with_weight_function_region(
&mut simulator,
noise_model,
weight_of,
t_start,
t_end,
use_brief_edge,
);
}));
}
for handler in handlers.drain(..) {
handler.join().unwrap();
}
for instance in instances.iter() {
let mut instance = instance.lock().unwrap();
simulator_iter!(simulator, position, delta_t => simulator.measurement_cycles, if instance.is_node_exist(position) {
let instance_model_graph_node = instance.get_node_mut_unwrap(position);
let model_graph_node = self.get_node_mut_unwrap(position);
for boundary in instance_model_graph_node.all_boundaries.drain(..) {
model_graph_node.all_boundaries.push(boundary);
}
let mut all_edges = BTreeMap::new();
std::mem::swap(&mut all_edges, &mut instance_model_graph_node.all_edges);
for (target, (mut edges, mut brief_edges)) in all_edges.into_iter() {
if !model_graph_node.all_edges.contains_key(&target) {
model_graph_node.all_edges.insert(target.clone(), (Vec::new(), Vec::new()));
}
let (node_edges, node_brief_edges) = model_graph_node.all_edges.get_mut(&target).unwrap();
for edge in edges.drain(..) {
node_edges.push(edge);
}
for brief_edge in brief_edges.drain(..) {
node_brief_edges.push(brief_edge);
}
}
});
}
}
self.elect_edges(simulator, use_combined_probability, weight_of);
}
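/// Add a directed candidate edge from `positions.0` to `positions.1`. With `use_brief_edge`
/// only the most likely candidate is kept as a full edge and the others are stored as
/// `BriefModelGraphEdge`s; call `add_edge_between` to add the edge in both directions.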
pub fn add_edge(
&mut self,
positions: (&Position, &Position),
probability: f64,
weight: f64,
error_pattern: Arc<SparseErrorPattern>,
correction: Arc<SparseCorrection>,
use_brief_edge: bool,
) {
let (source, target) = positions;
let node = self.get_node_mut_unwrap(source);
if !node.all_edges.contains_key(target) {
node.all_edges.insert(target.clone(), (Vec::new(), Vec::new()));
}
let (node_edges, node_brief_edges) = node.all_edges.get_mut(target).unwrap();
if use_brief_edge {
// keep at most one full edge per target; every other candidate is stored as a brief edge
if node_edges.is_empty() {
node_edges.push(ModelGraphEdge {
probability,
weight,
error_pattern,
correction,
});
} else if probability > node_edges[0].probability {
// demote the previous full edge to a brief edge and keep the more likely candidate as the full edge
node_brief_edges.push(BriefModelGraphEdge {
probability: node_edges[0].probability,
weight: node_edges[0].weight,
});
node_edges[0] = ModelGraphEdge {
probability,
weight,
error_pattern,
correction,
};
} else {
node_brief_edges.push(BriefModelGraphEdge { probability, weight });
}
} else {
node_edges.push(ModelGraphEdge {
probability,
weight,
error_pattern,
correction,
});
}
}
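/// Add the same candidate edge in both directions so that the graph stays symmetric.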
pub fn add_edge_between(
&mut self,
positions: (&Position, &Position),
probability: f64,
weight: f64,
error_pattern: Arc<SparseErrorPattern>,
correction: Arc<SparseCorrection>,
use_brief_edge: bool,
) {
self.add_edge(
positions,
probability,
weight,
error_pattern.clone(),
correction.clone(),
use_brief_edge,
);
self.add_edge(
(positions.1, positions.0),
probability,
weight,
error_pattern.clone(),
correction.clone(),
use_brief_edge,
);
}
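/// Correction associated with the elected edge between `source` and `target`; panics if no such edge exists.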
pub fn build_correction_matching(&self, source: &Position, target: &Position) -> &SparseCorrection {
let node = self.get_node_unwrap(source);
let edge = node.edges.get(target);
&edge.as_ref().unwrap().correction
}
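/// Correction associated with the elected boundary edge of `source`; panics if the node has no boundary.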
pub fn build_correction_boundary(&self, source: &Position) -> &SparseCorrection {
let node = self.get_node_unwrap(source);
&node.boundary.as_ref().unwrap().correction
}
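/// For every node, collapse the candidate edges (and boundaries) toward each target into a
/// single elected edge: the error pattern and correction come from the most likely candidate,
/// while the probability is either the maximum or, with `use_combined_probability`, the
/// pairwise combination p1 * (1 - p2) + p2 * (1 - p1) over all candidates (the probability
/// that an odd number of them occur, assuming independence). A debug assertion then checks
/// that the elected probabilities are symmetric between paired nodes.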
pub fn elect_edges<F>(&mut self, simulator: &Simulator, use_combined_probability: bool, weight_of: F)
where
F: Fn(f64) -> f64 + Copy,
{
simulator_iter!(simulator, position, delta_t => simulator.measurement_cycles, if self.is_node_exist(position) {
let model_graph_node = self.get_node_mut_unwrap(position);
for (target, (edges, brief_edges)) in model_graph_node.all_edges.iter() {
let mut elected_idx = 0;
let mut elected_probability = edges[0].probability;
for i in 1..edges.len() {
let edge = &edges[i];
if use_combined_probability {
elected_probability = elected_probability * (1. - edge.probability) + edge.probability * (1. - elected_probability);
} else {
elected_probability = elected_probability.max(edge.probability);
}
let best_edge = &edges[elected_idx];
if edge.probability > best_edge.probability {
elected_idx = i;
}
}
for brief_edge in brief_edges.iter() {
if use_combined_probability {
elected_probability = elected_probability * (1. - brief_edge.probability) + brief_edge.probability * (1. - elected_probability);
}
}
let elected = ModelGraphEdge {
probability: elected_probability,
weight: weight_of(elected_probability),
error_pattern: edges[elected_idx].error_pattern.clone(),
correction: edges[elected_idx].correction.clone(),
};
model_graph_node.edges.insert(target.clone(), elected);
}
if !model_graph_node.all_boundaries.is_empty() {
let mut elected_idx = 0;
let mut elected_probability = model_graph_node.all_boundaries[0].probability;
for i in 1..model_graph_node.all_boundaries.len() {
let edge = &model_graph_node.all_boundaries[i];
if use_combined_probability {
elected_probability = elected_probability * (1. - edge.probability) + edge.probability * (1. - elected_probability);
} else {
elected_probability = elected_probability.max(edge.probability);
}
let best_edge = &model_graph_node.all_boundaries[elected_idx];
if edge.probability > best_edge.probability {
elected_idx = i;
}
}
let elected = ModelGraphBoundary {
probability: elected_probability,
weight: weight_of(elected_probability),
error_pattern: model_graph_node.all_boundaries[elected_idx].error_pattern.clone(),
correction: model_graph_node.all_boundaries[elected_idx].correction.clone(),
virtual_node: model_graph_node.all_boundaries[elected_idx].virtual_node.clone(),
};
model_graph_node.boundary = Some(Box::new(elected));
} else {
model_graph_node.boundary = None;
}
});
debug_assert!({
let mut sanity_check_passed = true;
for t in (simulator.measurement_cycles..simulator.height).step_by(simulator.measurement_cycles) {
simulator_iter_real!(simulator, position, node, t => t, if node.gate_type.is_measurement() {
let model_graph_node = self.get_node_unwrap(position);
for (target, edge) in model_graph_node.edges.iter() {
let target_model_graph_node = self.get_node_unwrap(target);
let reverse_edge = target_model_graph_node.edges.get(position).expect("edge should be symmetric");
if !float_cmp::approx_eq!(f64, edge.probability, reverse_edge.probability, ulps = 5) {
println!("[warning] the edge between {} and {} has unequal probabilities {} and {}",
position, target, edge.probability, reverse_edge.probability);
sanity_check_passed = false;
}
}
});
}
sanity_check_passed
});
}
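/// Dump the complete model graph (including all candidate edges) as JSON, together with the
/// basic code dimensions taken from the simulator.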
pub fn to_json(&self, simulator: &Simulator) -> serde_json::Value {
json!({
"code_type": simulator.code_type,
"height": simulator.height,
"vertical": simulator.vertical,
"horizontal": simulator.horizontal,
"nodes": (0..simulator.height).map(|t| {
(0..simulator.vertical).map(|i| {
(0..simulator.horizontal).map(|j| {
let position = &pos!(t, i, j);
if self.is_node_exist(position) {
let node = self.get_node_unwrap(position);
Some(json!({
"position": position,
"all_edges": node.all_edges,
"edges": node.edges,
"all_boundaries": node.all_boundaries,
"boundary": node.boundary,
}))
} else {
None
}
}).collect::<Vec<Option<serde_json::Value>>>()
}).collect::<Vec<Vec<Option<serde_json::Value>>>>()
}).collect::<Vec<Vec<Vec<Option<serde_json::Value>>>>>()
})
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn model_graph_basics() {
println!(
"std::mem::size_of::<ModelGraphNode>() = {}",
std::mem::size_of::<ModelGraphNode>()
);
println!(
"std::mem::size_of::<ModelGraphEdge>() = {}",
std::mem::size_of::<ModelGraphEdge>()
);
println!(
"std::mem::size_of::<ModelGraphBoundary>() = {}",
std::mem::size_of::<ModelGraphBoundary>()
);
println!(
"std::mem::size_of::<BriefModelGraphEdge>() = {}",
std::mem::size_of::<BriefModelGraphEdge>()
);
if std::mem::size_of::<ModelGraphNode>() > 80 {
panic!("ModelGraphNode is unexpectedly large, check if anything is wrong");
}
}
}