use std::collections::{HashMap, HashSet};
use std::time::{Duration, Instant};
use thiserror::Error;
use crate::ising::{IsingError, IsingModel, QuboModel};
use crate::partitioning::{Partition, SpectralPartitioner};
use crate::simulator::{AnnealingParams, AnnealingSolution, QuantumAnnealingSimulator};
/// Errors that can occur while decomposing, solving, or reconstructing a QUBO.
#[derive(Error, Debug)]
pub enum DecompositionError {
    /// An error propagated from the underlying Ising model.
    #[error("Ising error: {0}")]
    IsingError(#[from] IsingError),
    /// A configuration or strategy parameter was invalid.
    #[error("Invalid parameter: {0}")]
    InvalidParameter(String),
    /// Splitting the problem into sub-problems failed.
    #[error("Decomposition failed: {0}")]
    DecompositionFailed(String),
    /// Solving one of the sub-problems failed.
    #[error("Solving failed: {0}")]
    SolvingFailed(String),
    /// Merging sub-solutions back into a global solution failed.
    #[error("Reconstruction failed: {0}")]
    ReconstructionFailed(String),
}
/// Convenience alias for results produced by decomposition routines.
pub type DecompositionResult<T> = Result<T, DecompositionError>;
/// Strategy used to split a QUBO problem into smaller sub-problems.
#[derive(Debug, Clone)]
pub enum DecompositionStrategy {
    /// Spectral graph partitioning of the coupling graph.
    Spectral {
        /// Number of partitions to produce.
        num_partitions: usize,
        /// Number of strongest-coupled neighbor variables added to each partition.
        overlap_size: usize,
    },
    /// Fixed-size consecutive blocks of variable indices.
    Block {
        /// Variables per block.
        block_size: usize,
        /// Variables shared between adjacent blocks.
        overlap_size: usize,
    },
    /// Recursive bisection down to a minimum partition size or maximum depth.
    Hierarchical {
        /// Stop splitting once a partition is at most this size.
        min_partition_size: usize,
        /// Maximum recursion depth of the bisection.
        max_depth: usize,
        /// Overlap added around each split boundary.
        overlap_size: usize,
    },
    /// Connectivity clustering over couplings above a strength threshold.
    Clustering {
        /// Upper bound on the number of clusters grown by flood fill.
        num_clusters: usize,
        /// Minimum absolute coupling for two variables to join one cluster.
        strength_threshold: f64,
        /// Strongest-neighbor overlap added per cluster.
        overlap_size: usize,
    },
}
/// Configuration for [`QuboDecomposer`].
#[derive(Debug, Clone)]
pub struct DecompositionConfig {
    /// How the problem is split into sub-problems.
    pub strategy: DecompositionStrategy,
    /// Largest sub-problem size the decomposer should produce.
    pub max_subproblem_size: usize,
    /// Sub-problems smaller than this are skipped or not split further.
    pub min_subproblem_size: usize,
    /// Maximum number of refinement passes performed by `solve`.
    pub refinement_iterations: usize,
    /// Refinement stops once the per-pass improvement falls below this value.
    pub convergence_tolerance: f64,
    /// Annealing parameters handed to the per-sub-problem solver.
    pub solver_params: AnnealingParams,
    /// Whether sub-problems may be solved in parallel.
    pub parallel_solving: bool,
    /// Optional wall-clock limit for the whole solve.
    pub timeout: Option<Duration>,
}
impl Default for DecompositionConfig {
    /// Reasonable defaults: 4-way spectral decomposition with a small
    /// overlap, moderately sized sub-problems, and a one-hour timeout.
    fn default() -> Self {
        Self {
            strategy: DecompositionStrategy::Spectral {
                num_partitions: 4,
                overlap_size: 2,
            },
            max_subproblem_size: 100,
            min_subproblem_size: 10,
            refinement_iterations: 5,
            convergence_tolerance: 1e-6,
            solver_params: AnnealingParams {
                num_sweeps: 1000,
                num_repetitions: 5,
                ..Default::default()
            },
            parallel_solving: true,
            timeout: Some(Duration::from_secs(3600)),
        }
    }
}
/// A self-contained piece of the original QUBO.
#[derive(Debug, Clone)]
pub struct SubProblem {
    /// Local variable indices (0..n) of this sub-problem.
    pub variables: Vec<usize>,
    /// Maps a local index to the original (global) variable index.
    pub variable_mapping: HashMap<usize, usize>,
    /// The extracted QUBO over the local variables.
    pub qubo: QuboModel,
    /// Variables with fixed values (always constructed empty in this file).
    pub fixed_variables: HashMap<usize, bool>,
    /// Identifier of this sub-problem.
    pub id: usize,
}
/// Solution of a single sub-problem, expressed in global variable indices.
#[derive(Debug, Clone)]
pub struct SubSolution {
    /// Identifier of the sub-problem this solution belongs to.
    pub subproblem_id: usize,
    /// Global variable index -> assigned boolean value.
    pub values: HashMap<usize, bool>,
    /// Objective value of the sub-problem (Ising energy plus offset).
    pub objective_value: f64,
    /// Diagnostic string reported by the solver.
    pub solver_info: String,
}
/// Global solution assembled from the individual sub-problem solutions.
#[derive(Debug, Clone)]
pub struct DecomposedSolution {
    /// Global variable index -> boolean value of the combined assignment.
    pub variable_values: HashMap<usize, bool>,
    /// Objective value of the combined assignment on the full QUBO.
    pub objective_value: f64,
    /// The per-sub-problem solutions that were combined.
    pub sub_solutions: Vec<SubSolution>,
    /// Bookkeeping about the decomposition and solve.
    pub stats: DecompositionStats,
}
/// Statistics gathered during a decomposed solve.
#[derive(Debug, Clone)]
pub struct DecompositionStats {
    /// Number of sub-problems produced.
    pub num_subproblems: usize,
    /// Mean sub-problem size (number of variables).
    pub avg_subproblem_size: f64,
    /// Size of the largest sub-problem.
    pub max_subproblem_size: usize,
    /// Size of the smallest sub-problem.
    pub min_subproblem_size: usize,
    /// Time spent splitting the problem.
    pub decomposition_time: Duration,
    /// Time spent solving sub-problems, including refinement passes.
    pub solving_time: Duration,
    /// Number of refinement passes that actually ran.
    pub refinement_iterations: usize,
    /// Whether refinement converged within tolerance before hitting limits.
    pub converged: bool,
}
/// Decomposition-based QUBO solver: splits a problem into overlapping
/// sub-problems, solves each piece, and reconstructs a global solution.
pub struct QuboDecomposer {
    /// Settings controlling decomposition, solving, and refinement.
    config: DecompositionConfig,
}
impl QuboDecomposer {
/// Creates a decomposer that will use the given configuration.
#[must_use]
pub const fn new(config: DecompositionConfig) -> Self {
    Self { config }
}
/// Solves a QUBO problem by decomposing it into overlapping sub-problems,
/// solving each with the configured annealer, and stitching the
/// sub-solutions back together.
///
/// Runs up to `config.refinement_iterations` refinement passes, stopping
/// early when a pass improves by less than `config.convergence_tolerance`
/// or when the optional global timeout elapses.
///
/// # Errors
/// Propagates failures from decomposition, sub-problem solving, or
/// solution reconstruction.
pub fn solve(&self, qubo: &QuboModel) -> DecompositionResult<DecomposedSolution> {
    let total_start = Instant::now();
    let decompose_start = Instant::now();
    let sub_problems = self.decompose_problem(qubo)?;
    let decomposition_time = decompose_start.elapsed();
    println!(
        "Decomposed problem into {} sub-problems",
        sub_problems.len()
    );
    let solve_start = Instant::now();
    let mut sub_solutions = self.solve_subproblems(&sub_problems)?;
    let mut solving_time = solve_start.elapsed();
    let mut converged = false;
    let mut refinement_iterations = 0;
    for iteration in 0..self.config.refinement_iterations {
        let refine_start = Instant::now();
        let (new_solutions, improvement) =
            self.refine_solutions(qubo, &sub_problems, &sub_solutions)?;
        solving_time += refine_start.elapsed();
        refinement_iterations += 1;
        if improvement < self.config.convergence_tolerance {
            // Converged: the candidate solutions from this pass are dropped
            // because they did not improve beyond tolerance.
            converged = true;
            break;
        }
        sub_solutions = new_solutions;
        if let Some(timeout) = self.config.timeout {
            if total_start.elapsed() > timeout {
                break;
            }
        }
        println!(
            "Refinement iteration {}: improvement = {:.6}",
            iteration + 1,
            improvement
        );
    }
    let (variable_values, objective_value) = self.reconstruct_solution(qubo, &sub_solutions)?;
    let subproblem_sizes: Vec<usize> =
        sub_problems.iter().map(|sp| sp.variables.len()).collect();
    // Guard the empty-decomposition case so the average is 0.0 rather than
    // the NaN a 0/0 division would produce.
    let avg_subproblem_size = if subproblem_sizes.is_empty() {
        0.0
    } else {
        subproblem_sizes.iter().sum::<usize>() as f64 / subproblem_sizes.len() as f64
    };
    let stats = DecompositionStats {
        num_subproblems: sub_problems.len(),
        avg_subproblem_size,
        max_subproblem_size: *subproblem_sizes.iter().max().unwrap_or(&0),
        min_subproblem_size: *subproblem_sizes.iter().min().unwrap_or(&0),
        decomposition_time,
        solving_time,
        refinement_iterations,
        converged,
    };
    Ok(DecomposedSolution {
        variable_values,
        objective_value,
        sub_solutions,
        stats,
    })
}
/// Dispatches to the decomposition routine selected by `config.strategy`.
fn decompose_problem(&self, qubo: &QuboModel) -> DecompositionResult<Vec<SubProblem>> {
    match &self.config.strategy {
        DecompositionStrategy::Spectral {
            num_partitions,
            overlap_size,
        } => self.spectral_decomposition(qubo, *num_partitions, *overlap_size),
        DecompositionStrategy::Block {
            block_size,
            overlap_size,
        } => self.block_decomposition(qubo, *block_size, *overlap_size),
        DecompositionStrategy::Hierarchical {
            min_partition_size,
            max_depth,
            overlap_size,
        } => self.hierarchical_decomposition(
            qubo,
            *min_partition_size,
            *max_depth,
            *overlap_size,
        ),
        DecompositionStrategy::Clustering {
            num_clusters,
            strength_threshold,
            overlap_size,
        } => self.clustering_decomposition(
            qubo,
            *num_clusters,
            *strength_threshold,
            *overlap_size,
        ),
    }
}
/// Partitions the coupling graph spectrally and builds one sub-problem per
/// partition, expanding each partition with its strongest-coupled
/// neighbors (see `add_overlap`).
///
/// # Errors
/// Returns `DecompositionFailed` if the partitioner fails; propagates
/// errors from sub-QUBO extraction.
fn spectral_decomposition(
    &self,
    qubo: &QuboModel,
    num_partitions: usize,
    overlap_size: usize,
) -> DecompositionResult<Vec<SubProblem>> {
    // Convert to Ising form so the couplings define the graph edges.
    let (ising, _) = qubo.to_ising();
    let partitioner = SpectralPartitioner::new();
    let edges: Vec<(usize, usize, f64)> = ising
        .couplings()
        .into_iter()
        .map(|coupling| (coupling.i, coupling.j, coupling.strength))
        .collect();
    let partition = partitioner
        .partition_graph(ising.num_qubits, &edges, num_partitions)
        .map_err(|e| DecompositionError::DecompositionFailed(e.to_string()))?;
    let mut sub_problems = Vec::new();
    for id in 0..num_partitions {
        let nodes = partition.get_partition(id);
        let variables = self.add_overlap(&nodes, &ising, overlap_size);
        let sub_qubo = self.extract_subproblem(qubo, &variables)?;
        // `variables` in SubProblem holds local indices 0..n; the mapping
        // translates a local index back to the original variable index.
        let variable_mapping = variables
            .iter()
            .enumerate()
            .map(|(i, &var)| (i, var))
            .collect();
        sub_problems.push(SubProblem {
            variables: (0..variables.len()).collect(),
            variable_mapping,
            qubo: sub_qubo,
            fixed_variables: HashMap::new(),
            id,
        });
    }
    Ok(sub_problems)
}
fn block_decomposition(
&self,
qubo: &QuboModel,
block_size: usize,
overlap_size: usize,
) -> DecompositionResult<Vec<SubProblem>> {
let mut sub_problems = Vec::new();
let mut id = 0;
for start in (0..qubo.num_variables).step_by(block_size - overlap_size) {
let end = std::cmp::min(start + block_size, qubo.num_variables);
if end - start < self.config.min_subproblem_size {
break;
}
let variables: Vec<usize> = (start..end).collect();
let sub_qubo = self.extract_subproblem(qubo, &variables)?;
let variable_mapping = variables
.iter()
.enumerate()
.map(|(i, &var)| (i, var))
.collect();
sub_problems.push(SubProblem {
variables: (0..variables.len()).collect(),
variable_mapping,
qubo: sub_qubo,
fixed_variables: HashMap::new(),
id,
});
id += 1;
}
Ok(sub_problems)
}
/// Decomposes the problem by recursive bisection of the full variable
/// range, delegating the actual splitting to `recursive_bisection`.
///
/// # Errors
/// Propagates errors from sub-QUBO extraction in the recursion leaves.
fn hierarchical_decomposition(
    &self,
    qubo: &QuboModel,
    min_partition_size: usize,
    max_depth: usize,
    overlap_size: usize,
) -> DecompositionResult<Vec<SubProblem>> {
    let all_variables: Vec<usize> = (0..qubo.num_variables).collect();
    let mut sub_problems = Vec::new();
    // `id` is threaded through the recursion so leaves get sequential ids.
    let mut id = 0;
    self.recursive_bisection(
        qubo,
        all_variables,
        0,
        max_depth,
        min_partition_size,
        overlap_size,
        &mut sub_problems,
        &mut id,
    )?;
    Ok(sub_problems)
}
/// Recursively splits `variables` in half until a partition reaches
/// `min_size` or the recursion reaches `max_depth`; each leaf becomes one
/// sub-problem appended to `sub_problems`.
///
/// Before recursing, each half is padded with `overlap_size` variables
/// around the split boundary (see `add_overlap_to_vars`).
fn recursive_bisection(
    &self,
    qubo: &QuboModel,
    variables: Vec<usize>,
    depth: usize,
    max_depth: usize,
    min_size: usize,
    overlap_size: usize,
    sub_problems: &mut Vec<SubProblem>,
    id: &mut usize,
) -> DecompositionResult<()> {
    // Base case: emit this partition as a sub-problem.
    if variables.len() <= min_size || depth >= max_depth {
        let sub_qubo = self.extract_subproblem(qubo, &variables)?;
        // Local indices 0..n plus a local -> global index mapping.
        let variable_mapping = variables
            .iter()
            .enumerate()
            .map(|(i, &var)| (i, var))
            .collect();
        sub_problems.push(SubProblem {
            variables: (0..variables.len()).collect(),
            variable_mapping,
            qubo: sub_qubo,
            fixed_variables: HashMap::new(),
            id: *id,
        });
        *id += 1;
        return Ok(());
    }
    // Split at the midpoint, then pad each half with overlap variables.
    let mid = variables.len() / 2;
    let left_vars = variables[..mid].to_vec();
    let right_vars = variables[mid..].to_vec();
    let left_with_overlap = self.add_overlap_to_vars(&left_vars, &variables, overlap_size);
    let right_with_overlap = self.add_overlap_to_vars(&right_vars, &variables, overlap_size);
    self.recursive_bisection(
        qubo,
        left_with_overlap,
        depth + 1,
        max_depth,
        min_size,
        overlap_size,
        sub_problems,
        id,
    )?;
    self.recursive_bisection(
        qubo,
        right_with_overlap,
        depth + 1,
        max_depth,
        min_size,
        overlap_size,
        sub_problems,
        id,
    )?;
    Ok(())
}
/// Groups variables into clusters by flood-filling over quadratic
/// couplings stronger than `strength_threshold`, then builds one
/// sub-problem per cluster (expanded with overlap variables).
///
/// Variables still unassigned once `num_clusters` clusters exist are
/// spread round-robin across the existing clusters. Clusters smaller than
/// `config.min_subproblem_size` (after overlap) are dropped.
///
/// # Errors
/// Propagates errors from sub-QUBO extraction.
fn clustering_decomposition(
    &self,
    qubo: &QuboModel,
    num_clusters: usize,
    strength_threshold: f64,
    overlap_size: usize,
) -> DecompositionResult<Vec<SubProblem>> {
    let mut clusters = vec![0; qubo.num_variables];
    let mut current_cluster = 0;
    let mut visited = vec![false; qubo.num_variables];
    for i in 0..qubo.num_variables {
        if visited[i] {
            continue;
        }
        // Flood fill: pull in every variable reachable through couplings
        // above the threshold, starting from seed `i`.
        let mut stack = vec![i];
        visited[i] = true;
        while let Some(var) = stack.pop() {
            for j in 0..qubo.num_variables {
                if i != j && !visited[j] {
                    let interaction = qubo.get_quadratic(var, j).unwrap_or(0.0).abs();
                    if interaction > strength_threshold {
                        visited[j] = true;
                        clusters[j] = current_cluster;
                        stack.push(j);
                    }
                }
            }
        }
        clusters[i] = current_cluster;
        current_cluster += 1;
        if current_cluster >= num_clusters {
            break;
        }
    }
    // Variables the flood fill never reached are assigned round-robin.
    for i in 0..qubo.num_variables {
        if !visited[i] {
            clusters[i] = i % num_clusters;
        }
    }
    let mut cluster_vars: HashMap<usize, Vec<usize>> = HashMap::new();
    for (var, &cluster) in clusters.iter().enumerate() {
        cluster_vars.entry(cluster).or_default().push(var);
    }
    // Hoisted out of the per-cluster loop: the Ising conversion does not
    // depend on the cluster being processed.
    let (ising, _) = qubo.to_ising();
    // Process clusters in ascending id order so the output is
    // deterministic instead of following HashMap iteration order.
    let mut ordered: Vec<(usize, Vec<usize>)> = cluster_vars.into_iter().collect();
    ordered.sort_unstable_by_key(|&(id, _)| id);
    let mut sub_problems = Vec::new();
    for (id, variables) in ordered {
        let variables = self.add_overlap(&variables, &ising, overlap_size);
        if variables.len() >= self.config.min_subproblem_size {
            let sub_qubo = self.extract_subproblem(qubo, &variables)?;
            // Local indices 0..n plus a local -> global index mapping.
            let variable_mapping = variables
                .iter()
                .enumerate()
                .map(|(i, &var)| (i, var))
                .collect();
            sub_problems.push(SubProblem {
                variables: (0..variables.len()).collect(),
                variable_mapping,
                qubo: sub_qubo,
                fixed_variables: HashMap::new(),
                id,
            });
        }
    }
    Ok(sub_problems)
}
fn add_overlap(
&self,
variables: &[usize],
ising: &IsingModel,
overlap_size: usize,
) -> Vec<usize> {
let mut result: HashSet<usize> = variables.iter().copied().collect();
for &var in variables {
let mut neighbors = Vec::new();
for coupling in ising.couplings() {
if coupling.i == var && !result.contains(&coupling.j) {
neighbors.push(coupling.j);
} else if coupling.j == var && !result.contains(&coupling.i) {
neighbors.push(coupling.i);
}
}
neighbors.sort_by(|&a, &b| {
let strength_a = ising.get_coupling(var, a).unwrap_or(0.0).abs();
let strength_b = ising.get_coupling(var, b).unwrap_or(0.0).abs();
strength_b
.partial_cmp(&strength_a)
.unwrap_or(std::cmp::Ordering::Equal)
});
for &neighbor in neighbors.iter().take(overlap_size) {
result.insert(neighbor);
}
}
let mut result_vec: Vec<usize> = result.into_iter().collect();
result_vec.sort_unstable();
result_vec
}
/// Expands a contiguous slice of `all_vars` with up to `overlap_size`
/// variables on each side of the slice's boundaries within `all_vars`.
///
/// The previous implementation indexed `all_vars` by positions relative
/// to 0 instead of relative to the slice's own offset, so for the *right*
/// half produced by `recursive_bisection` the "overlap" came from the far
/// start of the parent slice rather than from the split boundary. We now
/// locate the slice inside `all_vars` first and take the overlap around
/// its actual position.
fn add_overlap_to_vars(
    &self,
    variables: &[usize],
    all_vars: &[usize],
    overlap_size: usize,
) -> Vec<usize> {
    let mut result: HashSet<usize> = variables.iter().copied().collect();
    // Position of this slice within `all_vars` (0 for an empty slice or
    // if the first element cannot be found).
    let offset = variables
        .first()
        .and_then(|first| all_vars.iter().position(|&v| v == *first))
        .unwrap_or(0);
    let start = offset.saturating_sub(overlap_size);
    let end = std::cmp::min(offset + variables.len() + overlap_size, all_vars.len());
    for &var in &all_vars[start..end] {
        result.insert(var);
    }
    let mut result_vec: Vec<usize> = result.into_iter().collect();
    result_vec.sort_unstable();
    result_vec
}
/// Builds a smaller QUBO containing only `variables`, re-indexed to the
/// range 0..variables.len() in the given order.
///
/// # Errors
/// Propagates errors from reading or writing QUBO coefficients.
fn extract_subproblem(
    &self,
    qubo: &QuboModel,
    variables: &[usize],
) -> DecompositionResult<QuboModel> {
    let mut sub_qubo = QuboModel::new(variables.len());
    // global variable index -> local index in the sub-problem
    let var_map: HashMap<usize, usize> = variables
        .iter()
        .enumerate()
        .map(|(local, &global)| (global, local))
        .collect();
    // Copy non-zero linear terms.
    for (local, &global) in variables.iter().enumerate() {
        let linear = qubo.get_linear(global)?;
        if linear != 0.0 {
            sub_qubo.set_linear(local, linear)?;
        }
    }
    // Copy non-zero quadratic terms for every unordered pair in the
    // subset, always querying with the smaller index first.
    for (idx, &a) in variables.iter().enumerate() {
        for &b in &variables[idx + 1..] {
            if a == b {
                continue;
            }
            let (lo, hi) = if a < b { (a, b) } else { (b, a) };
            let quadratic = qubo.get_quadratic(lo, hi)?;
            if quadratic != 0.0 {
                sub_qubo.set_quadratic(var_map[&lo], var_map[&hi], quadratic)?;
            }
        }
    }
    Ok(sub_qubo)
}
/// Solves each sub-problem independently with a fresh quantum annealing
/// simulator and maps the resulting spins back onto the original
/// variables (spin +1 -> true).
///
/// # Errors
/// Returns `SolvingFailed` if simulator construction or annealing fails.
fn solve_subproblems(
    &self,
    sub_problems: &[SubProblem],
) -> DecompositionResult<Vec<SubSolution>> {
    // NOTE(review): sub-problems are solved sequentially here even when
    // `config.parallel_solving` is true — confirm whether that flag is
    // honored elsewhere.
    let mut sub_solutions = Vec::new();
    for sub_problem in sub_problems {
        // Solve in Ising form; `offset` restores the QUBO objective scale.
        let (ising, offset) = sub_problem.qubo.to_ising();
        let mut simulator = QuantumAnnealingSimulator::new(self.config.solver_params.clone())
            .map_err(|e| DecompositionError::SolvingFailed(e.to_string()))?;
        let result = simulator
            .solve(&ising)
            .map_err(|e| DecompositionError::SolvingFailed(e.to_string()))?;
        // Translate local spin indices back to global variable indices.
        let values: HashMap<usize, bool> = result
            .best_spins
            .iter()
            .enumerate()
            .map(|(i, &spin)| {
                let original_var = sub_problem.variable_mapping[&i];
                (original_var, spin > 0)
            })
            .collect();
        let objective_value = result.best_energy + offset;
        sub_solutions.push(SubSolution {
            subproblem_id: sub_problem.id,
            values,
            objective_value,
            solver_info: result.info,
        });
    }
    Ok(sub_solutions)
}
/// Placeholder refinement pass.
///
/// Currently returns the solutions unchanged with an improvement of 0.0,
/// which makes `solve` report convergence after its first refinement
/// iteration. Replace with a real boundary-refinement step to make the
/// iteration loop in `solve` meaningful.
fn refine_solutions(
    &self,
    _qubo: &QuboModel,
    _sub_problems: &[SubProblem],
    sub_solutions: &[SubSolution],
) -> DecompositionResult<(Vec<SubSolution>, f64)> {
    Ok((sub_solutions.to_vec(), 0.0))
}
/// Combines the sub-solutions into one global assignment by majority vote
/// on each variable (ties resolve to `false`), then evaluates the full
/// QUBO objective on the combined assignment.
///
/// Variables not covered by any sub-problem default to `false` when the
/// objective is evaluated.
///
/// # Errors
/// Propagates errors from evaluating the QUBO objective.
fn reconstruct_solution(
    &self,
    qubo: &QuboModel,
    sub_solutions: &[SubSolution],
) -> DecompositionResult<(HashMap<usize, bool>, f64)> {
    // Tally per-variable votes as (true count, false count).
    let mut vote_counts: HashMap<usize, (usize, usize)> = HashMap::new();
    for sub_solution in sub_solutions {
        for (&var, &value) in &sub_solution.values {
            let entry = vote_counts.entry(var).or_insert((0, 0));
            if value {
                entry.0 += 1;
            } else {
                entry.1 += 1;
            }
        }
    }
    // Majority wins; a tie yields false.
    let variable_values: HashMap<usize, bool> = vote_counts
        .into_iter()
        .map(|(var, (yes, no))| (var, yes > no))
        .collect();
    let binary_vars: Vec<bool> = (0..qubo.num_variables)
        .map(|i| variable_values.get(&i).copied().unwrap_or(false))
        .collect();
    let objective_value = qubo.objective(&binary_vars)?;
    Ok((variable_values, objective_value))
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::qubo::QuboBuilder;
/// The default configuration should carry sane positive limits.
#[test]
fn test_decomposition_config() {
    let config = DecompositionConfig::default();
    assert!(config.max_subproblem_size > 0);
    assert!(config.refinement_iterations > 0);
}
/// Block decomposition of a 20-variable linear QUBO should produce a
/// non-empty set of sub-problems, each no larger than the block size.
#[test]
fn test_block_decomposition() {
    let mut qubo = QuboModel::new(20);
    for i in 0..20 {
        qubo.set_linear(i, 1.0).expect("set_linear should succeed");
    }
    let config = DecompositionConfig {
        strategy: DecompositionStrategy::Block {
            block_size: 4,
            overlap_size: 1,
        },
        min_subproblem_size: 3,
        ..Default::default()
    };
    let decomposer = QuboDecomposer::new(config);
    let sub_problems = decomposer
        .block_decomposition(&qubo, 4, 1)
        .expect("block_decomposition should succeed");
    assert!(!sub_problems.is_empty());
    for sub_problem in &sub_problems {
        assert!(sub_problem.variables.len() <= 4);
    }
}
/// End-to-end solve of a small 6-variable chain QUBO via block
/// decomposition: all variables should be assigned and at least one
/// sub-problem produced.
#[test]
fn test_small_qubo_decomposition() {
    let mut builder = QuboBuilder::new();
    let vars: Vec<_> = (0..6)
        .map(|i| {
            builder
                .add_variable(format!("x{}", i))
                .expect("add_variable should succeed")
        })
        .collect();
    for i in 0..6 {
        builder
            .set_linear_term(&vars[i], 1.0)
            .expect("set_linear_term should succeed");
    }
    // Chain couplings x0-x1-...-x5 with attractive (-2.0) interactions.
    for i in 0..5 {
        builder
            .set_quadratic_term(&vars[i], &vars[i + 1], -2.0)
            .expect("set_quadratic_term should succeed");
    }
    let qubo_formulation = builder.build();
    let qubo = qubo_formulation.to_qubo_model();
    let config = DecompositionConfig {
        strategy: DecompositionStrategy::Block {
            block_size: 3,
            overlap_size: 1,
        },
        max_subproblem_size: 5,
        min_subproblem_size: 2,
        refinement_iterations: 2,
        ..Default::default()
    };
    let decomposer = QuboDecomposer::new(config);
    let result = decomposer.solve(&qubo).expect("solve should succeed");
    assert_eq!(result.variable_values.len(), 6);
    assert!(result.stats.num_subproblems > 0);
}
}