pub mod complexity;
pub mod eincode;
pub mod expr_tree;
pub mod greedy;
pub mod incidence_list;
pub mod json;
pub mod label;
pub mod score;
pub mod simplifier;
pub mod slicer;
pub mod treesa;
pub mod utils;
#[cfg(test)]
pub mod test_utils;
pub use complexity::{
eincode_complexity, flop, nested_complexity, nested_flop, peak_memory, sliced_complexity,
ContractionComplexity,
};
pub use eincode::{log2_size_dict, uniform_size_dict, EinCode, NestedEinsum, SlicedEinsum};
pub use greedy::{optimize_greedy, ContractionTree, GreedyMethod, GreedyResult};
pub use label::Label;
pub use score::ScoreFunction;
pub use slicer::{slice_code, CodeSlicer, Slicer, TreeSASlicer};
pub use treesa::{optimize_treesa, Initializer, TreeSA};
use std::collections::HashMap;
/// Common interface for contraction-order optimizers.
///
/// Implementors take a flat einsum specification (`EinCode`) together with
/// the dimension size of every index label and produce a nested contraction
/// tree (`NestedEinsum`), or `None` when no tree can be built (e.g. an
/// einsum with no input tensors — see `test_empty_code` below).
pub trait CodeOptimizer {
    /// Optimize the contraction order of `code`.
    ///
    /// * `code` - the flat einsum specification to optimize.
    /// * `size_dict` - dimension size of each index label appearing in `code`.
    ///
    /// Returns the optimized contraction tree, or `None` on failure.
    fn optimize<L: Label>(
        &self,
        code: &EinCode<L>,
        size_dict: &HashMap<L, usize>,
    ) -> Option<NestedEinsum<L>>;
}
impl CodeOptimizer for GreedyMethod {
fn optimize<L: Label>(
&self,
code: &EinCode<L>,
size_dict: &HashMap<L, usize>,
) -> Option<NestedEinsum<L>> {
optimize_greedy(code, size_dict, self)
}
}
impl CodeOptimizer for TreeSA {
fn optimize<L: Label>(
&self,
code: &EinCode<L>,
size_dict: &HashMap<L, usize>,
) -> Option<NestedEinsum<L>> {
optimize_treesa(code, size_dict, self)
}
}
pub fn optimize_code<L: Label, O: CodeOptimizer>(
code: &EinCode<L>,
size_dict: &HashMap<L, usize>,
optimizer: &O,
) -> Option<NestedEinsum<L>> {
optimizer.optimize(code, size_dict)
}
pub fn contraction_complexity<L: Label>(
code: &NestedEinsum<L>,
size_dict: &HashMap<L, usize>,
original_ixs: &[Vec<L>],
) -> ContractionComplexity {
nested_complexity(code, size_dict, original_ixs)
}
// Unit tests for the public `optimize_code` / `contraction_complexity`
// wrappers, exercising both optimizer backends plus degenerate inputs.
#[cfg(test)]
mod tests {
    use super::*;

    // Greedy optimization of a 3-tensor matrix chain: expect a binary
    // contraction tree with one leaf per input tensor.
    #[test]
    fn test_optimize_code_greedy() {
        let code = EinCode::new(
            vec![vec!['i', 'j'], vec!['j', 'k'], vec!['k', 'l']],
            vec!['i', 'l'],
        );
        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 8);
        sizes.insert('k', 8);
        sizes.insert('l', 4);
        let result = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(result.is_some());
        let nested = result.unwrap();
        assert!(nested.is_binary());
        assert_eq!(nested.leaf_count(), 3);
    }

    // TreeSA (fast preset) on a two-tensor contraction also yields a binary tree.
    #[test]
    fn test_optimize_code_treesa() {
        let code = EinCode::new(vec![vec!['i', 'j'], vec!['j', 'k']], vec!['i', 'k']);
        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 8);
        sizes.insert('k', 4);
        let result = optimize_code(&code, &sizes, &TreeSA::fast());
        assert!(result.is_some());
        let nested = result.unwrap();
        assert!(nested.is_binary());
    }

    // The complexity wrapper reports positive time (tc) and space (sc)
    // complexity for a non-trivial contraction.
    #[test]
    fn test_contraction_complexity_wrapper() {
        let code = EinCode::new(vec![vec!['i', 'j'], vec!['j', 'k']], vec!['i', 'k']);
        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 8);
        sizes.insert('k', 4);
        let result = optimize_code(&code, &sizes, &GreedyMethod::default()).unwrap();
        let complexity = contraction_complexity(&result, &sizes, &code.ixs);
        assert!(complexity.tc > 0.0);
        assert!(complexity.sc > 0.0);
    }

    // A single input tensor needs no contraction: the tree is a lone leaf.
    #[test]
    fn test_single_tensor() {
        let code = EinCode::new(vec![vec!['i', 'j']], vec!['i', 'j']);
        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 4);
        let result = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(result.is_some());
        assert!(result.unwrap().is_leaf());
    }

    // Trace-like contraction: every index is summed out (scalar output).
    #[test]
    fn test_trace() {
        let code = EinCode::new(
            vec![vec!['i', 'j'], vec!['j', 'i']],
            vec![],
        );
        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 4);
        let result = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(result.is_some());
    }

    // With no tensors at all there is nothing to optimize: expect `None`.
    #[test]
    fn test_empty_code() {
        let code: EinCode<char> = EinCode::new(vec![], vec![]);
        let sizes: HashMap<char, usize> = HashMap::new();
        let result = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(result.is_none());
    }

    // End-to-end pipeline: optimize first, then slice the resulting tree.
    #[test]
    fn test_optimize_code_with_slicing() {
        use crate::slicer::{slice_code, TreeSASlicer};
        let code = EinCode::new(
            vec![vec!['i', 'j'], vec!['j', 'k'], vec!['k', 'l']],
            vec!['i', 'l'],
        );
        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 8);
        sizes.insert('k', 8);
        sizes.insert('l', 4);
        let nested = optimize_code(&code, &sizes, &GreedyMethod::default()).unwrap();
        let slicer = TreeSASlicer::fast();
        let sliced = slice_code(&nested, &sizes, &slicer, &code.ixs);
        assert!(sliced.is_some());
    }

    // A deeper 4-tensor chain: all three complexity measures (time, space,
    // read-write) should come out positive.
    #[test]
    fn test_contraction_complexity_deep_tree() {
        let code = EinCode::new(
            vec![
                vec!['a', 'b'],
                vec!['b', 'c'],
                vec!['c', 'd'],
                vec!['d', 'e'],
            ],
            vec!['a', 'e'],
        );
        let mut sizes = HashMap::new();
        sizes.insert('a', 2);
        sizes.insert('b', 2);
        sizes.insert('c', 2);
        sizes.insert('d', 2);
        sizes.insert('e', 2);
        let nested = optimize_code(&code, &sizes, &GreedyMethod::default()).unwrap();
        let complexity = contraction_complexity(&nested, &sizes, &code.ixs);
        assert!(complexity.tc > 0.0);
        assert!(complexity.sc > 0.0);
        assert!(complexity.rwc > 0.0);
    }

    // TreeSA in path-decomposition mode must still return a valid tree
    // (binary, or a single leaf for trivial inputs).
    #[test]
    fn test_optimize_code_treesa_with_path_decomp() {
        let code = EinCode::new(
            vec![vec!['i', 'j'], vec!['j', 'k'], vec!['k', 'l']],
            vec!['i', 'l'],
        );
        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 8);
        sizes.insert('k', 8);
        sizes.insert('l', 4);
        let result = optimize_code(&code, &sizes, &TreeSA::path());
        assert!(result.is_some());
        let nested = result.unwrap();
        assert!(nested.is_binary() || nested.is_leaf());
    }
}
// Regression tests for issue #13: the root node of an optimized tree must
// carry exactly the output indices (`iy`) the caller requested — including
// scalar (empty) and partial outputs — for both optimizer backends.
#[cfg(test)]
mod issue_13_tests {
    use super::*;

    // Greedy, scalar output: root `iy` must be empty.
    #[test]
    fn test_issue_13_greedy_scalar_output() {
        let code = EinCode::new(vec![vec![0usize, 1], vec![1usize, 2]], vec![]);
        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2)].into();
        let optimizer = GreedyMethod::default();
        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");
        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "Root node's iy should match requested output. Got {:?}, expected {:?}",
                    eins.iy, code.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("Multi-tensor operation should not return Leaf");
            }
        }
    }

    // Greedy, partial output: only label 0 is kept in the result.
    #[test]
    fn test_issue_13_greedy_partial_output() {
        let code = EinCode::new(vec![vec![0usize, 1], vec![1usize, 2]], vec![0usize]);
        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2)].into();
        let optimizer = GreedyMethod::default();
        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");
        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "Root node's iy should match requested output. Got {:?}, expected {:?}",
                    eins.iy, code.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("Multi-tensor operation should not return Leaf");
            }
        }
    }

    // TreeSA, scalar output: same invariant as the greedy case above.
    #[test]
    fn test_issue_13_treesa_scalar_output() {
        let code = EinCode::new(vec![vec![0usize, 1], vec![1usize, 2]], vec![]);
        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2)].into();
        let optimizer = TreeSA::fast();
        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");
        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "TreeSA root node's iy should match requested output. Got {:?}, expected {:?}",
                    eins.iy, code.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("Multi-tensor operation should not return Leaf");
            }
        }
    }

    // TreeSA, partial output: only label 2 is kept in the result.
    #[test]
    fn test_issue_13_treesa_partial_output() {
        let code = EinCode::new(vec![vec![0usize, 1], vec![1usize, 2]], vec![2usize]);
        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2)].into();
        let optimizer = TreeSA::fast();
        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");
        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "TreeSA root node's iy should match requested output. Got {:?}, expected {:?}",
                    eins.iy, code.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("Multi-tensor operation should not return Leaf");
            }
        }
    }

    // Greedy, longer chain, scalar output.
    #[test]
    fn test_issue_13_greedy_chain_scalar_output() {
        let code = EinCode::new(
            vec![vec![0usize, 1], vec![1usize, 2], vec![2usize, 3]],
            vec![],
        );
        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2), (3, 2)].into();
        let optimizer = GreedyMethod::default();
        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");
        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "Root node's iy should be empty (scalar). Got {:?}",
                    eins.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("3-tensor operation should not return Leaf");
            }
        }
    }

    // Greedy, longer chain, partial output ([0]).
    #[test]
    fn test_issue_13_greedy_chain_partial_output() {
        let code = EinCode::new(
            vec![vec![0usize, 1], vec![1usize, 2], vec![2usize, 3]],
            vec![0usize],
        );
        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2), (3, 2)].into();
        let optimizer = GreedyMethod::default();
        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");
        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "Root node's iy should be [0]. Got {:?}",
                    eins.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("3-tensor operation should not return Leaf");
            }
        }
    }

    // TreeSA, longer chain, scalar output.
    #[test]
    fn test_issue_13_treesa_chain_scalar_output() {
        let code = EinCode::new(
            vec![vec![0usize, 1], vec![1usize, 2], vec![2usize, 3]],
            vec![],
        );
        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2), (3, 2)].into();
        let optimizer = TreeSA::fast();
        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");
        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "TreeSA root node's iy should be empty (scalar). Got {:?}",
                    eins.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("3-tensor operation should not return Leaf");
            }
        }
    }

    // Root `iy` must be the requested output AND the tree must actually nest
    // (at least one non-leaf child for a 3-tensor contraction).
    #[test]
    fn test_issue_13_intermediate_nodes_have_correct_output() {
        let code = EinCode::new(
            vec![vec!['i', 'j'], vec!['j', 'k'], vec!['k', 'l']],
            vec!['i', 'l'],
        );
        let sizes: HashMap<char, usize> = [('i', 2), ('j', 2), ('k', 2), ('l', 2)].into();
        let optimizer = GreedyMethod::default();
        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");
        match &tree {
            NestedEinsum::Node { eins, args, .. } => {
                assert_eq!(eins.iy, vec!['i', 'l'], "Root iy should be [i, l]");
                let has_intermediate = args.iter().any(|arg| !arg.is_leaf());
                assert!(
                    has_intermediate,
                    "Should have at least one intermediate node"
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("3-tensor operation should not return Leaf");
            }
        }
    }
}
// Numerical verification: executes the contraction trees produced by both
// optimizers with the naive reference contractor from `test_utils` and
// checks that they produce (approximately) identical tensors — a different
// contraction order must never change the mathematical result.
#[cfg(test)]
mod numerical_verification_tests {
    use super::*;
    use crate::test_utils::{execute_nested, tensors_approx_equal, NaiveContractor};

    // 4-tensor matrix chain: greedy and TreeSA orders must agree numerically.
    #[test]
    fn test_numerical_matrix_chain() {
        let code = EinCode::new(
            vec![vec![1usize, 2], vec![2, 3], vec![3, 4], vec![4, 5]],
            vec![1, 5],
        );
        let sizes: HashMap<usize, usize> = [(1, 3), (2, 4), (3, 5), (4, 4), (5, 3)].into();
        // Identity mapping: einsum label i -> contractor axis label i.
        let label_map: HashMap<usize, usize> = (1..=5).map(|i| (i, i)).collect();
        let mut contractor_greedy = NaiveContractor::new();
        // Tensor shapes match `sizes` for each label list above.
        contractor_greedy.add_tensor(0, vec![3, 4]);
        contractor_greedy.add_tensor(1, vec![4, 5]);
        contractor_greedy.add_tensor(2, vec![5, 4]);
        contractor_greedy.add_tensor(3, vec![4, 3]);
        // Clone so both backends contract the same input data.
        let mut contractor_treesa = contractor_greedy.clone();
        let greedy_tree =
            optimize_code(&code, &sizes, &GreedyMethod::default()).expect("Greedy should succeed");
        let treesa_tree =
            optimize_code(&code, &sizes, &TreeSA::fast()).expect("TreeSA should succeed");
        let greedy_idx = execute_nested(&greedy_tree, &mut contractor_greedy, &label_map);
        let treesa_idx = execute_nested(&treesa_tree, &mut contractor_treesa, &label_map);
        let greedy_result = contractor_greedy.get_tensor(greedy_idx).unwrap();
        let treesa_result = contractor_treesa.get_tensor(treesa_idx).unwrap();
        assert_eq!(
            greedy_result.shape(),
            treesa_result.shape(),
            "Shapes should match"
        );
        assert!(
            tensors_approx_equal(greedy_result, treesa_result, 1e-10, 1e-12),
            "Greedy and TreeSA should produce identical numerical results"
        );
    }

    // Hyperedge case: label 2 appears in three tensors; checks structure of
    // both trees and the output shape of the greedy contraction.
    #[test]
    fn test_numerical_with_hyperedge() {
        let code = EinCode::new(vec![vec![1usize, 2], vec![2, 3], vec![2, 4]], vec![1, 3, 4]);
        let sizes: HashMap<usize, usize> = [(1, 2), (2, 3), (3, 2), (4, 2)].into();
        let label_map: HashMap<usize, usize> = (1..=4).map(|i| (i, i)).collect();
        let mut contractor = NaiveContractor::new();
        contractor.add_tensor(0, vec![2, 3]);
        contractor.add_tensor(1, vec![3, 2]);
        contractor.add_tensor(2, vec![3, 2]);
        let greedy_tree =
            optimize_code(&code, &sizes, &GreedyMethod::default()).expect("Greedy should succeed");
        let treesa_tree =
            optimize_code(&code, &sizes, &TreeSA::fast()).expect("TreeSA should succeed");
        assert!(greedy_tree.is_binary(), "Greedy should produce binary tree");
        assert!(treesa_tree.is_binary(), "TreeSA should produce binary tree");
        assert_eq!(greedy_tree.leaf_count(), 3, "Should have 3 leaves");
        assert_eq!(treesa_tree.leaf_count(), 3, "Should have 3 leaves");
        let greedy_idx = execute_nested(&greedy_tree, &mut contractor, &label_map);
        let greedy_result = contractor.get_tensor(greedy_idx).unwrap();
        assert_eq!(
            greedy_result.shape(),
            &[2, 2, 2],
            "Output should be 2x2x2 for [i,k,l]"
        );
    }

    // Full trace of a 3-tensor ring: result is a scalar; both backends must agree.
    #[test]
    fn test_numerical_scalar_output() {
        let code = EinCode::new(vec![vec![1usize, 2], vec![2, 3], vec![3, 1]], vec![]);
        let sizes: HashMap<usize, usize> = [(1, 3), (2, 4), (3, 3)].into();
        let label_map: HashMap<usize, usize> = (1..=3).map(|i| (i, i)).collect();
        let mut contractor_greedy = NaiveContractor::new();
        contractor_greedy.add_tensor(0, vec![3, 4]);
        contractor_greedy.add_tensor(1, vec![4, 3]);
        contractor_greedy.add_tensor(2, vec![3, 3]);
        let mut contractor_treesa = contractor_greedy.clone();
        let greedy_tree =
            optimize_code(&code, &sizes, &GreedyMethod::default()).expect("Greedy should succeed");
        let treesa_tree =
            optimize_code(&code, &sizes, &TreeSA::fast()).expect("TreeSA should succeed");
        let greedy_idx = execute_nested(&greedy_tree, &mut contractor_greedy, &label_map);
        let treesa_idx = execute_nested(&treesa_tree, &mut contractor_treesa, &label_map);
        let greedy_result = contractor_greedy.get_tensor(greedy_idx).unwrap();
        let treesa_result = contractor_treesa.get_tensor(treesa_idx).unwrap();
        assert_eq!(greedy_result.ndim(), 0, "Should produce scalar");
        assert!(
            tensors_approx_equal(greedy_result, treesa_result, 1e-10, 1e-12),
            "Scalar contraction should match"
        );
    }

    // Five-tensor network with mixed ranks (2 and 3); single open index 5.
    // Looser tolerances than above since more operations accumulate error.
    #[test]
    fn test_numerical_five_tensor_network() {
        let code = EinCode::new(
            vec![
                vec![1usize, 2],
                vec![2, 3, 4],
                vec![3, 5],
                vec![4, 5, 6],
                vec![6, 1],
            ],
            vec![5],
        );
        let sizes: HashMap<usize, usize> = [(1, 2), (2, 3), (3, 2), (4, 2), (5, 4), (6, 2)].into();
        let label_map: HashMap<usize, usize> = (1..=6).map(|i| (i, i)).collect();
        let mut contractor_greedy = NaiveContractor::new();
        contractor_greedy.add_tensor(0, vec![2, 3]);
        contractor_greedy.add_tensor(1, vec![3, 2, 2]);
        contractor_greedy.add_tensor(2, vec![2, 4]);
        contractor_greedy.add_tensor(3, vec![2, 4, 2]);
        contractor_greedy.add_tensor(4, vec![2, 2]);
        let mut contractor_treesa = contractor_greedy.clone();
        let greedy_tree =
            optimize_code(&code, &sizes, &GreedyMethod::default()).expect("Greedy should succeed");
        let treesa_tree =
            optimize_code(&code, &sizes, &TreeSA::fast()).expect("TreeSA should succeed");
        let greedy_idx = execute_nested(&greedy_tree, &mut contractor_greedy, &label_map);
        let treesa_idx = execute_nested(&treesa_tree, &mut contractor_treesa, &label_map);
        let greedy_result = contractor_greedy.get_tensor(greedy_idx).unwrap();
        let treesa_result = contractor_treesa.get_tensor(treesa_idx).unwrap();
        assert_eq!(greedy_result.shape(), &[4], "Output should have shape [4]");
        assert!(
            tensors_approx_equal(greedy_result, treesa_result, 1e-8, 1e-10),
            "5-tensor network should produce same result"
        );
    }
}
// Stress tests on larger tensor networks derived from graphs: random
// k-regular graphs, well-known named graphs (C60 fullerene, Tutte,
// Petersen), chains and rings. These primarily check that the optimizers
// terminate and that the resulting space complexity stays within
// empirically chosen bounds.
#[cfg(test)]
mod large_scale_stress_tests {
    use super::*;
    use petgraph::graph::UnGraph;
    use petgraph::visit::EdgeRef;
    use rand::seq::SliceRandom;
    use rand::SeedableRng;
    use std::collections::HashSet;

    /// Build a random (approximately) k-regular undirected graph on `n`
    /// nodes, seeded for reproducibility.
    ///
    /// Strategy: up to `max_outer_attempts` randomized pairing rounds; if a
    /// round produces exactly `target_edges` edges with every degree == k,
    /// that graph is returned. Otherwise a deterministic greedy fill over a
    /// shuffled vertex order is used as a fallback.
    // NOTE(review): the fallback does not re-verify regularity, so the
    // returned graph may have some vertices with degree < k — tests below
    // only rely on approximate edge counts, so this appears intentional.
    fn generate_random_regular_graph(n: usize, k: usize, seed: u64) -> UnGraph<(), ()> {
        use rand::Rng;
        let mut rng = rand::rngs::StdRng::seed_from_u64(seed);
        let mut graph = UnGraph::new_undirected();
        for _ in 0..n {
            graph.add_node(());
        }
        // Handshake lemma: a k-regular graph on n nodes needs n*k even.
        assert!(n * k % 2 == 0, "n*k must be even for k-regular graph");
        let target_edges = (n * k) / 2;
        let mut edges: HashSet<(usize, usize)> = HashSet::new();
        let mut degrees = vec![0usize; n];
        let max_outer_attempts = 20;
        for _attempt in 0..max_outer_attempts {
            edges.clear();
            degrees.fill(0);
            // Oversample attempts (10x) since random pairing can dead-end.
            for _ in 0..target_edges * 10 {
                // Vertices that still need more incident edges.
                let mut candidates: Vec<usize> = (0..n).filter(|&v| degrees[v] < k).collect();
                if candidates.len() < 2 {
                    break;
                }
                // Retry loop: pick a vertex u, then a valid partner v.
                for _ in 0..100 {
                    if candidates.len() < 2 {
                        break;
                    }
                    let i = rng.random_range(0..candidates.len());
                    let u = candidates[i];
                    candidates.swap_remove(i);
                    if candidates.is_empty() {
                        break;
                    }
                    // Partners must not duplicate an edge and must still have
                    // spare degree. Edges stored with endpoints sorted.
                    let valid_partners: Vec<usize> = candidates
                        .iter()
                        .filter(|&&v| {
                            let edge = (u.min(v), u.max(v));
                            !edges.contains(&edge) && degrees[v] < k
                        })
                        .copied()
                        .collect();
                    if valid_partners.is_empty() {
                        continue;
                    }
                    let j = rng.random_range(0..valid_partners.len());
                    let v = valid_partners[j];
                    let edge = (u.min(v), u.max(v));
                    edges.insert(edge);
                    degrees[u] += 1;
                    degrees[v] += 1;
                    // Drop v from candidates once it reaches full degree.
                    if degrees[v] >= k {
                        if let Some(pos) = candidates.iter().position(|&x| x == v) {
                            candidates.swap_remove(pos);
                        }
                    }
                    break;
                }
                if edges.len() >= target_edges {
                    break;
                }
            }
            // Success only if the graph is exactly k-regular.
            if edges.len() == target_edges && degrees.iter().all(|&d| d == k) {
                for (u, v) in &edges {
                    graph.add_edge(
                        petgraph::graph::NodeIndex::new(*u),
                        petgraph::graph::NodeIndex::new(*v),
                        (),
                    );
                }
                return graph;
            }
        }
        // Fallback: greedy pairing over a shuffled vertex order. May leave
        // some vertices under degree k (see NOTE above).
        edges.clear();
        degrees.fill(0);
        let mut available: Vec<usize> = (0..n).collect();
        available.shuffle(&mut rng);
        for (i, &u) in available.iter().enumerate() {
            for &v in available.iter().skip(i + 1) {
                if degrees[u] < k && degrees[v] < k {
                    let edge = (u.min(v), u.max(v));
                    if !edges.contains(&edge) {
                        edges.insert(edge);
                        degrees[u] += 1;
                        degrees[v] += 1;
                    }
                }
                if degrees[u] >= k {
                    break;
                }
            }
        }
        for (u, v) in &edges {
            graph.add_edge(
                petgraph::graph::NodeIndex::new(*u),
                petgraph::graph::NodeIndex::new(*v),
                (),
            );
        }
        graph
    }

    /// Convert a graph into an einsum: one rank-2 tensor per edge, one
    /// rank-1 tensor per vertex, scalar output, all index sizes 2.
    /// Vertex labels are 1-based (petgraph indices are 0-based).
    fn graph_to_eincode(graph: &UnGraph<(), ()>) -> (EinCode<usize>, HashMap<usize, usize>) {
        let n_vertices = graph.node_count();
        let mut ixs: Vec<Vec<usize>> = Vec::new();
        for edge in graph.edge_references() {
            let src = edge.source().index() + 1;
            let dst = edge.target().index() + 1;
            // Canonical (min, max) ordering for edge index lists.
            ixs.push(vec![src.min(dst), src.max(dst)]);
        }
        for v in 0..n_vertices {
            ixs.push(vec![v + 1]);
        }
        let mut sizes = HashMap::new();
        for v in 1..=n_vertices {
            sizes.insert(v, 2);
        }
        let code = EinCode::new(ixs, vec![]);
        (code, sizes)
    }

    // Greedy on a 40-node 3-regular graph: must succeed with bounded sc.
    #[test]
    fn test_random_regular_graph_n40_k3() {
        let graph = generate_random_regular_graph(40, 3, 42);
        assert_eq!(graph.node_count(), 40);
        // Fallback path may yield slightly fewer than the ideal 60 edges.
        assert!(graph.edge_count() > 50, "Should have close to 60 edges");
        let (code, sizes) = graph_to_eincode(&graph);
        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(
            greedy_tree.is_some(),
            "Greedy should succeed on 40-node graph"
        );
        let tree = greedy_tree.unwrap();
        let complexity = contraction_complexity(&tree, &sizes, &code.ixs);
        assert!(
            complexity.sc < 30.0,
            "Space complexity should be reasonable"
        );
    }

    // TreeSA seeded with a target below greedy's sc should not regress much.
    #[test]
    fn test_random_regular_graph_n60_k3_treesa() {
        let graph = generate_random_regular_graph(60, 3, 123);
        let (code, sizes) = graph_to_eincode(&graph);
        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(greedy_tree.is_some());
        let greedy_complexity =
            contraction_complexity(greedy_tree.as_ref().unwrap(), &sizes, &code.ixs);
        let treesa = TreeSA::default()
            .with_sc_target(greedy_complexity.sc - 2.0)
            .with_niters(50)
            .with_ntrials(2);
        let treesa_tree = optimize_code(&code, &sizes, &treesa);
        assert!(
            treesa_tree.is_some(),
            "TreeSA should succeed on 60-node graph"
        );
        let treesa_complexity =
            contraction_complexity(treesa_tree.as_ref().unwrap(), &sizes, &code.ixs);
        assert!(
            treesa_complexity.sc <= greedy_complexity.sc + 1.0,
            "TreeSA should not significantly increase space complexity"
        );
    }

    // Structural sanity of greedy output on a 100-node graph.
    #[test]
    fn test_large_random_regular_graph_n100() {
        let graph = generate_random_regular_graph(100, 3, 456);
        let (code, sizes) = graph_to_eincode(&graph);
        assert!(code.num_tensors() > 200, "Should have many tensors");
        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(greedy_tree.is_some(), "Greedy should handle 100-node graph");
        let tree = greedy_tree.unwrap();
        assert!(tree.is_binary(), "Should produce binary tree");
        assert_eq!(
            tree.leaf_count(),
            code.num_tensors(),
            "Should have correct leaf count"
        );
    }

    // TreeSA path mode on a 50-node graph must produce a path decomposition;
    // default (tree) mode on the same input must also succeed.
    #[test]
    fn test_path_decomposition_random_graph_n50() {
        let graph = generate_random_regular_graph(50, 3, 789);
        let (code, sizes) = graph_to_eincode(&graph);
        let path_treesa = TreeSA::path().with_niters(30).with_ntrials(2);
        let path_tree = optimize_code(&code, &sizes, &path_treesa);
        assert!(path_tree.is_some(), "Path TreeSA should succeed");
        let tree = path_tree.unwrap();
        assert!(
            tree.is_path_decomposition(),
            "Should produce path decomposition"
        );
        let tree_treesa = TreeSA::default().with_niters(30).with_ntrials(2);
        let tree_tree = optimize_code(&code, &sizes, &tree_treesa);
        assert!(tree_tree.is_some());
    }

    // C60 fullerene graph (60 vertices, 90 edges): checks the unoptimized
    // complexity formula and that greedy keeps sc within bound.
    #[test]
    fn test_fullerene_c60_optimization() {
        use crate::test_utils::generate_fullerene_edges;
        let edges = generate_fullerene_edges();
        let mut ixs: Vec<Vec<usize>> = Vec::new();
        for (a, b) in &edges {
            ixs.push(vec![*a, *b]);
        }
        // One rank-1 vertex tensor per vertex (labels 1..=60).
        for v in 1..=60 {
            ixs.push(vec![v]);
        }
        let code = EinCode::new(ixs, vec![]);
        let sizes: HashMap<usize, usize> = (1..=60).map(|v| (v, 2)).collect();
        let unopt = crate::complexity::eincode_complexity(&code, &sizes);
        assert_eq!(
            unopt.tc, 60.0,
            "Unoptimized tc should be 60 (sum of indices)"
        );
        assert_eq!(unopt.sc, 0.0, "Unoptimized sc should be 0 (scalar output)");
        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(greedy_tree.is_some(), "Should optimize C60");
        let tree = greedy_tree.unwrap();
        let complexity = contraction_complexity(&tree, &sizes, &code.ixs);
        assert!(
            complexity.sc <= 20.0,
            "C60 space complexity should be <= 20, got {}",
            complexity.sc
        );
    }

    // Chain (open boundary) and ring (closed boundary) of 10 sites should
    // both optimize to space complexity exactly 2 (= one 2x2 intermediate).
    #[test]
    fn test_chain_and_ring_topologies() {
        let chain_ixs: Vec<Vec<usize>> = (1..=9).map(|i| vec![i, i + 1]).collect();
        let chain_code = EinCode::new(chain_ixs, vec![1, 10]);
        let chain_sizes: HashMap<usize, usize> = (1..=10).map(|v| (v, 2)).collect();
        let chain_tree = optimize_code(&chain_code, &chain_sizes, &GreedyMethod::default());
        assert!(chain_tree.is_some());
        let chain_complexity =
            contraction_complexity(chain_tree.as_ref().unwrap(), &chain_sizes, &chain_code.ixs);
        assert_eq!(chain_complexity.sc, 2.0, "Chain should have sc=2");
        let mut ring_ixs: Vec<Vec<usize>> = (1..=9).map(|i| vec![i, i + 1]).collect();
        // Close the loop with an edge from vertex 10 back to vertex 1.
        ring_ixs.push(vec![10, 1]);
        let ring_code = EinCode::new(ring_ixs, vec![]);
        let ring_sizes: HashMap<usize, usize> = (1..=10).map(|v| (v, 2)).collect();
        let ring_tree = optimize_code(&ring_code, &ring_sizes, &GreedyMethod::default());
        assert!(ring_tree.is_some());
        let ring_complexity =
            contraction_complexity(ring_tree.as_ref().unwrap(), &ring_sizes, &ring_code.ixs);
        assert_eq!(ring_complexity.sc, 2.0, "Ring should have sc=2");
    }

    // Tutte graph with stochastic greedy: best-of-10 trials should reach
    // sc <= 8. Edge list is deduplicated defensively before use.
    #[test]
    fn test_tutte_graph_stochastic_optimization() {
        use crate::test_utils::generate_tutte_edges;
        let edges = generate_tutte_edges();
        let mut ixs: Vec<Vec<usize>> = Vec::new();
        let mut seen_edges: HashSet<(usize, usize)> = HashSet::new();
        for (a, b) in &edges {
            let edge = ((*a).min(*b), (*a).max(*b));
            if !seen_edges.contains(&edge) {
                ixs.push(vec![edge.0, edge.1]);
                seen_edges.insert(edge);
            }
        }
        let max_vertex = edges.iter().flat_map(|(a, b)| [*a, *b]).max().unwrap_or(0);
        let code = EinCode::new(ixs, vec![]);
        let sizes: HashMap<usize, usize> = (1..=max_vertex).map(|v| (v, 2)).collect();
        let mut best_sc = f64::INFINITY;
        for _trial in 0..10 {
            let stochastic_greedy = GreedyMethod::stochastic(100.0);
            if let Some(tree) = optimize_code(&code, &sizes, &stochastic_greedy) {
                let complexity = contraction_complexity(&tree, &sizes, &code.ixs);
                if complexity.sc < best_sc {
                    best_sc = complexity.sc;
                }
            }
        }
        assert!(
            best_sc <= 8.0,
            "Best Tutte graph sc should be <= 8, got {}",
            best_sc
        );
    }

    // Petersen graph (10 vertices, 3-regular): greedy should keep sc <= 6.
    #[test]
    fn test_petersen_graph_optimization() {
        use crate::test_utils::generate_petersen_edges;
        let edges = generate_petersen_edges();
        let mut ixs: Vec<Vec<usize>> = Vec::new();
        for (a, b) in &edges {
            ixs.push(vec![*a, *b]);
        }
        for v in 1..=10 {
            ixs.push(vec![v]);
        }
        let code = EinCode::new(ixs, vec![]);
        let sizes: HashMap<usize, usize> = (1..=10).map(|v| (v, 2)).collect();
        let tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(tree.is_some());
        let complexity = contraction_complexity(tree.as_ref().unwrap(), &sizes, &code.ixs);
        assert!(
            complexity.sc <= 6.0,
            "Petersen sc should be <= 6, got {}",
            complexity.sc
        );
    }

    // 200-node stress test: greedy must terminate with sc within a loose bound.
    #[test]
    fn test_very_large_graph_n200() {
        let graph = generate_random_regular_graph(200, 3, 999);
        let (code, sizes) = graph_to_eincode(&graph);
        assert!(code.num_tensors() > 400, "Should have many tensors");
        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(greedy_tree.is_some(), "Greedy should handle 200-node graph");
        let tree = greedy_tree.unwrap();
        let complexity = contraction_complexity(&tree, &sizes, &code.ixs);
        assert!(
            complexity.sc <= 55.0,
            "Large graph sc should be <= 55, got {}",
            complexity.sc
        );
    }

    // Benchmark graph (3-regular, 220 vertices) loaded from a JSON fixture:
    // TreeSA with sc_target=32 must achieve that bound. Diagnostic output
    // goes to stderr so it shows under `cargo test -- --nocapture`.
    #[test]
    fn test_reg3_220_treesa() {
        let graph_json = include_str!("../../benchmarks/graphs/reg3_220.json");
        let graph_data: serde_json::Value = serde_json::from_str(graph_json).unwrap();
        let edge_list = graph_data["edge_list"].as_array().unwrap();
        let mut ixs: Vec<Vec<usize>> = Vec::new();
        for edge in edge_list {
            let arr = edge.as_array().unwrap();
            let a = arr[0].as_u64().unwrap() as usize;
            let b = arr[1].as_u64().unwrap() as usize;
            ixs.push(vec![a, b]);
        }
        for v in 1..=220 {
            ixs.push(vec![v]);
        }
        let code = EinCode::new(ixs, vec![]);
        let unique_labels = code.unique_labels();
        eprintln!("Unique labels: {}", unique_labels.len());
        eprintln!("First 5 tensors: {:?}", &code.ixs[0..5]);
        eprintln!("Last 5 tensors: {:?}", &code.ixs[545..550]);
        let sizes: HashMap<usize, usize> = unique_labels.iter().map(|&v| (v, 2)).collect();
        // 330 edge tensors + 220 vertex tensors = 550.
        assert_eq!(code.num_tensors(), 550, "Should have 550 tensors");
        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(greedy_tree.is_some(), "Greedy should handle 220-node graph");
        let greedy_complexity =
            contraction_complexity(greedy_tree.as_ref().unwrap(), &sizes, &code.ixs);
        eprintln!(
            "Greedy: tc={:.2}, sc={:.2}",
            greedy_complexity.tc, greedy_complexity.sc
        );
        let treesa = TreeSA::default()
            .with_betas((0..399).map(|i| 0.1 + 0.05 * i as f64).collect())
            .with_ntrials(2)
            .with_niters(10)
            .with_sc_target(32.0);
        let treesa_tree = optimize_code(&code, &sizes, &treesa);
        assert!(treesa_tree.is_some(), "TreeSA should succeed");
        let treesa_complexity =
            contraction_complexity(treesa_tree.as_ref().unwrap(), &sizes, &code.ixs);
        eprintln!(
            "TreeSA: tc={:.2}, sc={:.2}",
            treesa_complexity.tc, treesa_complexity.sc
        );
        assert!(
            treesa_complexity.sc <= 32.0,
            "TreeSA with sc_target=32 should achieve sc <= 32, got {}",
            treesa_complexity.sc
        );
    }

    // TreeSA with an ambitious sc target (greedy - 3) must at least not be
    // much worse than greedy on an 80-node graph.
    #[test]
    fn test_treesa_with_sc_target_large_graph() {
        let graph = generate_random_regular_graph(80, 3, 111);
        let (code, sizes) = graph_to_eincode(&graph);
        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default()).unwrap();
        let greedy_complexity = contraction_complexity(&greedy_tree, &sizes, &code.ixs);
        let sc_target = greedy_complexity.sc - 3.0;
        let treesa = TreeSA::default()
            .with_sc_target(sc_target)
            .with_niters(100)
            .with_ntrials(3);
        let treesa_tree = optimize_code(&code, &sizes, &treesa);
        assert!(treesa_tree.is_some());
        let treesa_complexity =
            contraction_complexity(treesa_tree.as_ref().unwrap(), &sizes, &code.ixs);
        assert!(
            treesa_complexity.sc <= greedy_complexity.sc + 2.0,
            "TreeSA should not be much worse than greedy"
        );
    }

    // All four optimizer configurations must produce structurally valid
    // trees with sensible complexity on the same 30-node input.
    #[test]
    fn test_multiple_optimizers_consistency() {
        let graph = generate_random_regular_graph(30, 3, 222);
        let (code, sizes) = graph_to_eincode(&graph);
        // Shared verification: binary tree, correct leaf count, tc > 0, sc >= 0.
        fn verify_optimizer_result(
            tree: Option<NestedEinsum<usize>>,
            code: &EinCode<usize>,
            sizes: &HashMap<usize, usize>,
            name: &str,
        ) {
            assert!(tree.is_some(), "{} should succeed", name);
            let t = tree.unwrap();
            assert!(t.is_binary(), "{} should produce binary tree", name);
            assert_eq!(
                t.leaf_count(),
                code.num_tensors(),
                "{} should have correct leaves",
                name
            );
            let complexity = contraction_complexity(&t, sizes, &code.ixs);
            assert!(
                complexity.tc > 0.0,
                "{} should have positive time complexity",
                name
            );
            assert!(
                complexity.sc >= 0.0,
                "{} space complexity should be non-negative",
                name
            );
        }
        verify_optimizer_result(
            optimize_code(&code, &sizes, &GreedyMethod::default()),
            &code,
            &sizes,
            "GreedyMethod::default",
        );
        verify_optimizer_result(
            optimize_code(&code, &sizes, &GreedyMethod::stochastic(10.0)),
            &code,
            &sizes,
            "GreedyMethod::stochastic",
        );
        verify_optimizer_result(
            optimize_code(&code, &sizes, &TreeSA::fast()),
            &code,
            &sizes,
            "TreeSA::fast",
        );
        verify_optimizer_result(
            optimize_code(&code, &sizes, &TreeSA::path()),
            &code,
            &sizes,
            "TreeSA::path",
        );
    }
}