1pub mod complexity;
129pub mod eincode;
130pub mod expr_tree;
131pub mod greedy;
132pub mod incidence_list;
133pub mod json;
134pub mod label;
135pub mod score;
136pub mod simplifier;
137pub mod slicer;
138pub mod treesa;
139pub mod utils;
140
141#[cfg(test)]
142pub mod test_utils;
143
144pub use complexity::{
146 eincode_complexity, flop, nested_complexity, nested_flop, peak_memory, sliced_complexity,
147 ContractionComplexity,
148};
149pub use eincode::{log2_size_dict, uniform_size_dict, EinCode, NestedEinsum, SlicedEinsum};
150pub use greedy::{optimize_greedy, ContractionTree, GreedyMethod, GreedyResult};
151pub use label::Label;
152pub use score::ScoreFunction;
153pub use slicer::{slice_code, CodeSlicer, Slicer, TreeSASlicer};
154pub use treesa::{optimize_treesa, Initializer, TreeSA};
155
156use std::collections::HashMap;
157
/// Common interface for contraction-order optimizers.
///
/// Implementors (e.g. [`GreedyMethod`] and [`TreeSA`]) take an einsum
/// specification plus the dimension of every index label and return a
/// nested contraction tree, or `None` when no ordering can be produced
/// (e.g. for an einsum with no input tensors).
pub trait CodeOptimizer {
    /// Optimize the contraction order of `code`.
    ///
    /// * `code` — the einsum specification (input index lists plus output).
    /// * `size_dict` — dimension size for each index label appearing in `code`.
    ///
    /// Returns the optimized contraction tree, or `None` on failure.
    fn optimize<L: Label>(
        &self,
        code: &EinCode<L>,
        size_dict: &HashMap<L, usize>,
    ) -> Option<NestedEinsum<L>>;
}
167
168impl CodeOptimizer for GreedyMethod {
169 fn optimize<L: Label>(
170 &self,
171 code: &EinCode<L>,
172 size_dict: &HashMap<L, usize>,
173 ) -> Option<NestedEinsum<L>> {
174 optimize_greedy(code, size_dict, self)
175 }
176}
177
178impl CodeOptimizer for TreeSA {
179 fn optimize<L: Label>(
180 &self,
181 code: &EinCode<L>,
182 size_dict: &HashMap<L, usize>,
183 ) -> Option<NestedEinsum<L>> {
184 optimize_treesa(code, size_dict, self)
185 }
186}
187
188pub fn optimize_code<L: Label, O: CodeOptimizer>(
210 code: &EinCode<L>,
211 size_dict: &HashMap<L, usize>,
212 optimizer: &O,
213) -> Option<NestedEinsum<L>> {
214 optimizer.optimize(code, size_dict)
215}
216
217pub fn contraction_complexity<L: Label>(
221 code: &NestedEinsum<L>,
222 size_dict: &HashMap<L, usize>,
223 original_ixs: &[Vec<L>],
224) -> ContractionComplexity {
225 nested_complexity(code, size_dict, original_ixs)
226}
227
// Unit tests for the top-level `optimize_code` / `contraction_complexity` API.
#[cfg(test)]
mod tests {
    use super::*;

    // Greedy on a 3-tensor matrix chain: must return a binary tree with
    // one leaf per input tensor.
    #[test]
    fn test_optimize_code_greedy() {
        let code = EinCode::new(
            vec![vec!['i', 'j'], vec!['j', 'k'], vec!['k', 'l']],
            vec!['i', 'l'],
        );

        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 8);
        sizes.insert('k', 8);
        sizes.insert('l', 4);

        let result = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(result.is_some());

        let nested = result.unwrap();
        assert!(nested.is_binary());
        assert_eq!(nested.leaf_count(), 3);
    }

    // TreeSA (fast preset) on a 2-tensor contraction yields a binary tree.
    #[test]
    fn test_optimize_code_treesa() {
        let code = EinCode::new(vec![vec!['i', 'j'], vec!['j', 'k']], vec!['i', 'k']);

        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 8);
        sizes.insert('k', 4);

        let result = optimize_code(&code, &sizes, &TreeSA::fast());
        assert!(result.is_some());

        let nested = result.unwrap();
        assert!(nested.is_binary());
    }

    // The `contraction_complexity` wrapper reports positive time (tc) and
    // space (sc) complexity for a non-trivial contraction.
    #[test]
    fn test_contraction_complexity_wrapper() {
        let code = EinCode::new(vec![vec!['i', 'j'], vec!['j', 'k']], vec!['i', 'k']);

        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 8);
        sizes.insert('k', 4);

        let result = optimize_code(&code, &sizes, &GreedyMethod::default()).unwrap();
        let complexity = contraction_complexity(&result, &sizes, &code.ixs);

        assert!(complexity.tc > 0.0);
        assert!(complexity.sc > 0.0);
    }

    // Degenerate case: a single input tensor needs no contraction and
    // comes back as a bare leaf.
    #[test]
    fn test_single_tensor() {
        let code = EinCode::new(vec![vec!['i', 'j']], vec!['i', 'j']);

        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 4);

        let result = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(result.is_some());
        assert!(result.unwrap().is_leaf());
    }

    // Trace-like contraction (all indices summed away, scalar output) is accepted.
    #[test]
    fn test_trace() {
        let code = EinCode::new(
            vec![vec!['i', 'j'], vec!['j', 'i']],
            vec![],
        );

        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 4);

        let result = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(result.is_some());
    }

    // An einsum with no input tensors cannot be optimized: expect `None`.
    #[test]
    fn test_empty_code() {
        let code: EinCode<char> = EinCode::new(vec![], vec![]);
        let sizes: HashMap<char, usize> = HashMap::new();

        let result = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(result.is_none());
    }

    // Pipeline test: optimize with greedy, then slice the resulting tree
    // with the TreeSA-based slicer.
    #[test]
    fn test_optimize_code_with_slicing() {
        use crate::slicer::{slice_code, TreeSASlicer};

        let code = EinCode::new(
            vec![vec!['i', 'j'], vec!['j', 'k'], vec!['k', 'l']],
            vec!['i', 'l'],
        );

        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 8);
        sizes.insert('k', 8);
        sizes.insert('l', 4);

        let nested = optimize_code(&code, &sizes, &GreedyMethod::default()).unwrap();

        let slicer = TreeSASlicer::fast();
        let sliced = slice_code(&nested, &sizes, &slicer, &code.ixs);

        assert!(sliced.is_some());
    }

    // A longer (4-tensor) chain: all three complexity measures (time,
    // space, read-write) must come out positive.
    #[test]
    fn test_contraction_complexity_deep_tree() {
        let code = EinCode::new(
            vec![
                vec!['a', 'b'],
                vec!['b', 'c'],
                vec!['c', 'd'],
                vec!['d', 'e'],
            ],
            vec!['a', 'e'],
        );

        let mut sizes = HashMap::new();
        sizes.insert('a', 2);
        sizes.insert('b', 2);
        sizes.insert('c', 2);
        sizes.insert('d', 2);
        sizes.insert('e', 2);

        let nested = optimize_code(&code, &sizes, &GreedyMethod::default()).unwrap();
        let complexity = contraction_complexity(&nested, &sizes, &code.ixs);

        assert!(complexity.tc > 0.0);
        assert!(complexity.sc > 0.0);
        assert!(complexity.rwc > 0.0);
    }

    // TreeSA in path-decomposition mode still returns a valid tree
    // (binary, or a single leaf).
    #[test]
    fn test_optimize_code_treesa_with_path_decomp() {
        let code = EinCode::new(
            vec![vec!['i', 'j'], vec!['j', 'k'], vec!['k', 'l']],
            vec!['i', 'l'],
        );

        let mut sizes = HashMap::new();
        sizes.insert('i', 4);
        sizes.insert('j', 8);
        sizes.insert('k', 8);
        sizes.insert('l', 4);

        let result = optimize_code(&code, &sizes, &TreeSA::path());
        assert!(result.is_some());

        let nested = result.unwrap();
        assert!(nested.is_binary() || nested.is_leaf());
    }
}
397
// Regression tests for issue #13: the root `Node` of the optimized tree
// must report exactly the output indices (`iy`) requested by the original
// `EinCode` — for both optimizers, with scalar and partial outputs.
#[cfg(test)]
mod issue_13_tests {
    use super::*;

    // Greedy, all indices contracted away (scalar output).
    #[test]
    fn test_issue_13_greedy_scalar_output() {
        let code = EinCode::new(vec![vec![0usize, 1], vec![1usize, 2]], vec![]);

        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2)].into();
        let optimizer = GreedyMethod::default();

        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");

        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "Root node's iy should match requested output. Got {:?}, expected {:?}",
                    eins.iy, code.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("Multi-tensor operation should not return Leaf");
            }
        }
    }

    // Greedy, a single surviving output index.
    #[test]
    fn test_issue_13_greedy_partial_output() {
        let code = EinCode::new(vec![vec![0usize, 1], vec![1usize, 2]], vec![0usize]);

        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2)].into();
        let optimizer = GreedyMethod::default();

        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");

        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "Root node's iy should match requested output. Got {:?}, expected {:?}",
                    eins.iy, code.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("Multi-tensor operation should not return Leaf");
            }
        }
    }

    // TreeSA, scalar output.
    #[test]
    fn test_issue_13_treesa_scalar_output() {
        let code = EinCode::new(vec![vec![0usize, 1], vec![1usize, 2]], vec![]);

        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2)].into();
        let optimizer = TreeSA::fast();

        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");

        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "TreeSA root node's iy should match requested output. Got {:?}, expected {:?}",
                    eins.iy, code.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("Multi-tensor operation should not return Leaf");
            }
        }
    }

    // TreeSA, a single surviving output index.
    #[test]
    fn test_issue_13_treesa_partial_output() {
        let code = EinCode::new(vec![vec![0usize, 1], vec![1usize, 2]], vec![2usize]);

        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2)].into();
        let optimizer = TreeSA::fast();

        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");

        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "TreeSA root node's iy should match requested output. Got {:?}, expected {:?}",
                    eins.iy, code.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("Multi-tensor operation should not return Leaf");
            }
        }
    }

    // Greedy on a 3-tensor chain, scalar output.
    #[test]
    fn test_issue_13_greedy_chain_scalar_output() {
        let code = EinCode::new(
            vec![vec![0usize, 1], vec![1usize, 2], vec![2usize, 3]],
            vec![],
        );

        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2), (3, 2)].into();
        let optimizer = GreedyMethod::default();

        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");

        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "Root node's iy should be empty (scalar). Got {:?}",
                    eins.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("3-tensor operation should not return Leaf");
            }
        }
    }

    // Greedy on a 3-tensor chain, partial output.
    #[test]
    fn test_issue_13_greedy_chain_partial_output() {
        let code = EinCode::new(
            vec![vec![0usize, 1], vec![1usize, 2], vec![2usize, 3]],
            vec![0usize],
        );

        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2), (3, 2)].into();
        let optimizer = GreedyMethod::default();

        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");

        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "Root node's iy should be [0]. Got {:?}",
                    eins.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("3-tensor operation should not return Leaf");
            }
        }
    }

    // TreeSA on a 3-tensor chain, scalar output.
    #[test]
    fn test_issue_13_treesa_chain_scalar_output() {
        let code = EinCode::new(
            vec![vec![0usize, 1], vec![1usize, 2], vec![2usize, 3]],
            vec![],
        );

        let sizes: HashMap<usize, usize> = [(0, 2), (1, 2), (2, 2), (3, 2)].into();
        let optimizer = TreeSA::fast();

        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");

        match &tree {
            NestedEinsum::Node { eins, .. } => {
                assert_eq!(
                    eins.iy, code.iy,
                    "TreeSA root node's iy should be empty (scalar). Got {:?}",
                    eins.iy
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("3-tensor operation should not return Leaf");
            }
        }
    }

    // Besides the root output, a 3-tensor contraction must contain at
    // least one non-leaf (intermediate) argument under the root.
    #[test]
    fn test_issue_13_intermediate_nodes_have_correct_output() {
        let code = EinCode::new(
            vec![vec!['i', 'j'], vec!['j', 'k'], vec!['k', 'l']],
            vec!['i', 'l'],
        );

        let sizes: HashMap<char, usize> = [('i', 2), ('j', 2), ('k', 2), ('l', 2)].into();
        let optimizer = GreedyMethod::default();

        let tree = optimize_code(&code, &sizes, &optimizer).expect("should optimize");

        match &tree {
            NestedEinsum::Node { eins, args, .. } => {
                assert_eq!(eins.iy, vec!['i', 'l'], "Root iy should be [i, l]");

                let has_intermediate = args.iter().any(|arg| !arg.is_leaf());
                assert!(
                    has_intermediate,
                    "Should have at least one intermediate node"
                );
            }
            NestedEinsum::Leaf { .. } => {
                panic!("3-tensor operation should not return Leaf");
            }
        }
    }
}
614
// Numerical cross-validation: execute the contraction trees produced by
// the optimizers through the naive reference contractor from `test_utils`
// and check that different contraction orders yield the same values.
#[cfg(test)]
mod numerical_verification_tests {
    use super::*;
    use crate::test_utils::{execute_nested, tensors_approx_equal, NaiveContractor};

    // 4-matrix chain: greedy and TreeSA orderings must agree numerically.
    #[test]
    fn test_numerical_matrix_chain() {
        let code = EinCode::new(
            vec![vec![1usize, 2], vec![2, 3], vec![3, 4], vec![4, 5]],
            vec![1, 5],
        );

        let sizes: HashMap<usize, usize> = [(1, 3), (2, 4), (3, 5), (4, 4), (5, 3)].into();
        let label_map: HashMap<usize, usize> = (1..=5).map(|i| (i, i)).collect();

        // Tensor shapes follow `sizes` for each input's labels.
        let mut contractor_greedy = NaiveContractor::new();
        contractor_greedy.add_tensor(0, vec![3, 4]);
        contractor_greedy.add_tensor(1, vec![4, 5]);
        contractor_greedy.add_tensor(2, vec![5, 4]);
        contractor_greedy.add_tensor(3, vec![4, 3]);
        // Clone so both optimizers contract identical input data.
        let mut contractor_treesa = contractor_greedy.clone();

        let greedy_tree =
            optimize_code(&code, &sizes, &GreedyMethod::default()).expect("Greedy should succeed");
        let treesa_tree =
            optimize_code(&code, &sizes, &TreeSA::fast()).expect("TreeSA should succeed");

        let greedy_idx = execute_nested(&greedy_tree, &mut contractor_greedy, &label_map);
        let treesa_idx = execute_nested(&treesa_tree, &mut contractor_treesa, &label_map);

        let greedy_result = contractor_greedy.get_tensor(greedy_idx).unwrap();
        let treesa_result = contractor_treesa.get_tensor(treesa_idx).unwrap();

        assert_eq!(
            greedy_result.shape(),
            treesa_result.shape(),
            "Shapes should match"
        );
        assert!(
            tensors_approx_equal(greedy_result, treesa_result, 1e-10, 1e-12),
            "Greedy and TreeSA should produce identical numerical results"
        );
    }

    // Label 2 appears in three tensors (a hyperedge): check tree structure
    // for both optimizers, and execute the greedy tree to verify the shape.
    #[test]
    fn test_numerical_with_hyperedge() {
        let code = EinCode::new(vec![vec![1usize, 2], vec![2, 3], vec![2, 4]], vec![1, 3, 4]);

        let sizes: HashMap<usize, usize> = [(1, 2), (2, 3), (3, 2), (4, 2)].into();
        let label_map: HashMap<usize, usize> = (1..=4).map(|i| (i, i)).collect();

        let mut contractor = NaiveContractor::new();
        contractor.add_tensor(0, vec![2, 3]);
        contractor.add_tensor(1, vec![3, 2]);
        contractor.add_tensor(2, vec![3, 2]);

        let greedy_tree =
            optimize_code(&code, &sizes, &GreedyMethod::default()).expect("Greedy should succeed");
        let treesa_tree =
            optimize_code(&code, &sizes, &TreeSA::fast()).expect("TreeSA should succeed");

        assert!(greedy_tree.is_binary(), "Greedy should produce binary tree");
        assert!(treesa_tree.is_binary(), "TreeSA should produce binary tree");

        assert_eq!(greedy_tree.leaf_count(), 3, "Should have 3 leaves");
        assert_eq!(treesa_tree.leaf_count(), 3, "Should have 3 leaves");

        let greedy_idx = execute_nested(&greedy_tree, &mut contractor, &label_map);
        let greedy_result = contractor.get_tensor(greedy_idx).unwrap();
        assert_eq!(
            greedy_result.shape(),
            &[2, 2, 2],
            "Output should be 2x2x2 for [i,k,l]"
        );
    }

    // Cyclic contraction down to a scalar: both optimizers must agree.
    #[test]
    fn test_numerical_scalar_output() {
        let code = EinCode::new(vec![vec![1usize, 2], vec![2, 3], vec![3, 1]], vec![]);

        let sizes: HashMap<usize, usize> = [(1, 3), (2, 4), (3, 3)].into();
        let label_map: HashMap<usize, usize> = (1..=3).map(|i| (i, i)).collect();

        let mut contractor_greedy = NaiveContractor::new();
        contractor_greedy.add_tensor(0, vec![3, 4]);
        contractor_greedy.add_tensor(1, vec![4, 3]);
        contractor_greedy.add_tensor(2, vec![3, 3]);
        let mut contractor_treesa = contractor_greedy.clone();

        let greedy_tree =
            optimize_code(&code, &sizes, &GreedyMethod::default()).expect("Greedy should succeed");
        let treesa_tree =
            optimize_code(&code, &sizes, &TreeSA::fast()).expect("TreeSA should succeed");

        let greedy_idx = execute_nested(&greedy_tree, &mut contractor_greedy, &label_map);
        let treesa_idx = execute_nested(&treesa_tree, &mut contractor_treesa, &label_map);

        let greedy_result = contractor_greedy.get_tensor(greedy_idx).unwrap();
        let treesa_result = contractor_treesa.get_tensor(treesa_idx).unwrap();

        assert_eq!(greedy_result.ndim(), 0, "Should produce scalar");
        assert!(
            tensors_approx_equal(greedy_result, treesa_result, 1e-10, 1e-12),
            "Scalar contraction should match"
        );
    }

    // 5-tensor network with mixed ranks; one open index (label 5, size 4).
    // Slightly looser tolerances than the chain test.
    #[test]
    fn test_numerical_five_tensor_network() {
        let code = EinCode::new(
            vec![
                vec![1usize, 2],
                vec![2, 3, 4],
                vec![3, 5],
                vec![4, 5, 6],
                vec![6, 1],
            ],
            vec![5],
        );

        let sizes: HashMap<usize, usize> = [(1, 2), (2, 3), (3, 2), (4, 2), (5, 4), (6, 2)].into();
        let label_map: HashMap<usize, usize> = (1..=6).map(|i| (i, i)).collect();

        let mut contractor_greedy = NaiveContractor::new();
        contractor_greedy.add_tensor(0, vec![2, 3]);
        contractor_greedy.add_tensor(1, vec![3, 2, 2]);
        contractor_greedy.add_tensor(2, vec![2, 4]);
        contractor_greedy.add_tensor(3, vec![2, 4, 2]);
        contractor_greedy.add_tensor(4, vec![2, 2]);
        let mut contractor_treesa = contractor_greedy.clone();

        let greedy_tree =
            optimize_code(&code, &sizes, &GreedyMethod::default()).expect("Greedy should succeed");
        let treesa_tree =
            optimize_code(&code, &sizes, &TreeSA::fast()).expect("TreeSA should succeed");

        let greedy_idx = execute_nested(&greedy_tree, &mut contractor_greedy, &label_map);
        let treesa_idx = execute_nested(&treesa_tree, &mut contractor_treesa, &label_map);

        let greedy_result = contractor_greedy.get_tensor(greedy_idx).unwrap();
        let treesa_result = contractor_treesa.get_tensor(treesa_idx).unwrap();

        assert_eq!(greedy_result.shape(), &[4], "Output should have shape [4]");
        assert!(
            tensors_approx_equal(greedy_result, treesa_result, 1e-8, 1e-10),
            "5-tensor network should produce same result"
        );
    }
}
786
// Large-scale stress tests: random (near-)regular graphs and well-known
// named graphs, converted to einsum codes with one rank-2 tensor per edge
// (plus, in some tests, a rank-1 tensor per vertex), every index of
// dimension 2, and scalar output unless noted.
#[cfg(test)]
mod large_scale_stress_tests {
    use super::*;
    use petgraph::graph::UnGraph;
    use petgraph::visit::EdgeRef;
    use rand::seq::SliceRandom;
    use rand::SeedableRng;
    use std::collections::HashSet;

    /// Build a random, (approximately) `k`-regular, simple undirected graph
    /// on `n` vertices, seeded for reproducibility.
    ///
    /// Strategy: up to 20 randomized attempts at an exactly k-regular edge
    /// set; if none succeeds, fall back to a greedy fill over a shuffled
    /// vertex order that respects the degree cap but may leave some
    /// vertices below degree `k`.
    fn generate_random_regular_graph(n: usize, k: usize, seed: u64) -> UnGraph<(), ()> {
        use rand::Rng;
        let mut rng = rand::rngs::StdRng::seed_from_u64(seed);
        let mut graph = UnGraph::new_undirected();

        for _ in 0..n {
            graph.add_node(());
        }

        // A k-regular graph has exactly n*k/2 edges, so n*k must be even.
        assert!(n * k % 2 == 0, "n*k must be even for k-regular graph");

        let target_edges = (n * k) / 2;
        let mut edges: HashSet<(usize, usize)> = HashSet::new();
        let mut degrees = vec![0usize; n];

        let max_outer_attempts = 20;
        for _attempt in 0..max_outer_attempts {
            edges.clear();
            degrees.fill(0);

            // Randomized edge insertion; the generous iteration budget
            // (10x the target edge count) gives the sampler room to finish.
            for _ in 0..target_edges * 10 {
                let mut candidates: Vec<usize> = (0..n).filter(|&v| degrees[v] < k).collect();

                if candidates.len() < 2 {
                    break;
                }

                // Up to 100 random endpoint picks to add one new edge.
                for _ in 0..100 {
                    if candidates.len() < 2 {
                        break;
                    }
                    let i = rng.random_range(0..candidates.len());
                    let u = candidates[i];
                    candidates.swap_remove(i);

                    if candidates.is_empty() {
                        break;
                    }

                    // Partners of `u` that keep the graph simple and under the
                    // degree cap.
                    let valid_partners: Vec<usize> = candidates
                        .iter()
                        .filter(|&&v| {
                            let edge = (u.min(v), u.max(v));
                            !edges.contains(&edge) && degrees[v] < k
                        })
                        .copied()
                        .collect();

                    if valid_partners.is_empty() {
                        continue;
                    }

                    let j = rng.random_range(0..valid_partners.len());
                    let v = valid_partners[j];

                    // Endpoints stored sorted so (u,v) == (v,u).
                    let edge = (u.min(v), u.max(v));
                    edges.insert(edge);
                    degrees[u] += 1;
                    degrees[v] += 1;

                    // NOTE(review): this pruning is redundant — `candidates` is
                    // rebuilt on the next pass and the `break` below exits this
                    // loop unconditionally. Harmless, though.
                    if degrees[v] >= k {
                        if let Some(pos) = candidates.iter().position(|&x| x == v) {
                            candidates.swap_remove(pos);
                        }
                    }

                    break;
                }

                if edges.len() >= target_edges {
                    break;
                }
            }

            // Accept only an exactly k-regular edge set.
            if edges.len() == target_edges && degrees.iter().all(|&d| d == k) {
                for (u, v) in &edges {
                    graph.add_edge(
                        petgraph::graph::NodeIndex::new(*u),
                        petgraph::graph::NodeIndex::new(*v),
                        (),
                    );
                }
                return graph;
            }
        }

        // Fallback: greedy fill over a shuffled vertex order. Respects the
        // degree cap but may not reach exact k-regularity.
        edges.clear();
        degrees.fill(0);

        let mut available: Vec<usize> = (0..n).collect();
        available.shuffle(&mut rng);

        for (i, &u) in available.iter().enumerate() {
            for &v in available.iter().skip(i + 1) {
                if degrees[u] < k && degrees[v] < k {
                    let edge = (u.min(v), u.max(v));
                    if !edges.contains(&edge) {
                        edges.insert(edge);
                        degrees[u] += 1;
                        degrees[v] += 1;
                    }
                }
                if degrees[u] >= k {
                    break;
                }
            }
        }

        for (u, v) in &edges {
            graph.add_edge(
                petgraph::graph::NodeIndex::new(*u),
                petgraph::graph::NodeIndex::new(*v),
                (),
            );
        }

        graph
    }

    /// Convert a graph into an einsum code: one rank-2 tensor per edge,
    /// one rank-1 tensor per vertex, every index of dimension 2, scalar
    /// output. Vertex labels are 1-based.
    fn graph_to_eincode(graph: &UnGraph<(), ()>) -> (EinCode<usize>, HashMap<usize, usize>) {
        let n_vertices = graph.node_count();
        let mut ixs: Vec<Vec<usize>> = Vec::new();

        for edge in graph.edge_references() {
            let src = edge.source().index() + 1; // +1: labels are 1-based
            let dst = edge.target().index() + 1;
            ixs.push(vec![src.min(dst), src.max(dst)]);
        }

        // One rank-1 "vertex tensor" per vertex.
        for v in 0..n_vertices {
            ixs.push(vec![v + 1]);
        }

        // Every index label has dimension 2.
        let mut sizes = HashMap::new();
        for v in 1..=n_vertices {
            sizes.insert(v, 2);
        }

        let code = EinCode::new(ixs, vec![]); // scalar output
        (code, sizes)
    }

    // 40-node 3-regular graph: greedy must succeed with modest sc.
    #[test]
    fn test_random_regular_graph_n40_k3() {
        let graph = generate_random_regular_graph(40, 3, 42);

        assert_eq!(graph.node_count(), 40);
        // The generator may fall back to a near-regular graph, so only
        // require "close to" the exact 60 edges of a 3-regular graph.
        assert!(graph.edge_count() > 50, "Should have close to 60 edges");

        let (code, sizes) = graph_to_eincode(&graph);

        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(
            greedy_tree.is_some(),
            "Greedy should succeed on 40-node graph"
        );

        let tree = greedy_tree.unwrap();
        let complexity = contraction_complexity(&tree, &sizes, &code.ixs);

        assert!(
            complexity.sc < 30.0,
            "Space complexity should be reasonable"
        );
    }

    // 60-node graph: TreeSA, seeded with an sc target below greedy's
    // result, should not end up significantly worse than greedy.
    #[test]
    fn test_random_regular_graph_n60_k3_treesa() {
        let graph = generate_random_regular_graph(60, 3, 123);
        let (code, sizes) = graph_to_eincode(&graph);

        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(greedy_tree.is_some());
        let greedy_complexity =
            contraction_complexity(greedy_tree.as_ref().unwrap(), &sizes, &code.ixs);

        let treesa = TreeSA::default()
            .with_sc_target(greedy_complexity.sc - 2.0)
            .with_niters(50)
            .with_ntrials(2);
        let treesa_tree = optimize_code(&code, &sizes, &treesa);
        assert!(
            treesa_tree.is_some(),
            "TreeSA should succeed on 60-node graph"
        );

        let treesa_complexity =
            contraction_complexity(treesa_tree.as_ref().unwrap(), &sizes, &code.ixs);

        assert!(
            treesa_complexity.sc <= greedy_complexity.sc + 1.0,
            "TreeSA should not significantly increase space complexity"
        );
    }

    // 100-node graph (>200 tensors incl. vertex tensors): structural checks.
    #[test]
    fn test_large_random_regular_graph_n100() {
        let graph = generate_random_regular_graph(100, 3, 456);
        let (code, sizes) = graph_to_eincode(&graph);

        assert!(code.num_tensors() > 200, "Should have many tensors");

        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(greedy_tree.is_some(), "Greedy should handle 100-node graph");

        let tree = greedy_tree.unwrap();
        assert!(tree.is_binary(), "Should produce binary tree");
        assert_eq!(
            tree.leaf_count(),
            code.num_tensors(),
            "Should have correct leaf count"
        );
    }

    // TreeSA in path mode must yield a path decomposition on a 50-node
    // graph; the default (tree-shaped) mode must also succeed.
    #[test]
    fn test_path_decomposition_random_graph_n50() {
        let graph = generate_random_regular_graph(50, 3, 789);
        let (code, sizes) = graph_to_eincode(&graph);

        let path_treesa = TreeSA::path().with_niters(30).with_ntrials(2);
        let path_tree = optimize_code(&code, &sizes, &path_treesa);
        assert!(path_tree.is_some(), "Path TreeSA should succeed");

        let tree = path_tree.unwrap();

        assert!(
            tree.is_path_decomposition(),
            "Should produce path decomposition"
        );

        let tree_treesa = TreeSA::default().with_niters(30).with_ntrials(2);
        let tree_tree = optimize_code(&code, &sizes, &tree_treesa);
        assert!(tree_tree.is_some());
    }

    // C60 fullerene graph: checks the unoptimized complexity numbers and
    // that greedy reaches sc <= 20.
    #[test]
    fn test_fullerene_c60_optimization() {
        use crate::test_utils::generate_fullerene_edges;

        let edges = generate_fullerene_edges();
        let mut ixs: Vec<Vec<usize>> = Vec::new();

        for (a, b) in &edges {
            ixs.push(vec![*a, *b]);
        }

        // Rank-1 vertex tensors for all 60 vertices.
        for v in 1..=60 {
            ixs.push(vec![v]);
        }

        let code = EinCode::new(ixs, vec![]); // scalar output
        let sizes: HashMap<usize, usize> = (1..=60).map(|v| (v, 2)).collect();

        let unopt = crate::complexity::eincode_complexity(&code, &sizes);
        assert_eq!(
            unopt.tc, 60.0,
            "Unoptimized tc should be 60 (sum of indices)"
        );
        assert_eq!(unopt.sc, 0.0, "Unoptimized sc should be 0 (scalar output)");

        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(greedy_tree.is_some(), "Should optimize C60");

        let tree = greedy_tree.unwrap();
        let complexity = contraction_complexity(&tree, &sizes, &code.ixs);

        assert!(
            complexity.sc <= 20.0,
            "C60 space complexity should be <= 20, got {}",
            complexity.sc
        );
    }

    // Open chain and closed ring of 10 labels: both should reach sc == 2.0.
    #[test]
    fn test_chain_and_ring_topologies() {
        let chain_ixs: Vec<Vec<usize>> = (1..=9).map(|i| vec![i, i + 1]).collect();
        let chain_code = EinCode::new(chain_ixs, vec![1, 10]);
        let chain_sizes: HashMap<usize, usize> = (1..=10).map(|v| (v, 2)).collect();

        let chain_tree = optimize_code(&chain_code, &chain_sizes, &GreedyMethod::default());
        assert!(chain_tree.is_some());
        let chain_complexity =
            contraction_complexity(chain_tree.as_ref().unwrap(), &chain_sizes, &chain_code.ixs);
        assert_eq!(chain_complexity.sc, 2.0, "Chain should have sc=2");

        let mut ring_ixs: Vec<Vec<usize>> = (1..=9).map(|i| vec![i, i + 1]).collect();
        ring_ixs.push(vec![10, 1]); // close the ring
        let ring_code = EinCode::new(ring_ixs, vec![]);
        let ring_sizes: HashMap<usize, usize> = (1..=10).map(|v| (v, 2)).collect();

        let ring_tree = optimize_code(&ring_code, &ring_sizes, &GreedyMethod::default());
        assert!(ring_tree.is_some());
        let ring_complexity =
            contraction_complexity(ring_tree.as_ref().unwrap(), &ring_sizes, &ring_code.ixs);
        assert_eq!(ring_complexity.sc, 2.0, "Ring should have sc=2");
    }

    // Tutte graph with stochastic greedy: best of 10 randomized trials
    // must reach sc <= 8.
    #[test]
    fn test_tutte_graph_stochastic_optimization() {
        use crate::test_utils::generate_tutte_edges;

        let edges = generate_tutte_edges();
        let mut ixs: Vec<Vec<usize>> = Vec::new();

        // Deduplicate, in case the edge list repeats an undirected edge.
        let mut seen_edges: HashSet<(usize, usize)> = HashSet::new();
        for (a, b) in &edges {
            let edge = ((*a).min(*b), (*a).max(*b));
            if !seen_edges.contains(&edge) {
                ixs.push(vec![edge.0, edge.1]);
                seen_edges.insert(edge);
            }
        }

        let max_vertex = edges.iter().flat_map(|(a, b)| [*a, *b]).max().unwrap_or(0);

        let code = EinCode::new(ixs, vec![]); // scalar output
        let sizes: HashMap<usize, usize> = (1..=max_vertex).map(|v| (v, 2)).collect();

        // Keep the best space complexity over 10 stochastic trials.
        let mut best_sc = f64::INFINITY;
        for _trial in 0..10 {
            let stochastic_greedy = GreedyMethod::stochastic(100.0);
            if let Some(tree) = optimize_code(&code, &sizes, &stochastic_greedy) {
                let complexity = contraction_complexity(&tree, &sizes, &code.ixs);
                if complexity.sc < best_sc {
                    best_sc = complexity.sc;
                }
            }
        }

        assert!(
            best_sc <= 8.0,
            "Best Tutte graph sc should be <= 8, got {}",
            best_sc
        );
    }

    // Petersen graph with vertex tensors: greedy should reach sc <= 6.
    #[test]
    fn test_petersen_graph_optimization() {
        use crate::test_utils::generate_petersen_edges;

        let edges = generate_petersen_edges();
        let mut ixs: Vec<Vec<usize>> = Vec::new();

        for (a, b) in &edges {
            ixs.push(vec![*a, *b]);
        }

        for v in 1..=10 {
            ixs.push(vec![v]);
        }

        let code = EinCode::new(ixs, vec![]);
        let sizes: HashMap<usize, usize> = (1..=10).map(|v| (v, 2)).collect();

        let tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(tree.is_some());

        let complexity = contraction_complexity(tree.as_ref().unwrap(), &sizes, &code.ixs);

        assert!(
            complexity.sc <= 6.0,
            "Petersen sc should be <= 6, got {}",
            complexity.sc
        );
    }

    // 200-node graph: greedy stays tractable and within sc <= 55.
    #[test]
    fn test_very_large_graph_n200() {
        let graph = generate_random_regular_graph(200, 3, 999);
        let (code, sizes) = graph_to_eincode(&graph);

        assert!(code.num_tensors() > 400, "Should have many tensors");

        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(greedy_tree.is_some(), "Greedy should handle 200-node graph");

        let tree = greedy_tree.unwrap();
        let complexity = contraction_complexity(&tree, &sizes, &code.ixs);

        assert!(
            complexity.sc <= 55.0,
            "Large graph sc should be <= 55, got {}",
            complexity.sc
        );
    }

    // Benchmark instance: 3-regular graph on 220 vertices loaded from
    // JSON; TreeSA with sc_target=32 must reach sc <= 32.
    #[test]
    fn test_reg3_220_treesa() {
        let graph_json = include_str!("../../benchmarks/graphs/reg3_220.json");
        let graph_data: serde_json::Value = serde_json::from_str(graph_json).unwrap();
        let edge_list = graph_data["edge_list"].as_array().unwrap();

        let mut ixs: Vec<Vec<usize>> = Vec::new();
        for edge in edge_list {
            let arr = edge.as_array().unwrap();
            let a = arr[0].as_u64().unwrap() as usize;
            let b = arr[1].as_u64().unwrap() as usize;
            ixs.push(vec![a, b]);
        }
        // Rank-1 vertex tensors for all 220 vertices.
        for v in 1..=220 {
            ixs.push(vec![v]);
        }

        let code = EinCode::new(ixs, vec![]);

        let unique_labels = code.unique_labels();
        eprintln!("Unique labels: {}", unique_labels.len());
        eprintln!("First 5 tensors: {:?}", &code.ixs[0..5]);
        // NOTE(review): this slice assumes exactly 550 tensors; the
        // assert_eq below verifies the count, but only after this line
        // would already have panicked on a mismatched JSON file.
        eprintln!("Last 5 tensors: {:?}", &code.ixs[545..550]);

        let sizes: HashMap<usize, usize> = unique_labels.iter().map(|&v| (v, 2)).collect();

        assert_eq!(code.num_tensors(), 550, "Should have 550 tensors");

        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default());
        assert!(greedy_tree.is_some(), "Greedy should handle 220-node graph");

        let greedy_complexity =
            contraction_complexity(greedy_tree.as_ref().unwrap(), &sizes, &code.ixs);
        eprintln!(
            "Greedy: tc={:.2}, sc={:.2}",
            greedy_complexity.tc, greedy_complexity.sc
        );

        // Long annealing schedule: betas 0.1 + 0.05*i for i in 0..399.
        let treesa = TreeSA::default()
            .with_betas((0..399).map(|i| 0.1 + 0.05 * i as f64).collect())
            .with_ntrials(2)
            .with_niters(10)
            .with_sc_target(32.0);
        let treesa_tree = optimize_code(&code, &sizes, &treesa);
        assert!(treesa_tree.is_some(), "TreeSA should succeed");

        let treesa_complexity =
            contraction_complexity(treesa_tree.as_ref().unwrap(), &sizes, &code.ixs);

        eprintln!(
            "TreeSA: tc={:.2}, sc={:.2}",
            treesa_complexity.tc, treesa_complexity.sc
        );

        assert!(
            treesa_complexity.sc <= 32.0,
            "TreeSA with sc_target=32 should achieve sc <= 32, got {}",
            treesa_complexity.sc
        );
    }

    // TreeSA given a target 3 below greedy's sc: only require it not to be
    // much worse than greedy (the tighter target may be infeasible).
    #[test]
    fn test_treesa_with_sc_target_large_graph() {
        let graph = generate_random_regular_graph(80, 3, 111);
        let (code, sizes) = graph_to_eincode(&graph);

        let greedy_tree = optimize_code(&code, &sizes, &GreedyMethod::default()).unwrap();
        let greedy_complexity = contraction_complexity(&greedy_tree, &sizes, &code.ixs);

        let sc_target = greedy_complexity.sc - 3.0;
        let treesa = TreeSA::default()
            .with_sc_target(sc_target)
            .with_niters(100)
            .with_ntrials(3);

        let treesa_tree = optimize_code(&code, &sizes, &treesa);
        assert!(treesa_tree.is_some());

        let treesa_complexity =
            contraction_complexity(treesa_tree.as_ref().unwrap(), &sizes, &code.ixs);

        assert!(
            treesa_complexity.sc <= greedy_complexity.sc + 2.0,
            "TreeSA should not be much worse than greedy"
        );
    }

    // Sanity-check all four optimizer configurations on the same instance:
    // each must yield a binary tree with the right leaf count and sane
    // complexity numbers.
    #[test]
    fn test_multiple_optimizers_consistency() {
        let graph = generate_random_regular_graph(30, 3, 222);
        let (code, sizes) = graph_to_eincode(&graph);

        // Shared verification of one optimizer's output.
        fn verify_optimizer_result(
            tree: Option<NestedEinsum<usize>>,
            code: &EinCode<usize>,
            sizes: &HashMap<usize, usize>,
            name: &str,
        ) {
            assert!(tree.is_some(), "{} should succeed", name);
            let t = tree.unwrap();
            assert!(t.is_binary(), "{} should produce binary tree", name);
            assert_eq!(
                t.leaf_count(),
                code.num_tensors(),
                "{} should have correct leaves",
                name
            );

            let complexity = contraction_complexity(&t, sizes, &code.ixs);
            assert!(
                complexity.tc > 0.0,
                "{} should have positive time complexity",
                name
            );
            assert!(
                complexity.sc >= 0.0,
                "{} space complexity should be non-negative",
                name
            );
        }

        verify_optimizer_result(
            optimize_code(&code, &sizes, &GreedyMethod::default()),
            &code,
            &sizes,
            "GreedyMethod::default",
        );
        verify_optimizer_result(
            optimize_code(&code, &sizes, &GreedyMethod::stochastic(10.0)),
            &code,
            &sizes,
            "GreedyMethod::stochastic",
        );
        verify_optimizer_result(
            optimize_code(&code, &sizes, &TreeSA::fast()),
            &code,
            &sizes,
            "TreeSA::fast",
        );
        verify_optimizer_result(
            optimize_code(&code, &sizes, &TreeSA::path()),
            &code,
            &sizes,
            "TreeSA::path",
        );
    }
}