1use super::functions::*;
/// Summary of a tree decomposition: its width and node (bag) count.
#[derive(Debug, Clone)]
pub struct TreeDecomposition {
    /// Width of the decomposition.
    pub width: usize,
    /// Number of nodes (bags) in the decomposition tree.
    pub num_nodes: usize,
}
impl TreeDecomposition {
    /// The treewidth witnessed by this decomposition.
    pub fn treewidth(&self) -> usize {
        self.width
    }
    /// Pathwidth estimate derived from the width.
    /// NOTE(review): `2w + 1` looks like a heuristic bound — confirm intended formula.
    pub fn pathwidth(&self) -> usize {
        2 * self.width + 1
    }
    /// Whether the decomposition consists of more than a single node.
    pub fn is_tree(&self) -> bool {
        self.num_nodes > 1
    }
}
28#[derive(Debug, Clone)]
30pub struct CourcelleMSOLChecker {
31 pub max_treewidth: usize,
33 pub formula: String,
35 pub mso_version: u8,
37}
38impl CourcelleMSOLChecker {
39 pub fn new(max_treewidth: usize, formula: impl Into<String>, mso_version: u8) -> Self {
41 Self {
42 max_treewidth,
43 formula: formula.into(),
44 mso_version,
45 }
46 }
47 pub fn running_time(&self) -> String {
49 format!(
50 "O(tower({tw}, |φ|) * n) for treewidth ≤ {tw} and MSO_{v} formula φ",
51 tw = self.max_treewidth,
52 v = self.mso_version
53 )
54 }
55 pub fn check(&self, adj: &[Vec<usize>]) -> bool {
58 let tw = treewidth_upper_bound(adj);
59 tw <= self.max_treewidth
60 }
61 pub fn decidable_properties(&self) -> String {
63 format!(
64 "All graph properties definable in MSO_{} are decidable in \
65 linear time on graphs of treewidth ≤ {}.",
66 self.mso_version, self.max_treewidth
67 )
68 }
69}
/// Description of an FPT algorithm: problem, parameter and running time.
#[derive(Debug, Clone)]
pub struct FPTAlgorithm {
    /// Name of the problem solved.
    pub problem: String,
    /// Name of the parameter.
    pub parameter: String,
    /// Running-time formula as text.
    pub time_complexity: String,
}
impl FPTAlgorithm {
    /// A record counts as FPT when both the problem and parameter are named.
    pub fn is_fpt(&self) -> bool {
        !(self.problem.is_empty() || self.parameter.is_empty())
    }
    /// Evaluates 2^k * n with saturating arithmetic (never panics on overflow).
    pub fn running_time(&self, n: usize, k: usize) -> usize {
        2_usize.saturating_pow(k as u32).saturating_mul(n)
    }
}
/// A (trivial) crown decomposition: an independent set `crown`, its
/// neighbourhood `head`, and the graph with both removed.
#[derive(Debug, Clone)]
pub struct CrownDecomposition {
    /// Independent set of vertices forming the crown.
    pub crown: Vec<usize>,
    /// Neighbours of the crown.
    pub head: Vec<usize>,
    /// Adjacency lists after deleting crown ∪ head.
    pub reduced_adj: Vec<Vec<usize>>,
}
impl CrownDecomposition {
    /// Computes a crown decomposition of the graph given by adjacency lists.
    ///
    /// Current rule: the crown is the set of isolated vertices (trivially an
    /// independent set) and the head is their neighbourhood. Since isolated
    /// vertices have no neighbours, the head computed here is always empty;
    /// the loop is kept so the head stays correct if the crown rule is ever
    /// generalized. `_k` (the parameter) is currently unused.
    pub fn compute(adj: &[Vec<usize>], _k: usize) -> Self {
        let n = adj.len();
        // Crown: isolated vertices.
        let in_crown: Vec<bool> = adj.iter().map(|nbrs| nbrs.is_empty()).collect();
        // Head: N(crown). Empty under the isolated-vertex rule (see above).
        let mut in_head = vec![false; n];
        for v in 0..n {
            if in_crown[v] {
                for &u in &adj[v] {
                    in_head[u] = true;
                }
            }
        }
        let crown: Vec<usize> = (0..n).filter(|&v| in_crown[v]).collect();
        let head: Vec<usize> = (0..n).filter(|&v| in_head[v]).collect();
        // Delete crown ∪ head in one O(n + m) pass: removed vertices get an
        // empty list; surviving lists drop removed endpoints, keeping order.
        // (Replaces a per-removed-vertex clear + full retain sweep, which was
        // O(n · m) per removed vertex; the result is identical.)
        let reduced_adj: Vec<Vec<usize>> = (0..n)
            .map(|v| {
                if in_crown[v] || in_head[v] {
                    Vec::new()
                } else {
                    adj[v]
                        .iter()
                        .copied()
                        .filter(|&u| !(in_crown[u] || in_head[u]))
                        .collect()
                }
            })
            .collect();
        CrownDecomposition {
            crown,
            head,
            reduced_adj,
        }
    }
    /// Number of vertices in the head.
    pub fn head_size(&self) -> usize {
        self.head.len()
    }
    /// Checks that the crown is an independent set in `adj`.
    /// NOTE(review): linear `contains` makes this O(|crown| · m); fine for small crowns.
    pub fn verify(&self, adj: &[Vec<usize>]) -> bool {
        self.crown
            .iter()
            .all(|&u| adj[u].iter().all(|v| !self.crown.contains(v)))
    }
}
/// Parameters of the color-coding technique.
#[derive(Debug, Clone)]
pub struct ColorCoding {
    /// Number of colors used in the coloring.
    pub num_colors: usize,
    /// Whether to derandomize via perfect hash families.
    pub derandomize: bool,
}
impl ColorCoding {
    /// Describes the hash family used for derandomization.
    pub fn universal_hash_family(&self) -> String {
        let c = self.num_colors;
        format!("({c}, k)-perfect hash family of size O({c}^O(1) * log n)")
    }
    /// Running-time statement, depending on the (de)randomization mode.
    pub fn fpt_running_time(&self) -> String {
        let c = self.num_colors;
        match self.derandomize {
            true => format!("O({c}^{c} * n * log n) derandomized via perfect hash families"),
            false => format!("O({c}^{c} * n) expected (randomized)"),
        }
    }
}
185#[allow(dead_code)]
187#[derive(Debug, Clone)]
188pub struct Kernelization {
189 pub problem: String,
190 pub parameter: String,
191 pub kernel_size_bound: KernelSizeBound,
192}
193#[allow(dead_code)]
194impl Kernelization {
195 pub fn new(prob: &str, param: &str, bound: KernelSizeBound) -> Self {
196 Kernelization {
197 problem: prob.to_string(),
198 parameter: param.to_string(),
199 kernel_size_bound: bound,
200 }
201 }
202 pub fn vertex_cover_2k() -> Self {
203 Kernelization::new("VertexCover", "k", KernelSizeBound::Linear(2.0))
204 }
205 pub fn feedback_vertex_set() -> Self {
206 Kernelization::new("FeedbackVertexSet", "k", KernelSizeBound::Quadratic(4.0))
207 }
208 pub fn is_polynomial_kernel(&self) -> bool {
209 matches!(
210 self.kernel_size_bound,
211 KernelSizeBound::Linear(_)
212 | KernelSizeBound::Quadratic(_)
213 | KernelSizeBound::Cubic(_)
214 | KernelSizeBound::Polynomial(_, _)
215 )
216 }
217 pub fn kernel_size(&self, k: usize) -> Option<f64> {
218 match &self.kernel_size_bound {
219 KernelSizeBound::Linear(c) => Some(c * k as f64),
220 KernelSizeBound::Quadratic(c) => Some(c * (k as f64).powi(2)),
221 KernelSizeBound::Cubic(c) => Some(c * (k as f64).powi(3)),
222 KernelSizeBound::Polynomial(c, d) => Some(c * (k as f64).powi(*d as i32)),
223 _ => None,
224 }
225 }
226}
/// Textual description of a kernelization algorithm and its kernel size.
#[derive(Debug, Clone)]
pub struct KernelizationAlgorithm {
    /// Problem being kernelized.
    pub problem: String,
    /// Kernel size as a formula string, e.g. "2k" or "k^2".
    pub kernel_size: String,
}
impl KernelizationAlgorithm {
    /// Heuristic: any formula mentioning the parameter `k` counts as polynomial.
    /// NOTE(review): this also accepts e.g. "2^k" — confirm intended.
    pub fn polynomial_kernel(&self) -> bool {
        self.kernel_size.chars().any(|ch| ch == 'k')
    }
    /// Recognizes a few canonical linear kernel sizes.
    pub fn linear_kernel(&self) -> bool {
        matches!(self.kernel_size.trim(), "k" | "2k" | "3k")
    }
}
/// A depth-bounded branching (search-tree) algorithm description.
#[derive(Debug, Clone)]
pub struct BoundedSearchTree {
    /// Maximum recursion depth (typically the parameter k).
    pub max_depth: usize,
    /// Branching vector of the algorithm.
    pub branching_vector: Vec<usize>,
}
impl BoundedSearchTree {
    /// Trivial run check: only a positive depth does any work.
    pub fn run(&self) -> bool {
        self.max_depth != 0
    }
    /// Running-time statement O(b^k * poly(n)), taking the summed branching
    /// vector (at least 1) as the base b and the depth bound as k.
    /// NOTE(review): summing the branching vector as the base is a rough bound.
    pub fn running_time_analysis(&self) -> String {
        let base = self.branching_vector.iter().copied().sum::<usize>().max(1);
        let depth = self.max_depth;
        format!("O({base}^{depth} * poly(n))")
    }
}
/// FPT solver for Vertex Cover parameterized by the target cover size `k`:
/// a high-degree kernelization followed by a bounded search tree
/// (`vertex_cover_bst`, not defined in this file).
#[derive(Debug, Clone)]
pub struct VertexCoverFPT {
    /// Target cover size (the parameter).
    pub k: usize,
}
impl VertexCoverFPT {
    /// Creates a solver with budget `k`.
    pub fn new(k: usize) -> Self {
        Self { k }
    }
    /// High-degree reduction: repeatedly take any vertex whose degree exceeds
    /// the remaining budget into the cover, deleting it and its incident edges.
    ///
    /// Returns `(reduced_adjacency, vertices_taken, remaining_budget)`. If the
    /// budget reaches 0 while a too-high-degree vertex remains, returns early
    /// with budget 0 (the caller's search then fails if edges remain).
    pub fn lp_kernel(&self, adj: &[Vec<usize>]) -> (Vec<Vec<usize>>, Vec<usize>, usize) {
        let n = adj.len();
        let mut in_cover = vec![false; n];
        let mut new_adj: Vec<Vec<usize>> = adj.to_vec();
        let mut k_remaining = self.k;
        loop {
            // First vertex not yet in the cover whose degree exceeds the budget.
            let high_v = (0..n).find(|&v| !in_cover[v] && new_adj[v].len() > k_remaining);
            match high_v {
                None => break,
                Some(v) => {
                    if k_remaining == 0 {
                        // Out of budget but a high-degree vertex still exists:
                        // stop and report what was collected so far.
                        return (
                            new_adj,
                            in_cover
                                .iter()
                                .enumerate()
                                .filter(|&(_, &b)| b)
                                .map(|(i, _)| i)
                                .collect(),
                            0,
                        );
                    }
                    in_cover[v] = true;
                    k_remaining = k_remaining.saturating_sub(1);
                    // Delete v: clear its list and remove it from its neighbours.
                    let nbrs = new_adj[v].clone();
                    new_adj[v].clear();
                    for u in nbrs {
                        new_adj[u].retain(|&x| x != v);
                    }
                }
            }
        }
        // Indices of vertices taken into the cover.
        let cover_so_far: Vec<usize> = in_cover
            .iter()
            .enumerate()
            .filter(|&(_, &b)| b)
            .map(|(i, _)| i)
            .collect();
        (new_adj, cover_so_far, k_remaining)
    }
    /// Kernelizes, applies a 2k kernel-size cutoff, then finishes with the
    /// bounded search tree; returns the combined cover or `None`.
    pub fn solve(&self, adj: &[Vec<usize>]) -> Option<Vec<usize>> {
        let (kernelized, mut cover, k_rem) = self.lp_kernel(adj);
        // Vertices still participating in the kernel.
        // NOTE(review): `kernelized[v].contains(&u)` is always true for a `u`
        // drawn from `kernelized[v]`, so the `any(..)` reduces to
        // "`kernelized[v]` is nonempty" — the first clause is dead. Confirm
        // whether `!kernelized[u].is_empty()` alone was intended.
        let active: Vec<usize> = (0..kernelized.len())
            .filter(|&v| {
                kernelized[v]
                    .iter()
                    .any(|&u| !kernelized[u].is_empty() || kernelized[v].contains(&u))
            })
            .collect();
        // Kernel-size cutoff.
        // NOTE(review): compares against 2·self.k, not 2·k_rem — verify intent.
        if active.len() > 2 * self.k {
            return None;
        }
        if let Some(bst_cover) = vertex_cover_bst(&kernelized, k_rem) {
            cover.extend(bst_cover);
            Some(cover)
        } else {
            None
        }
    }
    /// Human-readable running-time statement for this solver.
    pub fn running_time(&self) -> String {
        format!(
            "O(1.2738^{k} + {k}·n) for vertex cover with k = {}",
            self.k,
            k = self.k
        )
    }
}
/// Kernelization lower-bound statements for a parameterized problem.
#[derive(Debug, Clone)]
pub struct EtaExpansion {
    /// The parameterized problem the lower bounds refer to.
    pub parameter: String,
}
impl EtaExpansion {
    /// OR-composition based lower bound against polynomial kernels.
    pub fn kernelization_lower_bound(&self) -> String {
        let p = &self.parameter;
        format!("No polynomial kernel for {p} unless NP ⊆ coNP/poly (via OR-composition)")
    }
    /// Cross-composition based lower bound.
    pub fn cross_composition(&self) -> String {
        let p = &self.parameter;
        format!("Cross-composition from {p} into itself rules out polynomial kernels")
    }
}
/// Irrelevant-vertex technique statements for a problem.
#[derive(Debug, Clone)]
pub struct IrrelevantVertex {
    /// Problem the technique is applied to.
    pub problem: String,
}
impl IrrelevantVertex {
    /// Statement of the irrelevant-vertex search step.
    pub fn find_irrelevant_vertex(&self) -> String {
        let p = &self.problem;
        format!("Find a vertex irrelevant to {p} using the grid-minor theorem")
    }
    /// Statement of the instance-reduction step.
    pub fn reduce_instance(&self) -> String {
        let p = &self.problem;
        format!("Remove the irrelevant vertex; reduced {p} instance has smaller parameter")
    }
}
394#[derive(Debug, Clone)]
397pub struct IterativeCompressionVC {
398 pub k: usize,
400}
401impl IterativeCompressionVC {
402 pub fn new(k: usize) -> Self {
404 Self { k }
405 }
406 pub fn compress(&self, adj: &[Vec<usize>], over_cover: &[usize]) -> Option<Vec<usize>> {
409 let m = over_cover.len();
410 if m > 20 {
411 return vertex_cover_bst(adj, self.k);
412 }
413 for mask in 0u32..(1u32 << m) {
414 let mut partial_cover: Vec<usize> = (0..m)
415 .filter(|&i| mask & (1 << i) != 0)
416 .map(|i| over_cover[i])
417 .collect();
418 if partial_cover.len() > self.k {
419 continue;
420 }
421 let k_rem = self.k - partial_cover.len();
422 let n = adj.len();
423 let mut removed = vec![false; n];
424 for &v in &partial_cover {
425 removed[v] = true;
426 }
427 if let Some(extra) = vertex_cover_bst(adj, k_rem) {
428 partial_cover.extend(extra);
429 return Some(partial_cover);
430 }
431 }
432 None
433 }
434}
/// Asymptotic size bound of a kernel as a function of the parameter k.
/// The `f64` payloads are leading constants: `Linear(c)` means c·k,
/// `Quadratic(c)` means c·k^2, `Cubic(c)` means c·k^3 and
/// `Polynomial(c, d)` means c·k^d (see `Kernelization::kernel_size`).
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub enum KernelSizeBound {
    Linear(f64),
    Quadratic(f64),
    Cubic(f64),
    Polynomial(f64, u32),
    /// No polynomial bound; treated as "no concrete size" by `kernel_size`.
    Exponential,
    /// No kernel bound recorded.
    None,
}
/// An FPT method whose running time is base^k * n.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct FPTMethod {
    /// Name of the method.
    pub name: String,
    /// Problem it solves.
    pub problem: String,
    /// Parameter name.
    pub parameter: String,
    /// Base b of the b^k factor in the running time.
    pub runtime_base: f64,
}
#[allow(dead_code)]
impl FPTMethod {
    /// Builds a method record from string slices and a runtime base.
    pub fn new(name: &str, problem: &str, param: &str, base: f64) -> Self {
        Self {
            name: name.to_owned(),
            problem: problem.to_owned(),
            parameter: param.to_owned(),
            runtime_base: base,
        }
    }
    /// Evaluates base^k * n.
    pub fn runtime(&self, k: usize, n: usize) -> f64 {
        (n as f64) * self.runtime_base.powi(k as i32)
    }
    /// Single-exponential means the base is at most 2.
    pub fn is_single_exponential(&self) -> bool {
        self.runtime_base <= 2.0
    }
    /// Crown-reduction kernelization for Vertex Cover (base 1: no exponential factor).
    pub fn kernel_vertex_cover() -> Self {
        Self::new("Crown-reduction", "VertexCover", "k", 1.0)
    }
    /// Simple 2^k bounded search tree for Vertex Cover.
    pub fn bounded_search_tree_vc() -> Self {
        Self::new("BoundedSearchTree", "VertexCover", "k", 2.0)
    }
}
/// Levels of the W-hierarchy of parameterized complexity.
/// Variants are declared in hardness order so the derived `Ord` gives
/// FPT < W1 < W2 < W3 < WP < ParaNP.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum WClass {
    FPT,
    W1,
    W2,
    W3,
    WP,
    ParaNP,
}
#[allow(dead_code)]
impl WClass {
    /// True only for the FPT level.
    pub fn is_fpt(&self) -> bool {
        matches!(self, WClass::FPT)
    }
    /// True for classes strictly above W[1] in the declared ordering.
    pub fn harder_than_w1(&self) -> bool {
        matches!(
            self,
            WClass::W2 | WClass::W3 | WClass::WP | WClass::ParaNP
        )
    }
    /// Vertex Cover is fixed-parameter tractable.
    pub fn vertex_cover_class() -> Self {
        WClass::FPT
    }
    /// Clique is the canonical W[1] problem.
    pub fn clique_class() -> Self {
        WClass::W1
    }
    /// Dominating Set is the canonical W[2] problem.
    pub fn dominating_set_class() -> Self {
        WClass::W2
    }
    /// Short human-readable description of the class.
    pub fn description(&self) -> &'static str {
        match self {
            WClass::FPT => "Fixed-parameter tractable",
            WClass::W1 => "W[1]-hard (parametric intractable)",
            WClass::W2 => "W[2]-hard",
            WClass::W3 => "W[3]-hard",
            WClass::WP => "W[P]-hard",
            WClass::ParaNP => "para-NP-hard",
        }
    }
}
/// Compression / kernel lower-bound statements for a named kernel.
#[derive(Debug, Clone)]
pub struct CognizantFPT {
    /// Name of the kernel being discussed.
    pub kernel: String,
}
impl CognizantFPT {
    /// Statement of the compression step.
    pub fn compression(&self) -> String {
        let k = &self.kernel;
        format!("Compress {k} using a cognizant FPT algorithm")
    }
    /// Cross-composition based kernel lower bound.
    pub fn cross_composition_lower_bound(&self) -> String {
        let k = &self.kernel;
        format!("Cross-composition into {k} shows no polynomial kernel unless NP ⊆ coNP/poly")
    }
}
/// A parameterized reduction between two problems.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ParamReduction {
    /// Source problem.
    pub from_problem: String,
    /// Target problem.
    pub to_problem: String,
    /// Source parameter name.
    pub from_param: String,
    /// Target parameter name.
    pub to_param: String,
    /// New parameter as a function of the old one (formula string).
    pub param_function: String,
}
#[allow(dead_code)]
impl ParamReduction {
    /// Records a parameterized reduction.
    pub fn new(from: &str, to: &str, from_p: &str, to_p: &str, f: &str) -> Self {
        Self {
            from_problem: from.to_owned(),
            to_problem: to.to_owned(),
            from_param: from_p.to_owned(),
            to_param: to_p.to_owned(),
            param_function: f.to_owned(),
        }
    }
    /// An FPT reduction must not let the new parameter depend on the instance
    /// size n.
    /// NOTE(review): substring test — a formula like "ln k" would also match 'n'.
    pub fn is_fpt_reduction(&self) -> bool {
        !self.param_function.contains('n')
    }
}
560#[derive(Debug, Clone)]
562pub struct ColorCodingFPT {
563 pub k: usize,
565 pub use_perfect_hash: bool,
567 pub repetitions: usize,
569}
570impl ColorCodingFPT {
571 pub fn new(k: usize, use_perfect_hash: bool) -> Self {
573 let repetitions = if use_perfect_hash {
574 1
575 } else {
576 (std::f64::consts::E.powi(k as i32) as usize).max(1)
577 };
578 Self {
579 k,
580 use_perfect_hash,
581 repetitions,
582 }
583 }
584 pub fn running_time(&self) -> String {
586 if self.use_perfect_hash {
587 format!("O({}^{} * n * log n) deterministic", self.k, self.k)
588 } else {
589 format!("O(e^{k} * {k}^{k} * n) randomized", k = self.k)
590 }
591 }
592 pub fn find_k_path(&self, adj: &[Vec<usize>]) -> bool {
595 for rep in 0..self.repetitions {
596 let seed = (rep as u64)
597 .wrapping_mul(6364136223846793005)
598 .wrapping_add(1442695040888963407);
599 if color_coding_k_path(adj, self.k, seed) {
600 return true;
601 }
602 }
603 false
604 }
605 pub fn can_detect_hamiltonian_path(&self, n: usize) -> bool {
607 self.k >= n
608 }
609}
/// Coarse classes of the parameterized-complexity landscape.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum WClasses {
    /// Fixed-parameter tractable.
    FPT,
    /// W[1].
    W1,
    /// W[2].
    W2,
    /// W[P].
    WP,
    /// Slice-wise polynomial.
    XP,
}
impl WClasses {
    /// The standard containment chain between these classes.
    pub fn containment_chain(&self) -> String {
        String::from("FPT ⊆ W[1] ⊆ W[2] ⊆ W[P] ⊆ XP")
    }
    /// Only FPT is considered tractable here.
    pub fn is_tractable(&self) -> bool {
        *self == WClasses::FPT
    }
}
/// A treewidth-based algorithm with running time 2^(c·tw) * n^d.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct TreewidthAlgorithm {
    /// Problem solved.
    pub problem: String,
    /// c in the 2^(c·tw) factor.
    pub treewidth_exponent: u32,
    /// d in the n^d factor.
    pub n_exponent: u32,
}
#[allow(dead_code)]
impl TreewidthAlgorithm {
    /// Records an algorithm with the given exponents.
    pub fn new(problem: &str, tw_exp: u32, n_exp: u32) -> Self {
        Self {
            problem: problem.to_owned(),
            treewidth_exponent: tw_exp,
            n_exponent: n_exp,
        }
    }
    /// Evaluates 2^(c·tw) * n^d.
    pub fn runtime(&self, tw: usize, n: usize) -> f64 {
        let exponent = self.treewidth_exponent as i32 * tw as i32;
        2.0_f64.powi(exponent) * (n as f64).powi(self.n_exponent as i32)
    }
    /// SAT with exponents c = 1, d = 1.
    pub fn satisfiability_tw() -> Self {
        Self::new("SAT", 1, 1)
    }
    /// Whether the dependence on n is linear.
    pub fn is_linear_time_in_n(&self) -> bool {
        self.n_exponent == 1
    }
}
/// An explicit tree decomposition: vertex bags arranged in a tree.
#[derive(Debug, Clone)]
pub struct TreeDecomp {
    /// Vertex bags, one per tree node.
    pub bags: Vec<Vec<usize>>,
    /// Adjacency lists of the decomposition tree (indices into `bags`).
    pub tree_adj: Vec<Vec<usize>>,
    /// Index of the root bag.
    /// NOTE(review): assumed < bags.len() when bags is nonempty — `verify`
    /// indexes with it unchecked.
    pub root: usize,
}
impl TreeDecomp {
    /// Width = (largest bag size) - 1; 0 for an empty decomposition.
    pub fn width(&self) -> usize {
        let largest = self.bags.iter().map(Vec::len).max().unwrap_or(0);
        largest.saturating_sub(1)
    }
    /// Partial validity check:
    /// 1. every vertex in 0..n occurs in some bag,
    /// 2. every edge has both endpoints together in some bag,
    /// 3. the decomposition tree is connected (BFS from `root`).
    /// (The "connected subtree per vertex" condition is not checked.)
    pub fn verify(&self, n: usize, edges: &[(usize, usize)]) -> bool {
        let vertices_covered = (0..n).all(|v| self.bags.iter().any(|b| b.contains(&v)));
        if !vertices_covered {
            return false;
        }
        let edges_covered = edges
            .iter()
            .all(|&(u, w)| self.bags.iter().any(|b| b.contains(&u) && b.contains(&w)));
        if !edges_covered {
            return false;
        }
        let num_bags = self.bags.len();
        if num_bags == 0 {
            return true;
        }
        // Breadth-first traversal of the decomposition tree from the root.
        let mut seen = vec![false; num_bags];
        let mut frontier = std::collections::VecDeque::from([self.root]);
        seen[self.root] = true;
        while let Some(bag) = frontier.pop_front() {
            for &next in &self.tree_adj[bag] {
                if !seen[next] {
                    seen[next] = true;
                    frontier.push_back(next);
                }
            }
        }
        seen.into_iter().all(|reached| reached)
    }
}
/// Bidimensionality-based subexponential FPT statements for planar graphs.
#[derive(Debug, Clone)]
pub struct PlanarGraphFPT {
    /// Name of the bidimensional parameter/problem.
    pub parameter: String,
}
impl PlanarGraphFPT {
    /// States the bidimensionality property of the parameter.
    pub fn bidimensionality(&self) -> String {
        let p = &self.parameter;
        format!("{p} is bidimensional: grows as Ω(k^2) on (k×k)-grids")
    }
    /// States the subexponential FPT running time on planar graphs.
    pub fn subexponential_fpt(&self) -> String {
        let p = &self.parameter;
        format!("Planar {p} FPT: 2^O(sqrt(k)) * n via bidimensionality + treewidth")
    }
}