#[cfg(all(test, feature = "broken-tests"))]
mod coverage_tests {
    //! Smoke-test coverage for `CommandDispatcher` command routing.
    //!
    //! Most tests here only verify that dispatching a given command variant
    //! routes and completes without panicking. The handler's `Ok`/`Err`
    //! outcome depends on the test environment (filesystem state, enabled
    //! features, external tools), so it is intentionally ignored — written
    //! as `let _ = …` — unless a deterministic outcome can be asserted.
    //! (The previous `assert!(result.is_ok() || result.is_err())` form was
    //! a tautology that always passed and verified nothing.)
    use super::*;
    use crate::cli::commands::{
        AnalyzeCommands, Commands, QddCommands, RefactorCommands, RoadmapCommands,
        ScaffoldCommands, TestSuite, WorkCommands,
    };
    use crate::cli::handlers::cache::CacheCommand;
    use crate::cli::handlers::memory::MemoryCommand;
    use crate::cli::{ContextFormat, OutputFormat};
    use crate::stateless_server::StatelessTemplateServer;
    use std::path::PathBuf;
    use std::sync::Arc;

    /// Builds the shared stateless template server used by routing tests.
    fn create_test_server() -> Arc<StatelessTemplateServer> {
        Arc::new(StatelessTemplateServer::new().expect("internal error"))
    }

    #[tokio::test]
    async fn test_scaffold_project_command_routing() {
        let server = create_test_server();
        let command = Commands::Scaffold {
            command: ScaffoldCommands::Project {
                toolchain: "rust".to_string(),
                templates: vec!["basic".to_string()],
                params: vec![],
                parallel: 1,
            },
        };
        // Smoke test: routing must not panic; outcome is environment-dependent.
        let _ = CommandDispatcher::execute_command(command, server).await;
    }

    #[tokio::test]
    async fn test_scaffold_agent_command_routing() {
        let server = create_test_server();
        let command = Commands::Scaffold {
            command: ScaffoldCommands::Agent {
                name: "test-agent".to_string(),
                template: "mcp-server".to_string(),
                features: vec![],
                quality: "standard".to_string(),
                output: None,
                force: false,
                dry_run: true,
                interactive: false,
                deterministic_core: None,
                probabilistic_wrapper: None,
            },
        };
        let _ = CommandDispatcher::execute_command(command, server).await;
    }

    #[tokio::test]
    async fn test_scaffold_wasm_command_routing() {
        let server = create_test_server();
        let command = Commands::Scaffold {
            command: ScaffoldCommands::Wasm {
                name: "test-wasm".to_string(),
                framework: "wasm-labs".to_string(),
                features: vec![],
                quality: "standard".to_string(),
                output: None,
                force: false,
                dry_run: true,
            },
        };
        let _ = CommandDispatcher::execute_command(command, server).await;
    }

    #[tokio::test]
    async fn test_scaffold_list_subagents() {
        let server = create_test_server();
        let command = Commands::Scaffold {
            command: ScaffoldCommands::ListSubagents { all: false },
        };
        let result = CommandDispatcher::execute_command(command, server).await;
        // Listing built-in subagents needs no external state, so it must succeed.
        assert!(result.is_ok());
    }

    #[tokio::test]
    async fn test_scaffold_create_subagent() {
        let server = create_test_server();
        let command = Commands::Scaffold {
            command: ScaffoldCommands::CreateSubagent {
                agent_name: "complexity-analyst".to_string(),
                output: None,
            },
        };
        let _ = CommandDispatcher::execute_command(command, server).await;
    }

    #[tokio::test]
    async fn test_scaffold_show_tool_mapping() {
        let server = create_test_server();
        let command = Commands::Scaffold {
            command: ScaffoldCommands::ShowToolMapping { agent: None },
        };
        let result = CommandDispatcher::execute_command(command, server).await;
        assert!(result.is_ok());
    }

    #[tokio::test]
    async fn test_scaffold_export_tool_mapping() {
        let server = create_test_server();
        let temp_output = tempfile::NamedTempFile::new().expect("internal error");
        let command = Commands::Scaffold {
            command: ScaffoldCommands::ExportToolMapping {
                output: temp_output.path().to_path_buf(),
            },
        };
        let result = CommandDispatcher::execute_command(command, server).await;
        assert!(result.is_ok());
    }

    #[tokio::test]
    async fn test_execute_analyze_command_routing() {
        let analyze_cmd = AnalyzeCommands::DeadCode {
            project_path: PathBuf::from("."),
            file: None,
            files: vec![],
            toolchain: None,
            format: crate::cli::DeadCodeOutputFormat::Summary,
            output: None,
            threshold: 5,
            include: vec![],
            include_tests: false,
            include_cfg: false,
            watch: false,
            fail_on_violation: false,
            timeout: 60,
            top_files: 10,
        };
        let _ = CommandDispatcher::execute_analyze_command(analyze_cmd).await;
    }

    #[tokio::test]
    async fn test_execute_qdd_command_routing() {
        let qdd_cmd = QddCommands::Status {
            path: PathBuf::from("."),
            format: crate::cli::OutputFormat::Table,
        };
        let _ = CommandDispatcher::execute_qdd_command(qdd_cmd).await;
    }

    #[tokio::test]
    async fn test_execute_refactor_command_routing() {
        let refactor_cmd = RefactorCommands::Auto {
            path: PathBuf::from("."),
            format: crate::cli::RefactorAutoOutputFormat::Summary,
            confidence_threshold: 0.9,
            dry_run: true,
            include: vec![],
            exclude: vec![],
            output: None,
            perf: false,
        };
        let _ = CommandDispatcher::execute_refactor_command(refactor_cmd).await;
    }

    #[tokio::test]
    async fn test_execute_roadmap_init_routing() {
        let roadmap_cmd = RoadmapCommands::Init {
            version: "v1.0.0".to_string(),
            title: "Test Sprint".to_string(),
            duration_days: 14,
            priority: "P0".to_string(),
        };
        let _ = CommandDispatcher::execute_roadmap_command(roadmap_cmd).await;
    }

    #[tokio::test]
    async fn test_execute_roadmap_todos_routing() {
        let roadmap_cmd = RoadmapCommands::Todos {
            sprint: None,
            output: PathBuf::from("/tmp/todos.md"),
            include_quality_gates: false,
        };
        let _ = CommandDispatcher::execute_roadmap_command(roadmap_cmd).await;
    }

    #[tokio::test]
    async fn test_execute_roadmap_start_routing() {
        let roadmap_cmd = RoadmapCommands::Start {
            task_id: "PMAT-0001".to_string(),
            create_branch: false,
        };
        let _ = CommandDispatcher::execute_roadmap_command(roadmap_cmd).await;
    }

    #[tokio::test]
    async fn test_execute_roadmap_complete_routing() {
        let roadmap_cmd = RoadmapCommands::Complete {
            task_id: "PMAT-0001".to_string(),
            skip_quality_check: true,
        };
        let _ = CommandDispatcher::execute_roadmap_command(roadmap_cmd).await;
    }

    #[tokio::test]
    async fn test_execute_roadmap_status_routing() {
        let roadmap_cmd = RoadmapCommands::Status {
            sprint: None,
            task: None,
            format: OutputFormat::Table,
        };
        let _ = CommandDispatcher::execute_roadmap_command(roadmap_cmd).await;
    }

    #[tokio::test]
    async fn test_execute_roadmap_validate_routing() {
        let roadmap_cmd = RoadmapCommands::Validate {
            sprint: "v1.0.0".to_string(),
            strict: false,
        };
        let _ = CommandDispatcher::execute_roadmap_command(roadmap_cmd).await;
    }

    #[tokio::test]
    async fn test_execute_roadmap_quality_check_routing() {
        let roadmap_cmd = RoadmapCommands::QualityCheck {
            task_id: "PMAT-0001".to_string(),
        };
        let _ = CommandDispatcher::execute_roadmap_command(roadmap_cmd).await;
    }

    #[tokio::test]
    async fn test_execute_test_command_all_suites() {
        let _ = CommandDispatcher::execute_test_command(
            TestSuite::Property,
            1,
            false,
            false,
            false,
            5,
            None,
            false,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_test_command_memory_suite() {
        let _ = CommandDispatcher::execute_test_command(
            TestSuite::Memory,
            1,
            false,
            false,
            false,
            5,
            None,
            false,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_test_command_throughput_suite() {
        let _ = CommandDispatcher::execute_test_command(
            TestSuite::Throughput,
            1,
            false,
            false,
            false,
            5,
            None,
            false,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_test_command_integration_suite() {
        let _ = CommandDispatcher::execute_test_command(
            TestSuite::Integration,
            1,
            false,
            false,
            false,
            5,
            None,
            false,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_test_command_regression_suite() {
        let _ = CommandDispatcher::execute_test_command(
            TestSuite::Regression,
            1,
            false,
            false,
            false,
            5,
            None,
            false,
        )
        .await;
    }

    #[test]
    fn test_create_test_config_performance_suite() {
        let config = CommandDispatcher::create_test_config(
            &TestSuite::Performance,
            10,
            false,
            false,
            false,
        );
        assert_eq!(config.test_iterations, 10);
    }

    #[test]
    fn test_create_test_config_regression_suite() {
        let config = CommandDispatcher::create_test_config(
            &TestSuite::Regression,
            5,
            false,
            false,
            false,
        );
        assert!(config.enable_regression_tests);
    }

    #[test]
    fn test_create_test_config_throughput_suite() {
        let config = CommandDispatcher::create_test_config(
            &TestSuite::Throughput,
            5,
            false,
            false,
            false,
        );
        assert!(config.enable_throughput_tests);
    }

    #[test]
    fn test_create_test_config_with_explicit_flags() {
        // Explicit flags should enable every test category regardless of suite.
        let config = CommandDispatcher::create_test_config(
            &TestSuite::Performance,
            5,
            true,
            true,
            true,
        );
        assert!(config.enable_memory_tests);
        assert!(config.enable_throughput_tests);
        assert!(config.enable_regression_tests);
    }

    #[test]
    fn test_print_test_startup_info_all_suites() {
        // Exercises the printing path for every suite variant; must not panic.
        for suite in [
            TestSuite::Performance,
            TestSuite::Property,
            TestSuite::Integration,
            TestSuite::Regression,
            TestSuite::Memory,
            TestSuite::Throughput,
            TestSuite::All,
        ] {
            CommandDispatcher::print_test_startup_info(&suite, 10, 300);
        }
    }

    #[test]
    fn test_write_test_results_with_output_passed() {
        use std::time::Duration;
        use tempfile::TempDir;
        let temp_dir = TempDir::new().expect("internal error");
        let output_path = temp_dir.path().join("results.txt");
        let result: anyhow::Result<()> = Ok(());
        let write_result = CommandDispatcher::write_test_results_if_requested(
            Some(output_path.clone()),
            &TestSuite::Memory,
            Duration::from_secs(5),
            100,
            &result,
        );
        assert!(write_result.is_ok());
        assert!(output_path.exists());
        let contents = std::fs::read_to_string(&output_path).expect("internal error");
        assert!(contents.contains("PASSED"));
    }

    #[test]
    fn test_write_test_results_with_output_failed() {
        use std::time::Duration;
        use tempfile::TempDir;
        let temp_dir = TempDir::new().expect("internal error");
        let output_path = temp_dir.path().join("results.txt");
        let result: anyhow::Result<()> = Err(anyhow::anyhow!("Test failed"));
        let write_result = CommandDispatcher::write_test_results_if_requested(
            Some(output_path.clone()),
            &TestSuite::Memory,
            Duration::from_secs(5),
            100,
            &result,
        );
        assert!(write_result.is_ok());
        assert!(output_path.exists());
        let contents = std::fs::read_to_string(&output_path).expect("internal error");
        assert!(contents.contains("FAILED"));
    }

    #[test]
    fn test_generate_metric_recommendations_negative_slope() {
        // An improving (negative-slope) metric should not produce warnings.
        let recs = CommandDispatcher::generate_metric_recommendations("lint", -100.0);
        assert!(!recs.iter().any(|r| r.contains("WARNING")));
    }

    #[test]
    fn test_generate_metric_recommendations_zero_slope() {
        let recs = CommandDispatcher::generate_metric_recommendations("test-fast", 0.0);
        assert!(!recs.is_empty());
    }

    #[test]
    #[cfg(feature = "tui")]
    fn test_convert_demo_protocol_tui() {
        // Fully qualified to avoid an unused-import warning when `tui` is off.
        let result =
            CommandDispatcher::convert_demo_protocol(crate::cli::DemoProtocol::Tui, false);
        assert!(matches!(result, crate::demo::Protocol::Tui));
    }

    #[tokio::test]
    async fn test_context_command_routing() {
        let server = create_test_server();
        let temp_dir = tempfile::TempDir::new().expect("internal error");
        let command = Commands::Context {
            toolchain: Some("rust".to_string()),
            project_path: temp_dir.path().to_path_buf(),
            output: None,
            format: ContextFormat::Markdown,
            include_large_files: false,
            skip_expensive_metrics: true,
            language: None,
            languages: None,
        };
        let _ = CommandDispatcher::execute_command(command, server).await;
    }

    #[tokio::test]
    async fn test_search_command_routing() {
        let server = create_test_server();
        let command = Commands::Search {
            query: "test".to_string(),
            toolchain: None,
            limit: 10,
        };
        let _ = CommandDispatcher::execute_command(command, server).await;
    }

    #[tokio::test]
    async fn test_validate_command_routing() {
        let server = create_test_server();
        let command = Commands::Validate {
            uri: "test://template".to_string(),
            params: vec![],
        };
        let _ = CommandDispatcher::execute_command(command, server).await;
    }

    #[tokio::test]
    async fn test_execute_quality_gate_all_check_types() {
        use tempfile::TempDir;
        let temp_dir = TempDir::new().expect("internal error");
        // Covers both underscore and hyphen spellings of each check name.
        for check in [
            "dead_code",
            "dead-code",
            "complexity",
            "coverage",
            "sections",
            "provability",
            "satd",
            "entropy",
            "security",
            "duplicates",
            "all",
        ] {
            let _ = CommandDispatcher::execute_quality_gate_command(
                Some(temp_dir.path().to_path_buf()),
                None,
                OutputFormat::Json,
                false,
                vec![check.to_string()],
                None,
                None,
                None,
                false,
                None,
                false,
            )
            .await;
        }
    }

    #[tokio::test]
    async fn test_execute_report_all_analysis_types() {
        use tempfile::TempDir;
        let temp_dir = TempDir::new().expect("internal error");
        let test_file = temp_dir.path().join("test.rs");
        std::fs::write(&test_file, "fn main() {}").expect("internal error");
        // Covers both underscore and hyphen spellings of each analysis name.
        for analysis in [
            "complexity",
            "dead_code",
            "dead-code",
            "duplication",
            "technical_debt",
            "technical-debt",
            "big_o",
            "big-o",
            "all",
        ] {
            let _ = CommandDispatcher::execute_report_command(
                Some(temp_dir.path().to_path_buf()),
                OutputFormat::Json,
                false,
                false,
                false,
                vec![analysis.to_string()],
                None,
                None,
                false,
                false,
                false,
                false,
            )
            .await;
        }
    }

    #[tokio::test]
    async fn test_execute_config_validate() {
        let _ = CommandDispatcher::execute_config_command(
            false,
            false,
            true,
            false,
            None,
            None,
            None,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_config_with_section() {
        let _ = CommandDispatcher::execute_config_command(
            true,
            false,
            false,
            false,
            Some("general".to_string()),
            None,
            None,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_config_with_set() {
        let _ = CommandDispatcher::execute_config_command(
            false,
            false,
            false,
            false,
            None,
            Some(vec!["key=value".to_string()]),
            None,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_show_metrics_no_trend() {
        // With the trend flag off, the command is expected to be rejected.
        let result = CommandDispatcher::execute_show_metrics_command(
            false,
            30,
            None,
            OutputFormat::Table,
            false,
        )
        .await;
        assert!(result.is_err());
    }

    #[tokio::test]
    async fn test_execute_show_metrics_with_specific_metric() {
        let _ = CommandDispatcher::execute_show_metrics_command(
            true,
            30,
            Some("lint".to_string()),
            OutputFormat::Table,
            false,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_show_metrics_json_format() {
        let _ = CommandDispatcher::execute_show_metrics_command(
            true,
            30,
            None,
            OutputFormat::Json,
            false,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_show_metrics_failures_only() {
        let _ = CommandDispatcher::execute_show_metrics_command(
            true,
            30,
            None,
            OutputFormat::Table,
            true,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_record_metric_command_basic() {
        let _ = CommandDispatcher::execute_record_metric_command(
            "test-metric".to_string(),
            100.0,
            None,
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_record_metric_command_with_timestamp() {
        let timestamp = chrono::Utc::now().timestamp();
        let _ = CommandDispatcher::execute_record_metric_command(
            "test-metric".to_string(),
            100.0,
            Some(timestamp),
        )
        .await;
    }

    #[tokio::test]
    async fn test_execute_memory_clear_command() {
        let _ = CommandDispatcher::execute_memory_command(MemoryCommand::Clear {
            force: true,
            pattern: None,
        })
        .await;
    }

    #[tokio::test]
    async fn test_execute_memory_compact_command() {
        let _ = CommandDispatcher::execute_memory_command(MemoryCommand::Compact {
            aggressive: false,
        })
        .await;
    }

    #[tokio::test]
    async fn test_execute_cache_clear_command() {
        let _ = CommandDispatcher::execute_cache_command(CacheCommand::Clear {
            force: true,
            pattern: None,
        })
        .await;
    }

    #[tokio::test]
    async fn test_execute_cache_stats_detailed() {
        let _ = CommandDispatcher::execute_cache_command(CacheCommand::Stats {
            detailed: true,
            format: "json".to_string(),
            history: true,
        })
        .await;
    }

    #[tokio::test]
    async fn test_execute_work_init_command() {
        let temp_dir = tempfile::TempDir::new().expect("internal error");
        let command = WorkCommands::Init {
            github_repo: None,
            no_github: true,
            path: Some(temp_dir.path().to_path_buf()),
        };
        let _ = CommandDispatcher::execute_work_command(&command).await;
    }

    #[tokio::test]
    async fn test_execute_work_start_command() {
        let temp_dir = tempfile::TempDir::new().expect("internal error");
        let command = WorkCommands::Start {
            id: "123".to_string(),
            with_spec: false,
            epic: false,
            path: Some(temp_dir.path().to_path_buf()),
            create_github: false,
            profile: None,
            without: None,
            iteration: 1,
        };
        let _ = CommandDispatcher::execute_work_command(&command).await;
    }

    #[tokio::test]
    async fn test_execute_work_continue_command() {
        let temp_dir = tempfile::TempDir::new().expect("internal error");
        let command = WorkCommands::Continue {
            id: "123".to_string(),
            path: Some(temp_dir.path().to_path_buf()),
        };
        let _ = CommandDispatcher::execute_work_command(&command).await;
    }

    #[tokio::test]
    async fn test_execute_work_complete_command() {
        let temp_dir = tempfile::TempDir::new().expect("internal error");
        let command = WorkCommands::Complete {
            id: "123".to_string(),
            skip_quality: true,
            override_claims: None,
            ticket: None,
            path: Some(temp_dir.path().to_path_buf()),
        };
        let _ = CommandDispatcher::execute_work_command(&command).await;
    }

    #[tokio::test]
    async fn test_execute_work_status_command() {
        let temp_dir = tempfile::TempDir::new().expect("internal error");
        let command = WorkCommands::Status {
            id: None,
            path: Some(temp_dir.path().to_path_buf()),
            active: true,
        };
        let _ = CommandDispatcher::execute_work_command(&command).await;
    }

    #[tokio::test]
    async fn test_execute_work_sync_command() {
        use crate::cli::commands::SyncDirection;
        let temp_dir = tempfile::TempDir::new().expect("internal error");
        let command = WorkCommands::Sync {
            direction: SyncDirection::Full,
            path: Some(temp_dir.path().to_path_buf()),
            dry_run: true,
        };
        let _ = CommandDispatcher::execute_work_command(&command).await;
    }

    #[tokio::test]
    async fn test_execute_work_validate_command() {
        let temp_dir = tempfile::TempDir::new().expect("internal error");
        let command = WorkCommands::Validate {
            path: Some(temp_dir.path().to_path_buf()),
            verbose: true,
            fix: false,
        };
        let _ = CommandDispatcher::execute_work_command(&command).await;
    }

    #[tokio::test]
    async fn test_execute_work_migrate_command() {
        let temp_dir = tempfile::TempDir::new().expect("internal error");
        let command = WorkCommands::Migrate {
            path: Some(temp_dir.path().to_path_buf()),
            dry_run: true,
            backup: true,
        };
        let _ = CommandDispatcher::execute_work_command(&command).await;
    }

    #[tokio::test]
    async fn test_execute_work_list_statuses_command() {
        let command = WorkCommands::ListStatuses;
        let result = CommandDispatcher::execute_work_command(&command).await;
        // Listing statuses requires no external state, so it must succeed.
        assert!(result.is_ok());
    }

    #[tokio::test]
    async fn test_handle_spec_score_command() {
        use crate::cli::commands::{SpecCommands, SpecOutputFormat};
        let temp_file = tempfile::NamedTempFile::new().expect("internal error");
        std::fs::write(temp_file.path(), "# Test Spec\n\n## Summary\nTest content")
            .expect("internal error");
        let command = SpecCommands::Score {
            spec: temp_file.path().to_path_buf(),
            format: SpecOutputFormat::Text,
            output: None,
            verbose: false,
        };
        let _ = CommandDispatcher::handle_spec_command(command).await;
    }

    #[tokio::test]
    async fn test_handle_spec_comply_command() {
        use crate::cli::commands::{SpecCommands, SpecOutputFormat};
        let temp_file = tempfile::NamedTempFile::new().expect("internal error");
        std::fs::write(temp_file.path(), "# Test Spec\n\n## Summary\nTest content")
            .expect("internal error");
        let command = SpecCommands::Comply {
            spec: temp_file.path().to_path_buf(),
            dry_run: true,
            format: SpecOutputFormat::Text,
        };
        let _ = CommandDispatcher::handle_spec_command(command).await;
    }

    #[tokio::test]
    async fn test_handle_spec_create_command() {
        use crate::cli::commands::SpecCommands;
        use tempfile::TempDir;
        let temp_dir = TempDir::new().expect("internal error");
        let command = SpecCommands::Create {
            name: "test-spec".to_string(),
            issue: Some("GH-123".to_string()),
            epic: Some("test-epic".to_string()),
            output: Some(temp_dir.path().to_path_buf()),
        };
        let _ = CommandDispatcher::handle_spec_command(command).await;
    }

    #[tokio::test]
    async fn test_handle_spec_list_command() {
        use crate::cli::commands::{SpecCommands, SpecOutputFormat};
        use tempfile::TempDir;
        let temp_dir = TempDir::new().expect("internal error");
        let command = SpecCommands::List {
            path: temp_dir.path().to_path_buf(),
            min_score: Some(80),
            failing_only: true,
            format: SpecOutputFormat::Json,
        };
        let _ = CommandDispatcher::handle_spec_command(command).await;
    }

    #[test]
    fn test_create_demo_args_merge_threshold_rounding() {
        // 50.7 should be truncated/rounded down to an integer threshold of 50.
        let args = CommandDispatcher::create_demo_args(
            None,
            None,
            None,
            None,
            crate::demo::Protocol::Cli,
            false,
            true,
            8080,
            true,
            None,
            None,
            Some(50.7),
            false,
            None,
            false,
            false,
            None,
        );
        assert_eq!(args.merge_threshold, 50);
    }

    #[tokio::test]
    async fn test_execute_scaffold_agent_with_deterministic_core() {
        let _ = CommandDispatcher::execute_scaffold_agent_command(
            "test-agent".to_string(),
            "hybrid".to_string(),
            vec!["logging".to_string()],
            "strict".to_string(),
            None,
            false,
            true,
            false,
            true,
            true,
        )
        .await;
    }

    #[tokio::test]
    #[cfg(not(feature = "org-intelligence"))]
    async fn test_org_command_without_feature() {
        use crate::cli::commands::OrgCommands;
        let server = create_test_server();
        let command = Commands::Org(OrgCommands::Dashboard {
            org: "test-org".to_string(),
            port: 8080,
            no_browser: true,
        });
        let result = CommandDispatcher::execute_command(command, server).await;
        // Org commands must fail cleanly when the feature is compiled out.
        assert!(result.is_err());
    }
}