#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    /// `roadmap init` with every flag supplied parses into `Init` carrying
    /// exactly the given values.
    #[test]
    fn test_roadmap_command_parsing() {
        let cmd = RoadmapCommand::try_parse_from([
            "roadmap",
            "init",
            "--version",
            "v1.0.0",
            "--title",
            "Test Sprint",
            "--duration-days",
            "7",
            "--priority",
            "P0",
        ]);
        let parsed = cmd.expect("fully-specified init command should parse");
        match parsed.command {
            RoadmapSubcommand::Init {
                version,
                title,
                duration_days,
                priority,
            } => {
                assert_eq!(version, "v1.0.0");
                assert_eq!(title, "Test Sprint");
                assert_eq!(duration_days, 7);
                assert_eq!(priority, "P0");
            }
            _ => panic!("Expected Init subcommand"),
        }
    }

    /// `handle_init` creates the roadmap file and embeds the version, title
    /// and priority in its contents.
    #[test]
    fn test_handle_init_command() {
        let temp_dir = TempDir::new().unwrap();
        let roadmap_path = temp_dir.path().join("roadmap.md");
        let result = handle_init(
            "v2.0.0".to_string(),
            "Test Initiative".to_string(),
            14,
            "P1".to_string(),
            roadmap_path.clone(),
        );
        assert!(result.is_ok());
        assert!(roadmap_path.exists());
        let content = fs::read_to_string(&roadmap_path).unwrap();
        assert!(content.contains("v2.0.0"));
        assert!(content.contains("Test Initiative"));
        assert!(content.contains("P1"));
    }

    /// An unrecognised priority string is rejected by `handle_init`.
    #[test]
    fn test_handle_init_invalid_priority() {
        let temp_dir = TempDir::new().unwrap();
        let roadmap_path = temp_dir.path().join("roadmap.md");
        let result = handle_init(
            "v1.0.0".to_string(),
            "Test".to_string(),
            14,
            "INVALID_PRIORITY".to_string(),
            roadmap_path,
        );
        assert!(result.is_err());
    }

    /// `roadmap todos` with explicit sprint, output path and the
    /// quality-gates flag parses into `Todos` with those values.
    #[test]
    fn test_todos_subcommand_parsing() {
        let cmd = RoadmapCommand::try_parse_from([
            "roadmap",
            "todos",
            "--sprint",
            "v1.0.0",
            "--output",
            "custom_todos.md",
            "--include-quality-gates",
        ]);
        let parsed = cmd.expect("todos command with all flags should parse");
        match parsed.command {
            RoadmapSubcommand::Todos {
                sprint,
                output,
                include_quality_gates,
            } => {
                assert_eq!(sprint, Some("v1.0.0".to_string()));
                assert_eq!(output, PathBuf::from("custom_todos.md"));
                assert!(include_quality_gates);
            }
            _ => panic!("Expected Todos subcommand"),
        }
    }

    /// `roadmap start <id> --create-branch` parses into `Start`.
    #[test]
    fn test_start_subcommand_parsing() {
        let cmd =
            RoadmapCommand::try_parse_from(["roadmap", "start", "PMAT-1001", "--create-branch"]);
        let parsed = cmd.expect("start command should parse");
        match parsed.command {
            RoadmapSubcommand::Start {
                task_id,
                create_branch,
            } => {
                assert_eq!(task_id, "PMAT-1001");
                assert!(create_branch);
            }
            _ => panic!("Expected Start subcommand"),
        }
    }

    /// `roadmap complete <id> --skip-quality-check` parses into `Complete`.
    #[test]
    fn test_complete_subcommand_parsing() {
        let cmd = RoadmapCommand::try_parse_from([
            "roadmap",
            "complete",
            "PMAT-1001",
            "--skip-quality-check",
        ]);
        let parsed = cmd.expect("complete command should parse");
        match parsed.command {
            RoadmapSubcommand::Complete {
                task_id,
                skip_quality_check,
            } => {
                assert_eq!(task_id, "PMAT-1001");
                assert!(skip_quality_check);
            }
            _ => panic!("Expected Complete subcommand"),
        }
    }

    /// `Priority::from_str` accepts "P0"/"P1"/"P2" and rejects anything else.
    #[test]
    fn test_priority_from_str() {
        assert_eq!(Priority::from_str("P0").unwrap(), Priority::P0);
        assert_eq!(Priority::from_str("P1").unwrap(), Priority::P1);
        assert_eq!(Priority::from_str("P2").unwrap(), Priority::P2);
        assert!(Priority::from_str("INVALID").is_err());
    }

    /// Starting a well-formed task id without branch creation succeeds.
    #[test]
    fn test_handle_start_task() {
        let result = handle_start("PMAT-1001".to_string(), false);
        assert!(result.is_ok());
    }

    /// Smoke test: with branch creation requested the outcome depends on the
    /// local git environment, so this only verifies the call returns without
    /// panicking. (The previous `assert!(result.is_ok() || result.is_err())`
    /// was a tautology that could never fail and verified nothing.)
    #[test]
    fn test_handle_start_task_with_branch() {
        let _ = handle_start("PMAT-2001".to_string(), true);
    }
}
#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod property_tests {
    use proptest::prelude::*;

    proptest! {
        /// Harness smoke test: proptest can generate arbitrary strings for
        /// this module without panicking. The assertion is intentionally
        /// trivial; the value is in exercising the generator machinery.
        #[test]
        fn basic_property_stability(_input in ".*") {
            prop_assert!(true);
        }

        /// The generator range `0u32..1000` is half-open, so every sample is
        /// strictly below 1000. The bound is asserted exactly (the earlier
        /// `< 1001` check was slack and would not catch an off-by-one in the
        /// generator).
        #[test]
        fn module_consistency_check(x in 0u32..1000) {
            prop_assert!(x < 1000);
        }
    }
}
#[cfg(all(test, feature = "broken-tests"))]
mod coverage_tests {
    use super::*;
    use chrono::TimeZone;
    use std::fs;
    use tempfile::TempDir;

    /// Builds a `RoadmapConfig` rooted in `temp_dir` with quality gates and
    /// task-id enforcement enabled, but git branch creation and the pre-push
    /// quality check disabled so tests never touch a real repository.
    fn create_test_config(temp_dir: &TempDir) -> RoadmapConfig {
        RoadmapConfig {
            enabled: true,
            path: temp_dir.path().join("roadmap.md"),
            auto_generate_todos: true,
            enforce_quality_gates: true,
            require_task_ids: true,
            task_id_pattern: "PMAT-[0-9]{4}".to_string(),
            quality_gates: QualityGateConfig::default(),
            git: GitConfig {
                create_branches: false,
                branch_pattern: "feature/{task_id}".to_string(),
                commit_pattern: "{task_id}: {message}".to_string(),
                require_quality_check: false,
            },
            tracking: TrackingConfig::default(),
        }
    }

    /// Writes a single-sprint roadmap to `path` (current sprint "v1.0.0"
    /// containing one planned task "PMAT-0001") and returns the in-memory
    /// copy for further inspection.
    fn create_sample_roadmap(path: &Path) -> Roadmap {
        let task = Task {
            id: "PMAT-0001".to_string(),
            description: "Test task description".to_string(),
            status: TaskStatus::Planned,
            complexity: Complexity::Medium,
            priority: Priority::P1,
            assignee: Some("developer".to_string()),
            started_at: None,
            completed_at: None,
        };
        let sprint = Sprint {
            version: "v1.0.0".to_string(),
            title: "Test Sprint".to_string(),
            start_date: Utc.with_ymd_and_hms(2025, 1, 1, 0, 0, 0).unwrap(),
            end_date: Utc.with_ymd_and_hms(2025, 1, 15, 0, 0, 0).unwrap(),
            priority: Priority::P0,
            tasks: vec![task],
            definition_of_done: vec![
                "All tests pass".to_string(),
                "Documentation updated".to_string(),
            ],
            quality_gates: vec!["Coverage > 80%".to_string()],
        };
        let mut roadmap = Roadmap {
            current_sprint: Some("v1.0.0".to_string()),
            sprints: HashMap::new(),
            backlog: Vec::new(),
            completed_sprints: Vec::new(),
        };
        roadmap.sprints.insert("v1.0.0".to_string(), sprint);
        roadmap.to_file(path).expect("Failed to write roadmap");
        roadmap
    }

    /// `roadmap status --sprint … --format json` parses into `Status` with a
    /// sprint, no task, and JSON output.
    #[test]
    fn test_status_subcommand_parsing_with_sprint() {
        let cmd = RoadmapCommand::try_parse_from([
            "roadmap", "status", "--sprint", "v1.0.0", "--format", "json",
        ]);
        let parsed = cmd.expect("status --sprint should parse");
        match parsed.command {
            RoadmapSubcommand::Status {
                sprint,
                task,
                format,
            } => {
                assert_eq!(sprint, Some("v1.0.0".to_string()));
                assert!(task.is_none());
                assert_eq!(format, OutputFormat::Json);
            }
            _ => panic!("Expected Status subcommand"),
        }
    }

    /// `roadmap status --task … --format json` parses into `Status` with a
    /// task, no sprint, and JSON output.
    #[test]
    fn test_status_subcommand_parsing_with_task() {
        let cmd = RoadmapCommand::try_parse_from([
            "roadmap",
            "status",
            "--task",
            "PMAT-0001",
            "--format",
            "json",
        ]);
        let parsed = cmd.expect("status --task should parse");
        match parsed.command {
            RoadmapSubcommand::Status {
                sprint,
                task,
                format,
            } => {
                assert!(sprint.is_none());
                assert_eq!(task, Some("PMAT-0001".to_string()));
                assert_eq!(format, OutputFormat::Json);
            }
            _ => panic!("Expected Status subcommand"),
        }
    }

    /// `roadmap validate --sprint … --strict` parses with `strict` set.
    #[test]
    fn test_validate_subcommand_parsing() {
        let cmd = RoadmapCommand::try_parse_from([
            "roadmap", "validate", "--sprint", "v1.0.0", "--strict",
        ]);
        let parsed = cmd.expect("validate --strict should parse");
        match parsed.command {
            RoadmapSubcommand::Validate { sprint, strict } => {
                assert_eq!(sprint, "v1.0.0");
                assert!(strict);
            }
            _ => panic!("Expected Validate subcommand"),
        }
    }

    /// Without `--strict`, `validate` defaults to non-strict mode.
    #[test]
    fn test_validate_subcommand_without_strict() {
        let cmd = RoadmapCommand::try_parse_from(["roadmap", "validate", "--sprint", "v2.0.0"]);
        let parsed = cmd.expect("validate without --strict should parse");
        match parsed.command {
            RoadmapSubcommand::Validate { sprint, strict } => {
                assert_eq!(sprint, "v2.0.0");
                assert!(!strict);
            }
            _ => panic!("Expected Validate subcommand"),
        }
    }

    /// `roadmap quality-check --task-id …` parses into `QualityCheck`.
    #[test]
    fn test_quality_check_subcommand_parsing() {
        let cmd =
            RoadmapCommand::try_parse_from(["roadmap", "quality-check", "--task-id", "PMAT-1234"]);
        let parsed = cmd.expect("quality-check should parse");
        match parsed.command {
            RoadmapSubcommand::QualityCheck { task_id } => {
                assert_eq!(task_id, "PMAT-1234");
            }
            _ => panic!("Expected QualityCheck subcommand"),
        }
    }

    /// `roadmap init` without optional flags uses the declared defaults:
    /// 14-day duration and priority "P0".
    #[test]
    fn test_init_subcommand_with_defaults() {
        let cmd = RoadmapCommand::try_parse_from([
            "roadmap",
            "init",
            "--version",
            "v3.0.0",
            "--title",
            "New Sprint",
        ]);
        let parsed = cmd.expect("init with defaults should parse");
        match parsed.command {
            RoadmapSubcommand::Init {
                version,
                title,
                duration_days,
                priority,
            } => {
                assert_eq!(version, "v3.0.0");
                assert_eq!(title, "New Sprint");
                assert_eq!(duration_days, 14);
                assert_eq!(priority, "P0");
            }
            _ => panic!("Expected Init subcommand"),
        }
    }

    /// Bare `roadmap todos` defaults to no sprint filter, "todos.md" output,
    /// and quality gates excluded.
    #[test]
    fn test_todos_subcommand_defaults() {
        let cmd = RoadmapCommand::try_parse_from(["roadmap", "todos"]);
        let parsed = cmd.expect("bare todos should parse");
        match parsed.command {
            RoadmapSubcommand::Todos {
                sprint,
                output,
                include_quality_gates,
            } => {
                assert!(sprint.is_none());
                assert_eq!(output, PathBuf::from("todos.md"));
                assert!(!include_quality_gates);
            }
            _ => panic!("Expected Todos subcommand"),
        }
    }

    /// `roadmap start <id>` defaults to no branch creation.
    #[test]
    fn test_start_subcommand_defaults() {
        let cmd = RoadmapCommand::try_parse_from(["roadmap", "start", "PMAT-5001"]);
        let parsed = cmd.expect("start with defaults should parse");
        match parsed.command {
            RoadmapSubcommand::Start {
                task_id,
                create_branch,
            } => {
                assert_eq!(task_id, "PMAT-5001");
                assert!(!create_branch);
            }
            _ => panic!("Expected Start subcommand"),
        }
    }

    /// `roadmap complete <id>` defaults to running the quality check.
    #[test]
    fn test_complete_subcommand_defaults() {
        let cmd = RoadmapCommand::try_parse_from(["roadmap", "complete", "PMAT-6001"]);
        let parsed = cmd.expect("complete with defaults should parse");
        match parsed.command {
            RoadmapSubcommand::Complete {
                task_id,
                skip_quality_check,
            } => {
                assert_eq!(task_id, "PMAT-6001");
                assert!(!skip_quality_check);
            }
            _ => panic!("Expected Complete subcommand"),
        }
    }

    /// `init_sprint` on a missing file creates a roadmap whose first sprint
    /// becomes the current sprint.
    #[tokio::test]
    async fn test_init_sprint_creates_new_roadmap() {
        let temp_dir = TempDir::new().unwrap();
        let roadmap_path = temp_dir.path().join("new_roadmap.md");
        let result = init_sprint(&roadmap_path, "v1.0.0", "Initial Sprint", 14, "P0").await;
        assert!(result.is_ok());
        assert!(roadmap_path.exists());
        let roadmap = Roadmap::from_file(&roadmap_path).unwrap();
        assert_eq!(roadmap.current_sprint, Some("v1.0.0".to_string()));
        assert!(roadmap.sprints.contains_key("v1.0.0"));
    }

    /// Adding a second sprint keeps both sprints and leaves the current
    /// sprint unchanged.
    #[tokio::test]
    async fn test_init_sprint_adds_to_existing_roadmap() {
        let temp_dir = TempDir::new().unwrap();
        let roadmap_path = temp_dir.path().join("existing_roadmap.md");
        create_sample_roadmap(&roadmap_path);
        let result = init_sprint(&roadmap_path, "v2.0.0", "Second Sprint", 7, "P1").await;
        assert!(result.is_ok());
        let roadmap = Roadmap::from_file(&roadmap_path).unwrap();
        assert!(roadmap.sprints.contains_key("v1.0.0"));
        assert!(roadmap.sprints.contains_key("v2.0.0"));
        assert_eq!(roadmap.current_sprint, Some("v1.0.0".to_string()));
    }

    /// An unparseable priority string does not fail `init_sprint`; the
    /// sprint is created with the P0 fallback instead.
    #[tokio::test]
    async fn test_init_sprint_with_invalid_priority() {
        let temp_dir = TempDir::new().unwrap();
        let roadmap_path = temp_dir.path().join("roadmap.md");
        let result = init_sprint(&roadmap_path, "v1.0.0", "Test", 14, "INVALID").await;
        assert!(result.is_ok());
        let roadmap = Roadmap::from_file(&roadmap_path).unwrap();
        let sprint = roadmap.get_sprint("v1.0.0").unwrap();
        assert_eq!(sprint.priority, Priority::P0);
    }

    /// `start_task` flips the task's persisted status to `InProgress`.
    #[tokio::test]
    async fn test_start_task_updates_status() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let result = start_task(&config.path, "PMAT-0001", false, &config).await;
        assert!(result.is_ok());
        let roadmap = Roadmap::from_file(&config.path).unwrap();
        let task = roadmap.get_task("PMAT-0001").unwrap();
        assert_eq!(task.status, TaskStatus::InProgress);
    }

    /// Starting an unknown task id is an error.
    #[tokio::test]
    async fn test_start_task_nonexistent() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let result = start_task(&config.path, "PMAT-9999", false, &config).await;
        assert!(result.is_err());
    }

    /// With the quality check skipped, a started task completes and is
    /// persisted as `Completed`.
    #[tokio::test]
    async fn test_complete_task_with_skip_quality_check() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        start_task(&config.path, "PMAT-0001", false, &config)
            .await
            .unwrap();
        let result = complete_task(&config.path, "PMAT-0001", true, &config).await;
        assert!(result.is_ok());
        let roadmap = Roadmap::from_file(&config.path).unwrap();
        let task = roadmap.get_task("PMAT-0001").unwrap();
        assert_eq!(task.status, TaskStatus::Completed);
    }

    /// `show_status` succeeds when given an existing sprint.
    #[tokio::test]
    async fn test_show_status_with_sprint() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let result = show_status(&config.path, Some("v1.0.0"), None, OutputFormat::Json).await;
        assert!(result.is_ok());
    }

    /// `show_status` succeeds when given an existing task id.
    #[tokio::test]
    async fn test_show_status_with_task() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let result = show_status(&config.path, None, Some("PMAT-0001"), OutputFormat::Json).await;
        assert!(result.is_ok());
    }

    /// With neither sprint nor task given, `show_status` falls back to the
    /// roadmap's current sprint.
    #[tokio::test]
    async fn test_show_status_uses_current_sprint() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let result = show_status(&config.path, None, None, OutputFormat::Json).await;
        assert!(result.is_ok());
    }

    /// Requesting a sprint that does not exist is an error.
    #[tokio::test]
    async fn test_show_status_nonexistent_sprint() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let result = show_status(&config.path, Some("v99.0.0"), None, OutputFormat::Json).await;
        assert!(result.is_err());
    }

    /// A sprint whose every task is completed validates cleanly.
    #[tokio::test]
    async fn test_validate_sprint_all_completed() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        let roadmap_path = &config.path;
        let task = Task {
            id: "PMAT-0001".to_string(),
            description: "Test task".to_string(),
            status: TaskStatus::Completed,
            complexity: Complexity::Medium,
            priority: Priority::P1,
            assignee: None,
            started_at: Some(Utc::now()),
            completed_at: Some(Utc::now()),
        };
        let sprint = Sprint {
            version: "v1.0.0".to_string(),
            title: "Complete Sprint".to_string(),
            start_date: Utc::now(),
            end_date: Utc::now(),
            priority: Priority::P0,
            tasks: vec![task],
            definition_of_done: vec!["All tests pass".to_string()],
            quality_gates: vec!["Coverage > 80%".to_string()],
        };
        let mut roadmap = Roadmap {
            current_sprint: Some("v1.0.0".to_string()),
            sprints: HashMap::new(),
            backlog: Vec::new(),
            completed_sprints: Vec::new(),
        };
        roadmap.sprints.insert("v1.0.0".to_string(), sprint);
        roadmap.to_file(roadmap_path).unwrap();
        let result = validate_sprint(roadmap_path, "v1.0.0", false, &config).await;
        assert!(result.is_ok());
    }

    /// An incomplete sprint fails validation under `--strict`.
    #[tokio::test]
    async fn test_validate_sprint_incomplete_strict() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let result = validate_sprint(&config.path, "v1.0.0", true, &config).await;
        assert!(result.is_err());
    }

    /// The same incomplete sprint passes when strict mode is off.
    #[tokio::test]
    async fn test_validate_sprint_incomplete_non_strict() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let result = validate_sprint(&config.path, "v1.0.0", false, &config).await;
        assert!(result.is_ok());
    }

    /// With quality gates requested, the generated todos file mentions the
    /// quality-gate section.
    #[tokio::test]
    async fn test_generate_todos_with_quality_gates() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let output_path = temp_dir.path().join("todos_output.md");
        let result = generate_todos(
            &config.path,
            Some("v1.0.0"),
            &output_path,
            true,
            &config,
        )
        .await;
        assert!(result.is_ok());
        assert!(output_path.exists());
        let content = fs::read_to_string(&output_path).unwrap();
        assert!(content.contains("Quality") || content.contains("Max Complexity"));
    }

    /// Without quality gates, generation still succeeds and writes the file.
    #[tokio::test]
    async fn test_generate_todos_without_quality_gates() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let output_path = temp_dir.path().join("simple_todos.md");
        let result = generate_todos(
            &config.path,
            Some("v1.0.0"),
            &output_path,
            false,
            &config,
        )
        .await;
        assert!(result.is_ok());
        assert!(output_path.exists());
    }

    /// With no sprint argument, todo generation uses the current sprint.
    #[tokio::test]
    async fn test_generate_todos_uses_current_sprint() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let output_path = temp_dir.path().join("todos.md");
        let result = generate_todos(&config.path, None, &output_path, false, &config).await;
        assert!(result.is_ok());
    }

    /// Generating todos for an unknown sprint is an error.
    #[tokio::test]
    async fn test_generate_todos_nonexistent_sprint() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let output_path = temp_dir.path().join("todos.md");
        let result =
            generate_todos(&config.path, Some("v99.0.0"), &output_path, false, &config).await;
        assert!(result.is_err());
    }

    /// Task status rendering succeeds for JSON output of a backlog task.
    #[test]
    fn test_show_task_status_json() {
        let task = Task {
            id: "PMAT-0001".to_string(),
            description: "Test task".to_string(),
            status: TaskStatus::InProgress,
            complexity: Complexity::High,
            priority: Priority::P0,
            assignee: Some("dev".to_string()),
            started_at: Some(Utc::now()),
            completed_at: None,
        };
        let roadmap = Roadmap {
            current_sprint: None,
            sprints: HashMap::new(),
            backlog: vec![task],
            completed_sprints: Vec::new(),
        };
        let result = show_task_status(&roadmap, "PMAT-0001", OutputFormat::Json);
        assert!(result.is_ok());
    }

    /// Task status rendering succeeds for table output of a backlog task.
    #[test]
    fn test_show_task_status_table() {
        let task = Task {
            id: "PMAT-0002".to_string(),
            description: "Another task".to_string(),
            status: TaskStatus::Completed,
            complexity: Complexity::Low,
            priority: Priority::P2,
            assignee: None,
            started_at: Some(Utc::now()),
            completed_at: Some(Utc::now()),
        };
        let roadmap = Roadmap {
            current_sprint: None,
            sprints: HashMap::new(),
            backlog: vec![task],
            completed_sprints: Vec::new(),
        };
        let result = show_task_status(&roadmap, "PMAT-0002", OutputFormat::Table);
        assert!(result.is_ok());
    }

    /// Looking up a task id that is nowhere in the roadmap is an error.
    #[test]
    fn test_show_task_status_not_found() {
        let roadmap = Roadmap {
            current_sprint: None,
            sprints: HashMap::new(),
            backlog: Vec::new(),
            completed_sprints: Vec::new(),
        };
        let result = show_task_status(&roadmap, "PMAT-9999", OutputFormat::Json);
        assert!(result.is_err());
    }

    /// Smoke test: rendering details of an in-flight (blocked) task must not
    /// panic.
    #[test]
    fn test_display_task_details() {
        let task = Task {
            id: "PMAT-0003".to_string(),
            description: "Display test".to_string(),
            status: TaskStatus::Blocked,
            complexity: Complexity::Medium,
            priority: Priority::P1,
            assignee: Some("tester".to_string()),
            started_at: Some(Utc::now()),
            completed_at: None,
        };
        display_task_details(&task);
    }

    /// Smoke test: rendering a completed task (with completion timestamp)
    /// must not panic.
    #[test]
    fn test_display_task_details_with_completed() {
        let task = Task {
            id: "PMAT-0004".to_string(),
            description: "Completed task".to_string(),
            status: TaskStatus::Completed,
            complexity: Complexity::High,
            priority: Priority::P0,
            assignee: None,
            started_at: Some(Utc::now()),
            completed_at: Some(Utc::now()),
        };
        display_task_details(&task);
    }

    /// Progress counts: one completed, one in-progress, and one planned task
    /// yield (1, 1, 3).
    #[test]
    fn test_calculate_sprint_progress() {
        let tasks = vec![
            Task {
                id: "PMAT-0001".to_string(),
                description: "Completed".to_string(),
                status: TaskStatus::Completed,
                complexity: Complexity::Low,
                priority: Priority::P1,
                assignee: None,
                started_at: None,
                completed_at: None,
            },
            Task {
                id: "PMAT-0002".to_string(),
                description: "In Progress".to_string(),
                status: TaskStatus::InProgress,
                complexity: Complexity::Low,
                priority: Priority::P1,
                assignee: None,
                started_at: None,
                completed_at: None,
            },
            Task {
                id: "PMAT-0003".to_string(),
                description: "Planned".to_string(),
                status: TaskStatus::Planned,
                complexity: Complexity::Low,
                priority: Priority::P1,
                assignee: None,
                started_at: None,
                completed_at: None,
            },
        ];
        let sprint = Sprint {
            version: "v1.0.0".to_string(),
            title: "Test".to_string(),
            start_date: Utc::now(),
            end_date: Utc::now(),
            priority: Priority::P0,
            tasks,
            definition_of_done: Vec::new(),
            quality_gates: Vec::new(),
        };
        let (completed, in_progress, total) = calculate_sprint_progress(&sprint);
        assert_eq!(completed, 1);
        assert_eq!(in_progress, 1);
        assert_eq!(total, 3);
    }

    /// An empty sprint reports all-zero progress.
    #[test]
    fn test_calculate_sprint_progress_empty() {
        let sprint = Sprint {
            version: "v1.0.0".to_string(),
            title: "Empty Sprint".to_string(),
            start_date: Utc::now(),
            end_date: Utc::now(),
            priority: Priority::P0,
            tasks: Vec::new(),
            definition_of_done: Vec::new(),
            quality_gates: Vec::new(),
        };
        let (completed, in_progress, total) = calculate_sprint_progress(&sprint);
        assert_eq!(completed, 0);
        assert_eq!(in_progress, 0);
        assert_eq!(total, 0);
    }

    /// Smoke test: rendering full sprint details (tasks, DoD, gates) must
    /// not panic.
    #[test]
    fn test_display_sprint_details() {
        let sprint = Sprint {
            version: "v2.0.0".to_string(),
            title: "Feature Sprint".to_string(),
            start_date: Utc.with_ymd_and_hms(2025, 1, 1, 0, 0, 0).unwrap(),
            end_date: Utc.with_ymd_and_hms(2025, 1, 15, 0, 0, 0).unwrap(),
            priority: Priority::P0,
            tasks: vec![Task {
                id: "PMAT-0001".to_string(),
                description: "Task 1".to_string(),
                status: TaskStatus::Planned,
                complexity: Complexity::Medium,
                priority: Priority::P1,
                assignee: None,
                started_at: None,
                completed_at: None,
            }],
            definition_of_done: vec!["Done".to_string()],
            quality_gates: vec!["Gate".to_string()],
        };
        display_sprint_details(&sprint);
    }

    /// Smoke test: rendering a sprint's task list with mixed statuses must
    /// not panic.
    #[test]
    fn test_display_sprint_tasks() {
        let sprint = Sprint {
            version: "v3.0.0".to_string(),
            title: "Sprint with tasks".to_string(),
            start_date: Utc::now(),
            end_date: Utc::now(),
            priority: Priority::P1,
            tasks: vec![
                Task {
                    id: "PMAT-0001".to_string(),
                    description: "First task".to_string(),
                    status: TaskStatus::Completed,
                    complexity: Complexity::Low,
                    priority: Priority::P2,
                    assignee: None,
                    started_at: None,
                    completed_at: None,
                },
                Task {
                    id: "PMAT-0002".to_string(),
                    description: "Second task".to_string(),
                    status: TaskStatus::InProgress,
                    complexity: Complexity::High,
                    priority: Priority::P0,
                    assignee: None,
                    started_at: None,
                    completed_at: None,
                },
            ],
            definition_of_done: Vec::new(),
            quality_gates: Vec::new(),
        };
        display_sprint_tasks(&sprint);
    }

    /// End-to-end: `execute` with an `Init` command creates the roadmap
    /// file at the configured path.
    #[tokio::test]
    async fn test_execute_init_command() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        let cmd = RoadmapCommand {
            command: RoadmapSubcommand::Init {
                version: "v1.0.0".to_string(),
                title: "Test Sprint".to_string(),
                duration_days: 14,
                priority: "P0".to_string(),
            },
        };
        let result = execute(cmd, config.clone()).await;
        assert!(result.is_ok());
        assert!(config.path.exists());
    }

    /// End-to-end: `execute` with a `Start` command succeeds for an
    /// existing task.
    #[tokio::test]
    async fn test_execute_start_command() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let cmd = RoadmapCommand {
            command: RoadmapSubcommand::Start {
                task_id: "PMAT-0001".to_string(),
                create_branch: false,
            },
        };
        let result = execute(cmd, config).await;
        assert!(result.is_ok());
    }

    /// End-to-end: `execute` with a `Complete` command (quality check
    /// skipped) succeeds.
    #[tokio::test]
    async fn test_execute_complete_command() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let cmd = RoadmapCommand {
            command: RoadmapSubcommand::Complete {
                task_id: "PMAT-0001".to_string(),
                skip_quality_check: true,
            },
        };
        let result = execute(cmd, config).await;
        assert!(result.is_ok());
    }

    /// End-to-end: `execute` with a `Status` command succeeds.
    #[tokio::test]
    async fn test_execute_status_command() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let cmd = RoadmapCommand {
            command: RoadmapSubcommand::Status {
                sprint: Some("v1.0.0".to_string()),
                task: None,
                format: OutputFormat::Json,
            },
        };
        let result = execute(cmd, config).await;
        assert!(result.is_ok());
    }

    /// End-to-end: `execute` with a `Todos` command writes the output file.
    #[tokio::test]
    async fn test_execute_todos_command() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let output_path = temp_dir.path().join("generated_todos.md");
        let cmd = RoadmapCommand {
            command: RoadmapSubcommand::Todos {
                sprint: Some("v1.0.0".to_string()),
                output: output_path.clone(),
                include_quality_gates: false,
            },
        };
        let result = execute(cmd, config).await;
        assert!(result.is_ok());
        assert!(output_path.exists());
    }

    /// End-to-end: `execute` with a non-strict `Validate` command succeeds.
    #[tokio::test]
    async fn test_execute_validate_command() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let cmd = RoadmapCommand {
            command: RoadmapSubcommand::Validate {
                sprint: "v1.0.0".to_string(),
                strict: false,
            },
        };
        let result = execute(cmd, config).await;
        assert!(result.is_ok());
    }

    /// `handle_init` writes version, title, priority, and the duration text
    /// ("14 days") into the generated file.
    #[test]
    fn test_handle_init_valid() {
        let temp_dir = TempDir::new().unwrap();
        let roadmap_path = temp_dir.path().join("handle_init_test.md");
        let result = handle_init(
            "v1.0.0".to_string(),
            "Test Initiative".to_string(),
            14,
            "P1".to_string(),
            roadmap_path.clone(),
        );
        assert!(result.is_ok());
        assert!(roadmap_path.exists());
        let content = fs::read_to_string(&roadmap_path).unwrap();
        assert!(content.contains("v1.0.0"));
        assert!(content.contains("Test Initiative"));
        assert!(content.contains("P1"));
        assert!(content.contains("14 days"));
    }

    /// Every valid priority level is accepted by `handle_init`.
    #[test]
    fn test_handle_init_all_priorities() {
        let temp_dir = TempDir::new().unwrap();
        for priority in &["P0", "P1", "P2"] {
            let roadmap_path = temp_dir.path().join(format!("roadmap_{priority}.md"));
            let result = handle_init(
                "v1.0.0".to_string(),
                "Test".to_string(),
                7,
                priority.to_string(),
                roadmap_path.clone(),
            );
            assert!(result.is_ok(), "Failed for priority {priority}");
        }
    }

    /// A task id matching the PMAT-NNNN pattern is accepted.
    #[test]
    fn test_handle_start_valid_task_id() {
        let result = handle_start("PMAT-1234".to_string(), false);
        assert!(result.is_ok());
    }

    /// A task id with the wrong prefix is rejected with a format error.
    #[test]
    fn test_handle_start_invalid_task_id() {
        let result = handle_start("INVALID-1234".to_string(), false);
        assert!(result.is_err());
        assert!(result
            .unwrap_err()
            .to_string()
            .contains("Invalid task ID format"));
    }

    /// The task-id pattern is case-sensitive: lowercase prefixes fail.
    #[test]
    fn test_handle_start_lowercase_task_id() {
        let result = handle_start("pmat-1234".to_string(), false);
        assert!(result.is_err());
    }

    /// Sprint status renders in JSON for an explicitly named sprint.
    #[tokio::test]
    async fn test_show_sprint_status_json() {
        let task = Task {
            id: "PMAT-0001".to_string(),
            description: "Test".to_string(),
            status: TaskStatus::Planned,
            complexity: Complexity::Medium,
            priority: Priority::P1,
            assignee: None,
            started_at: None,
            completed_at: None,
        };
        let sprint = Sprint {
            version: "v1.0.0".to_string(),
            title: "Test Sprint".to_string(),
            start_date: Utc::now(),
            end_date: Utc::now(),
            priority: Priority::P0,
            tasks: vec![task],
            definition_of_done: Vec::new(),
            quality_gates: Vec::new(),
        };
        let mut roadmap = Roadmap {
            current_sprint: Some("v1.0.0".to_string()),
            sprints: HashMap::new(),
            backlog: Vec::new(),
            completed_sprints: Vec::new(),
        };
        roadmap.sprints.insert("v1.0.0".to_string(), sprint);
        let result = show_sprint_status(&roadmap, Some("v1.0.0"), OutputFormat::Json).await;
        assert!(result.is_ok());
    }

    /// Sprint status renders as a table, defaulting to the current sprint.
    #[tokio::test]
    async fn test_show_sprint_status_table() {
        let sprint = Sprint {
            version: "v1.0.0".to_string(),
            title: "Test Sprint".to_string(),
            start_date: Utc::now(),
            end_date: Utc::now(),
            priority: Priority::P0,
            tasks: Vec::new(),
            definition_of_done: Vec::new(),
            quality_gates: Vec::new(),
        };
        let mut roadmap = Roadmap {
            current_sprint: Some("v1.0.0".to_string()),
            sprints: HashMap::new(),
            backlog: Vec::new(),
            completed_sprints: Vec::new(),
        };
        roadmap.sprints.insert("v1.0.0".to_string(), sprint);
        let result = show_sprint_status(&roadmap, None, OutputFormat::Table).await;
        assert!(result.is_ok());
    }

    /// With no sprint argument and no current sprint, status is an error.
    #[tokio::test]
    async fn test_show_sprint_status_no_current_sprint() {
        let roadmap = Roadmap {
            current_sprint: None,
            sprints: HashMap::new(),
            backlog: Vec::new(),
            completed_sprints: Vec::new(),
        };
        let result = show_sprint_status(&roadmap, None, OutputFormat::Json).await;
        assert!(result.is_err());
    }

    /// Only the first-created sprint becomes current; later sprints do not
    /// displace it.
    #[tokio::test]
    async fn test_init_sprint_sets_first_as_current() {
        let temp_dir = TempDir::new().unwrap();
        let roadmap_path = temp_dir.path().join("roadmap.md");
        init_sprint(&roadmap_path, "v1.0.0", "First", 7, "P0")
            .await
            .unwrap();
        let roadmap = Roadmap::from_file(&roadmap_path).unwrap();
        assert_eq!(roadmap.current_sprint, Some("v1.0.0".to_string()));
        init_sprint(&roadmap_path, "v2.0.0", "Second", 7, "P1")
            .await
            .unwrap();
        let roadmap = Roadmap::from_file(&roadmap_path).unwrap();
        assert_eq!(roadmap.current_sprint, Some("v1.0.0".to_string()));
    }

    /// Smoke test: starting a task (which prints its details) succeeds.
    #[tokio::test]
    async fn test_start_task_shows_task_details() {
        let temp_dir = TempDir::new().unwrap();
        let config = create_test_config(&temp_dir);
        create_sample_roadmap(&config.path);
        let result = start_task(&config.path, "PMAT-0001", false, &config).await;
        assert!(result.is_ok());
    }

    /// Task status renders in YAML for a deferred backlog task.
    #[test]
    fn test_task_status_yaml_output() {
        let task = Task {
            id: "PMAT-0001".to_string(),
            description: "YAML test".to_string(),
            status: TaskStatus::Deferred,
            complexity: Complexity::Low,
            priority: Priority::P2,
            assignee: Some("yaml_tester".to_string()),
            started_at: None,
            completed_at: None,
        };
        let roadmap = Roadmap {
            current_sprint: None,
            sprints: HashMap::new(),
            backlog: vec![task],
            completed_sprints: Vec::new(),
        };
        let result = show_task_status(&roadmap, "PMAT-0001", OutputFormat::Yaml);
        assert!(result.is_ok());
    }

    /// Sprint status renders in YAML for an explicitly named sprint.
    #[tokio::test]
    async fn test_sprint_status_yaml_output() {
        let sprint = Sprint {
            version: "v1.0.0".to_string(),
            title: "YAML Sprint".to_string(),
            start_date: Utc::now(),
            end_date: Utc::now(),
            priority: Priority::P1,
            tasks: Vec::new(),
            definition_of_done: Vec::new(),
            quality_gates: Vec::new(),
        };
        let mut roadmap = Roadmap {
            current_sprint: Some("v1.0.0".to_string()),
            sprints: HashMap::new(),
            backlog: Vec::new(),
            completed_sprints: Vec::new(),
        };
        roadmap.sprints.insert("v1.0.0".to_string(), sprint);
        let result = show_sprint_status(&roadmap, Some("v1.0.0"), OutputFormat::Yaml).await;
        assert!(result.is_ok());
    }

    /// Non-strict validation succeeds with quality-gate enforcement off.
    #[tokio::test]
    async fn test_validate_sprint_with_quality_gates_disabled() {
        let temp_dir = TempDir::new().unwrap();
        let mut config = create_test_config(&temp_dir);
        config.enforce_quality_gates = false;
        create_sample_roadmap(&config.path);
        let result = validate_sprint(&config.path, "v1.0.0", false, &config).await;
        assert!(result.is_ok());
    }

    /// Non-strict validation also succeeds with enforcement on.
    #[tokio::test]
    async fn test_validate_sprint_with_quality_gates_enabled() {
        let temp_dir = TempDir::new().unwrap();
        let mut config = create_test_config(&temp_dir);
        config.enforce_quality_gates = true;
        create_sample_roadmap(&config.path);
        let result = validate_sprint(&config.path, "v1.0.0", false, &config).await;
        assert!(result.is_ok());
    }

    /// With enforcement disabled, completing a task without skipping the
    /// quality check still succeeds.
    #[tokio::test]
    async fn test_complete_task_without_quality_enforcement() {
        let temp_dir = TempDir::new().unwrap();
        let mut config = create_test_config(&temp_dir);
        config.enforce_quality_gates = false;
        create_sample_roadmap(&config.path);
        let result = complete_task(&config.path, "PMAT-0001", false, &config).await;
        assert!(result.is_ok());
    }

    /// Requesting a branch while `git.create_branches` is off succeeds
    /// without touching git.
    #[tokio::test]
    async fn test_start_task_with_branch_disabled() {
        let temp_dir = TempDir::new().unwrap();
        let mut config = create_test_config(&temp_dir);
        config.git.create_branches = false;
        create_sample_roadmap(&config.path);
        let result = start_task(&config.path, "PMAT-0001", true, &config).await;
        assert!(result.is_ok());
    }
}