use super::*;
/// Lightweight property-based smoke tests that exercise the proptest
/// harness itself, guarding against regressions in test infrastructure.
mod property_tests {
    use proptest::prelude::*;
    proptest! {
        /// Harness sanity check: arbitrary string inputs must not break
        /// case generation or shrinking.
        #[test]
        fn basic_property_stability(_input in ".*") {
            prop_assert!(true);
        }
        /// Generated values must respect the declared exclusive upper bound.
        /// (Tightened from the tautological `< 1001` to the actual range
        /// bound `< 1000` so the assertion really checks the generator.)
        #[test]
        fn module_consistency_check(_x in 0u32..1000) {
            prop_assert!(_x < 1000);
        }
    }
}
mod coverage_tests {
use super::*;
use proptest::prelude::*;
use std::fs;
use tempfile::TempDir;
// Profile selection: each named profile yields its documented thresholds;
// `None` and unknown names both resolve to the standard profile.
#[test]
fn test_select_quality_profile_extreme() {
    let extreme = select_quality_profile(Some("extreme"));
    assert_eq!(extreme.name, "extreme");
    assert!(extreme.thresholds.zero_satd);
    assert_eq!(extreme.thresholds.max_complexity, 5);
}
#[test]
fn test_select_quality_profile_standard() {
    let standard = select_quality_profile(Some("standard"));
    assert_eq!(standard.thresholds.max_complexity, 10);
    assert_eq!(standard.name, "standard");
}
#[test]
fn test_select_quality_profile_relaxed() {
    let relaxed = select_quality_profile(Some("relaxed"));
    assert_eq!(relaxed.name, "relaxed");
    assert!(!relaxed.thresholds.zero_satd);
    assert_eq!(relaxed.thresholds.max_complexity, 20);
}
#[test]
fn test_select_quality_profile_default() {
    // No profile requested: fall back to "standard".
    assert_eq!(select_quality_profile(None).name, "standard");
}
#[test]
fn test_select_quality_profile_unknown() {
    // Unrecognized profile names also fall back to "standard".
    assert_eq!(select_quality_profile(Some("unknown_profile")).name, "standard");
}
// Code-type parsing: each keyword maps to its enum variant; `None` and
// unrecognized strings default to `CodeType::Function`.
#[test]
fn test_parse_code_type_function() {
    assert!(matches!(parse_code_type(Some("function")), CodeType::Function));
}
#[test]
fn test_parse_code_type_module() {
    assert!(matches!(parse_code_type(Some("module")), CodeType::Module));
}
#[test]
fn test_parse_code_type_service() {
    assert!(matches!(parse_code_type(Some("service")), CodeType::Service));
}
#[test]
fn test_parse_code_type_test() {
    assert!(matches!(parse_code_type(Some("test")), CodeType::Test));
}
#[test]
fn test_parse_code_type_default() {
    // `None` falls back to the function code type.
    assert!(matches!(parse_code_type(None), CodeType::Function));
}
#[test]
fn test_parse_code_type_unknown() {
    // Unknown strings also fall back to the function code type.
    assert!(matches!(
        parse_code_type(Some("unknown_type")),
        CodeType::Function
    ));
}
#[tokio::test]
async fn test_analyze_complexity_empty_paths() {
let result = analyze_complexity(&[], None, None).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_analyze_satd_empty_paths() {
let result = analyze_satd(&[], false).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_analyze_dead_code_empty_paths() {
let result = analyze_dead_code(&[], false).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_analyze_lint_hotspots_empty_paths() {
let result = analyze_lint_hotspots(&[], None).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_analyze_churn_empty_paths() {
let result = analyze_churn(&[], None, None).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_analyze_coupling_empty_paths() {
let result = analyze_coupling(&[], None).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_check_quality_gates_empty_paths() {
let result = check_quality_gates(&[], false).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_quality_gate_summary_empty_paths() {
let result = quality_gate_summary(&[]).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_quality_gate_baseline_empty_paths() {
let result = quality_gate_baseline(&[], None).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_quality_gate_compare_empty_paths() {
let temp_file = TempDir::new().unwrap();
let baseline_path = temp_file.path().join("baseline.json");
fs::write(&baseline_path, "{}").unwrap();
let result = quality_gate_compare(&baseline_path, &[]).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_check_quality_gate_file_nonexistent() {
let path = Path::new("/tmp/nonexistent_file_12345.rs");
let result = check_quality_gate_file(path, false).await;
assert!(result.is_err());
assert!(result.unwrap_err().to_string().contains("does not exist"));
}
#[tokio::test]
async fn test_generate_context_empty_paths() {
let result = generate_context(&[], None, false).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_generate_deep_context_empty_paths() {
let result = generate_deep_context(&[], None).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_analyze_context_empty_paths() {
let result = analyze_context(&[], &[]).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_context_summary_empty_paths() {
let result = context_summary(&[], None).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
#[tokio::test]
async fn test_analyze_tdg_empty_paths() {
let result = analyze_tdg(&[], None, None, None, None).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("At least one path"));
}
// `git_clone` derives the checkout directory from the repository URL
// (with or without a `.git` suffix) unless a target dir is supplied.
#[tokio::test]
async fn test_git_clone_extracts_repo_name() {
    let dest = git_clone("https://github.com/user/test-repo.git", None, None, None)
        .await
        .expect("clone should succeed");
    assert_eq!(dest, PathBuf::from("test-repo"));
}
#[tokio::test]
async fn test_git_clone_with_target_dir() {
    // An explicit target directory wins over the URL-derived name.
    let target = Path::new("/tmp/my-target");
    let dest = git_clone("https://github.com/user/repo.git", Some(target), None, None)
        .await
        .expect("clone should succeed");
    assert_eq!(dest, PathBuf::from("/tmp/my-target"));
}
#[tokio::test]
async fn test_git_clone_url_without_git_suffix() {
    // URLs without a `.git` suffix still yield the repository name.
    let dest = git_clone("https://github.com/user/some-repo", None, None, None)
        .await
        .expect("clone should succeed");
    assert_eq!(dest, PathBuf::from("some-repo"));
}
// Storage-backend construction: known backends succeed, `None` picks the
// default, and anything else is rejected with a descriptive error.
#[test]
fn test_create_storage_backend_inmemory() {
    assert!(create_storage_backend(Some("inmemory")).is_ok());
}
#[test]
fn test_create_storage_backend_libsql() {
    assert!(create_storage_backend(Some("libsql")).is_ok());
}
#[test]
fn test_create_storage_backend_default() {
    // No backend specified: the default backend must be constructible.
    assert!(create_storage_backend(None).is_ok());
}
#[test]
fn test_create_storage_backend_unsupported() {
    let err = create_storage_backend(Some("nonexistent_backend"))
        .err()
        .expect("unknown backend must be rejected");
    assert!(err.to_string().contains("Unsupported storage backend"));
}
// Construction helpers: verify the MCP analysis record builders populate
// their fields as the rest of the pipeline expects.
#[test]
fn test_create_file_identity() {
// Identity echoes back the path, content hash, and byte length.
let path = Path::new("/tmp/test.rs");
let content = b"fn main() {}";
let hash = blake3::hash(content);
let identity = create_file_identity(path, &hash, content);
assert_eq!(identity.path, PathBuf::from("/tmp/test.rs"));
assert_eq!(identity.content_hash, hash);
// "fn main() {}" is 12 bytes long.
assert_eq!(identity.size_bytes, 12);
}
#[test]
fn test_create_component_scores() {
// A fresh component-score record starts with every breakdown empty.
let scores = create_component_scores();
assert!(scores.complexity_breakdown.is_empty());
assert!(scores.duplication_sources.is_empty());
assert!(scores.coupling_dependencies.is_empty());
assert!(scores.doc_missing_items.is_empty());
assert!(scores.consistency_violations.is_empty());
}
#[test]
fn test_create_semantic_signature() {
// Signatures built from a content hash carry the fixed MCP pattern
// labels and start with no import dependencies.
let content = b"fn main() {}";
let hash = blake3::hash(content);
let signature = create_semantic_signature(&hash);
assert_eq!(signature.identifier_pattern, "mcp_analysis");
assert_eq!(signature.control_flow_pattern, "function_call");
assert!(signature.import_dependencies.is_empty());
}
#[test]
fn test_create_analysis_metadata() {
// Metadata pins the analyzer version string and the default
// duration/cache values used by the MCP layer.
let score = crate::tdg::TdgScore::default();
let metadata = create_analysis_metadata(&score);
assert_eq!(metadata.analyzer_version, "2.38.0-mcp");
assert_eq!(metadata.analysis_duration_ms, 10);
assert!(!metadata.cache_hit);
}
#[test]
fn test_create_success_result() {
// A success payload exposes the summary keys consumers rely on.
let path = Path::new("/tmp/test");
let project_score = crate::tdg::ProjectScore::default();
let result = create_success_result(path, &project_score);
assert!(result.get("path").is_some());
assert!(result.get("total_files").is_some());
assert!(result.get("average_score").is_some());
assert!(result.get("average_grade").is_some());
}
#[test]
fn test_create_error_result() {
// An error payload is marked "failed" and embeds the error text.
let path = Path::new("/tmp/test");
let error = anyhow::anyhow!("Test error message");
let result = create_error_result(path, &error);
assert_eq!(result.get("status").unwrap(), "failed");
assert!(result
.get("error")
.unwrap()
.as_str()
.unwrap()
.contains("Test error message"));
}
// Quality-driven development (QDD) dispatcher: invalid requests are
// reported as "failed" inside the JSON payload, not as Err results.
#[tokio::test]
async fn test_quality_driven_development_unknown_operation() {
let result =
quality_driven_development("unknown_op", None, None, None, None, None, None, None)
.await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json.get("status").unwrap(), "failed");
assert!(json
.get("message")
.unwrap()
.as_str()
.unwrap()
.contains("Unknown QDD operation"));
}
// The refactor operation requires a file path; its absence is named in
// the failure message.
#[tokio::test]
async fn test_quality_driven_development_refactor_missing_path() {
let result = quality_driven_development(
"refactor",
Some("standard"),
None,
Some("test_func"),
None,
None, None,
None,
)
.await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json.get("status").unwrap(), "failed");
assert!(json
.get("message")
.unwrap()
.as_str()
.unwrap()
.contains("file_path"));
}
// A missing defect-summary file yields a FILE_NOT_FOUND error code in
// the JSON payload.
#[tokio::test]
async fn test_generate_defect_aware_prompt_missing_file() {
let result = generate_defect_aware_prompt(
"test task".to_string(),
"test context".to_string(),
PathBuf::from("/tmp/nonexistent_summary_12345.yaml"),
)
.await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json.get("status").unwrap(), "failed");
assert!(json
.get("error")
.unwrap()
.as_str()
.unwrap()
.contains("FILE_NOT_FOUND"));
}
// TDG storage management rejects unknown actions with an error status.
#[tokio::test]
async fn test_tdg_storage_management_unknown_action() {
let result = tdg_storage_management("unknown_action".to_string(), json!({})).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json.get("status").unwrap(), "error");
assert!(json
.get("message")
.unwrap()
.as_str()
.unwrap()
.contains("Unknown storage action"));
}
// Unsupported storage backends are reported via the status field.
#[tokio::test]
async fn test_tdg_configure_storage_unsupported_backend() {
let result =
tdg_configure_storage("unsupported_backend".to_string(), None, None, None).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json.get("status").unwrap(), "error");
}
// The in-memory backend configures successfully with cache options set.
#[tokio::test]
async fn test_tdg_configure_storage_inmemory() {
let result =
tdg_configure_storage("inmemory".to_string(), None, Some(64), Some(true)).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json.get("status").unwrap(), "completed");
}
// Performance metrics expose threshold, performance, and scheduler stats.
#[tokio::test]
async fn test_tdg_performance_metrics() {
let result = tdg_performance_metrics().await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json.get("status").unwrap(), "completed");
assert!(json.get("adaptive_thresholds").is_some());
assert!(json.get("performance_stats").is_some());
assert!(json.get("scheduler_stats").is_some());
}
// Health checks report an overall status, a score, and a component list.
#[tokio::test]
async fn test_tdg_health_check() {
let result = tdg_health_check().await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json.get("status").unwrap(), "completed");
assert!(json.get("overall_status").is_some());
assert!(json.get("health_score").is_some());
assert!(json.get("components").is_some());
}
// Nonexistent input paths are not an error: analyses complete with
// zero-valued result counters.
#[tokio::test]
async fn test_analyze_complexity_with_nonexistent_paths() {
let paths = vec![PathBuf::from("/tmp/nonexistent_12345.rs")];
let result = analyze_complexity(&paths, None, None).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json["results"]["total_files"], 0);
}
#[tokio::test]
async fn test_analyze_satd_with_nonexistent_paths() {
let paths = vec![PathBuf::from("/tmp/nonexistent_12345.rs")];
let result = analyze_satd(&paths, false).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json["results"]["total_satd"], 0);
}
#[tokio::test]
async fn test_analyze_dead_code_with_nonexistent_paths() {
let paths = vec![PathBuf::from("/tmp/nonexistent_12345.rs")];
let result = analyze_dead_code(&paths, false).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json["results"]["total_dead_code"], 0);
}
#[tokio::test]
async fn test_generate_context_with_nonexistent_paths() {
let paths = vec![PathBuf::from("/tmp/nonexistent_12345.rs")];
let result = generate_context(&paths, None, false).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json["context"]["total_files"], 0);
}
// Happy-path checks against real files written to a temp directory.
#[tokio::test]
async fn test_analyze_complexity_with_real_file() -> Result<()> {
let temp_dir = TempDir::new()?;
let file_path = temp_dir.path().join("test.rs");
fs::write(&file_path, "fn main() { println!(\"Hello\"); }")?;
let paths = vec![file_path];
let result = analyze_complexity(&paths, None, Some(5)).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json["status"], "completed");
Ok(())
}
#[tokio::test]
async fn test_analyze_satd_with_real_file() -> Result<()> {
// The TODO comment in the fixture is deliberate SATD bait.
let temp_dir = TempDir::new()?;
let file_path = temp_dir.path().join("test.rs");
fs::write(
&file_path,
"fn main() {\n // TODO: fix this\n println!(\"Hello\");\n}",
)?;
let paths = vec![file_path];
let result = analyze_satd(&paths, false).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json["status"], "completed");
Ok(())
}
#[tokio::test]
async fn test_context_summary_with_real_directory() -> Result<()> {
let temp_dir = TempDir::new()?;
let file_path = temp_dir.path().join("test.rs");
fs::write(&file_path, "fn main() {}")?;
let paths = vec![temp_dir.path().to_path_buf()];
let result = context_summary(&paths, None).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json["status"], "completed");
// At least the one file we just wrote must be counted.
assert!(json["summary"]["total_files"].as_u64().unwrap() >= 1);
Ok(())
}
// Comparing against a missing baseline file is a hard error.
#[tokio::test]
async fn test_quality_gate_compare_missing_baseline() {
let paths = vec![PathBuf::from("/tmp")];
let result =
quality_gate_compare(Path::new("/tmp/nonexistent_baseline_12345.json"), &paths).await;
assert!(result.is_err());
assert!(result
.unwrap_err()
.to_string()
.contains("Baseline file not found"));
}
// Property tests: the pure helpers must never panic on arbitrary input,
// and their outputs must be deterministic functions of their inputs.
proptest! {
#[test]
fn prop_select_quality_profile_never_panics(profile_name in ".*") {
let _ = select_quality_profile(Some(&profile_name));
}
#[test]
fn prop_parse_code_type_never_panics(code_type in ".*") {
let _ = parse_code_type(Some(&code_type));
}
// Identity must reflect exact content length and hash for any bytes.
#[test]
fn prop_create_file_identity_with_any_content(content in proptest::collection::vec(any::<u8>(), 0..1000)) {
let path = Path::new("/tmp/test.rs");
let hash = blake3::hash(&content);
let identity = create_file_identity(path, &hash, &content);
prop_assert_eq!(identity.size_bytes, content.len() as u64);
prop_assert_eq!(identity.content_hash, hash);
}
// Same hash in, same signature out.
#[test]
fn prop_create_semantic_signature_deterministic(content in proptest::collection::vec(any::<u8>(), 0..100)) {
let hash = blake3::hash(&content);
let sig1 = create_semantic_signature(&hash);
let sig2 = create_semantic_signature(&hash);
prop_assert_eq!(sig1.ast_structure_hash, sig2.ast_structure_hash);
prop_assert_eq!(sig1.identifier_pattern, sig2.identifier_pattern);
}
// NOTE(review): builds a fresh tokio runtime for every generated case,
// which is heavyweight; presumably acceptable because git_clone's name
// extraction does not hit the network here — TODO confirm.
#[test]
fn prop_git_clone_url_parsing(repo_name in "[a-zA-Z0-9-]+") {
let url = format!("https://github.com/user/{}.git", repo_name);
let rt = tokio::runtime::Runtime::new().unwrap();
let result = rt.block_on(git_clone(&url, None, None, None));
prop_assert!(result.is_ok());
prop_assert_eq!(result.unwrap(), PathBuf::from(&repo_name));
}
}
// System diagnostics accept "all", a specific component list, or none;
// each variant must return a payload carrying a status field.
#[tokio::test]
async fn test_tdg_system_diagnostics_all_components() {
let result = tdg_system_diagnostics(true, vec!["all".to_string()]).await;
assert!(result.is_ok());
let json = result.unwrap();
assert!(json.get("status").is_some());
}
#[tokio::test]
async fn test_tdg_system_diagnostics_specific_components() {
let result = tdg_system_diagnostics(false, vec!["storage".to_string()]).await;
assert!(result.is_ok());
let json = result.unwrap();
assert!(json.get("status").is_some());
}
#[tokio::test]
async fn test_tdg_system_diagnostics_empty_components() {
// An empty component list is valid and still yields a status.
let result = tdg_system_diagnostics(false, vec![]).await;
assert!(result.is_ok());
let json = result.unwrap();
assert!(json.get("status").is_some());
}
#[test]
fn test_create_file_identity_empty_content() {
    // Zero-length content must yield a zero byte count, not an error.
    let path = Path::new("/tmp/empty.rs");
    let content = b"";
    let hash = blake3::hash(content);
    let identity = create_file_identity(path, &hash, content);
    assert_eq!(identity.size_bytes, 0);
}
#[test]
fn test_create_semantic_signature_hash_bytes() {
    // The structural hash derived from a non-empty content hash must be
    // non-zero. The original `|| content.is_empty()` escape hatch was
    // dead code: the content literal here is never empty, so it could
    // only mask a genuinely failing hash.
    let content = b"deterministic content";
    let hash = blake3::hash(content);
    let signature = create_semantic_signature(&hash);
    assert!(signature.ast_structure_hash > 0);
}
// Lint hotspot analysis requires a directory; a bare file path is an
// error whose message mentions "directory".
#[tokio::test]
async fn test_analyze_lint_hotspots_file_path_error() {
let temp_dir = TempDir::new().unwrap();
let file_path = temp_dir.path().join("test.rs");
fs::write(&file_path, "fn main() {}").unwrap();
let paths = vec![file_path];
let result = analyze_lint_hotspots(&paths, None).await;
assert!(result.is_err());
assert!(result.unwrap_err().to_string().contains("directory"));
}
// A trivial function under a generous complexity threshold must produce
// no violations.
#[tokio::test]
async fn test_analyze_complexity_with_threshold() -> Result<()> {
let temp_dir = TempDir::new()?;
let file_path = temp_dir.path().join("test.rs");
fs::write(&file_path, "fn simple() {}")?;
let paths = vec![file_path];
let result = analyze_complexity(&paths, Some(5), Some(100)).await;
assert!(result.is_ok());
let json = result.unwrap();
assert_eq!(json["results"]["violations"].as_array().unwrap().len(), 0);
Ok(())
}
/// `top_files` in the complexity report must honor the requested limit.
#[tokio::test]
async fn test_analyze_complexity_with_top_files_limit() -> Result<()> {
    let temp_dir = TempDir::new()?;
    // Write five trivial source files, collecting their paths as we go.
    // (Previously the paths were re-derived by a second, duplicated
    // `(0..5).map(...)` loop after the write loop.)
    let mut paths = Vec::with_capacity(5);
    for i in 0..5 {
        let file_path = temp_dir.path().join(format!("test{i}.rs"));
        fs::write(&file_path, "fn main() {}")?;
        paths.push(file_path);
    }
    let result = analyze_complexity(&paths, Some(2), None).await;
    assert!(result.is_ok());
    let json = result.unwrap();
    // A limit of 2 means at most two entries in top_files.
    assert!(json["results"]["top_files"].as_array().unwrap().len() <= 2);
    Ok(())
}
// Directory arguments must be walked recursively by each analysis.
#[tokio::test]
async fn test_analyze_complexity_walks_directory() -> Result<()> {
let temp_dir = TempDir::new()?;
fs::write(
temp_dir.path().join("sample.rs"),
"fn a() { if true { 1 } else { 2 }; } fn b() {}",
)?;
fs::write(
temp_dir.path().join("nested.rs"),
"fn c() { for i in 0..10 { let _ = i; } }",
)?;
let paths = vec![temp_dir.path().to_path_buf()];
let json = analyze_complexity(&paths, None, None).await.unwrap();
let total_files = json["results"]["total_files"].as_u64().unwrap_or(0);
assert!(
total_files >= 2,
"expected >=2 files analyzed, got {total_files}; json={json}"
);
Ok(())
}
#[tokio::test]
async fn test_analyze_satd_walks_directory() -> Result<()> {
// The TODO/FIXME comments in the fixture are deliberate SATD markers.
let temp_dir = TempDir::new()?;
fs::write(
temp_dir.path().join("debt.rs"),
"// TODO: refactor this later\nfn x() {}\n// FIXME: edge case\n",
)?;
let paths = vec![temp_dir.path().to_path_buf()];
let json = analyze_satd(&paths, false).await.unwrap();
let total = json["results"]["total_satd"].as_u64().unwrap_or(0);
assert!(
total >= 1,
"expected >=1 SATD item found, got {total}; json={json}"
);
Ok(())
}
#[tokio::test]
async fn test_analyze_dead_code_walks_directory() -> Result<()> {
// Minimal Cargo project layout so dead-code analysis can resolve it.
let temp_dir = TempDir::new()?;
fs::write(
temp_dir.path().join("Cargo.toml"),
"[package]\nname=\"t\"\nversion=\"0.1.0\"\n",
)?;
let src = temp_dir.path().join("src");
fs::create_dir(&src)?;
fs::write(
src.join("lib.rs"),
"pub fn used() {}\nfn unused_helper() { let _=1; }\n",
)?;
let paths = vec![temp_dir.path().to_path_buf()];
let json = analyze_dead_code(&paths, false).await.unwrap();
assert_eq!(json["status"], "completed");
let total_functions = json["results"]["total_functions"].as_u64().unwrap_or(0);
assert!(
total_functions >= 1,
"expected analyzer to see >=1 function, got {total_functions}; json={json}"
);
Ok(())
}
#[tokio::test]
async fn test_generate_context_walks_directory() -> Result<()> {
let temp_dir = TempDir::new()?;
fs::write(temp_dir.path().join("a.rs"), "pub fn alpha() {}\n")?;
fs::write(temp_dir.path().join("b.rs"), "pub struct Beta;\n")?;
let paths = vec![temp_dir.path().to_path_buf()];
let json = generate_context(&paths, None, false).await.unwrap();
let total = json["context"]["total_files"].as_u64().unwrap_or(0);
assert!(
total >= 2,
"expected >=2 files in context, got {total}; json={json}"
);
Ok(())
}
/// Builds a small three-file Rust project layout inside `temp`:
/// a root-level `root.rs`, `src/lib.rs`, and a nested `src/nested/deep.rs`.
/// Returns the three file paths in that order, for use by the glob tests.
fn make_rust_project(temp: &TempDir) -> Result<(PathBuf, PathBuf, PathBuf)> {
    let root_file = temp.path().join("root.rs");
    fs::write(&root_file, "fn root_fn() { let _ = 1 + 1; }")?;
    let src_dir = temp.path().join("src");
    fs::create_dir_all(&src_dir)?;
    let lib_file = src_dir.join("lib.rs");
    fs::write(&lib_file, "pub fn lib_fn() { if true { 1 } else { 2 }; }")?;
    let nested_dir = src_dir.join("nested");
    fs::create_dir_all(&nested_dir)?;
    let deep_file = nested_dir.join("deep.rs");
    fs::write(&deep_file, "pub fn deep_fn() { for i in 0..3 { let _ = i; } }")?;
    Ok((root_file, lib_file, deep_file))
}
// Glob expansion via `resolve_paths_with_globs`, exercised against the
// three-file project built by `make_rust_project`.
#[test]
fn test_resolve_paths_with_globs_recursive_star_star() -> Result<()> {
let temp = TempDir::new()?;
make_rust_project(&temp)?;
let pattern = temp.path().join("**/*.rs");
let resolved = resolve_paths_with_globs(&[pattern]);
let rs_count = resolved
.iter()
.filter(|p| p.extension().and_then(|e| e.to_str()) == Some("rs"))
.count();
// Recursive glob must find all three fixture files.
assert_eq!(
rs_count, 3,
"expected 3 .rs files from **/*.rs, got {rs_count}: {resolved:?}"
);
Ok(())
}
// `*.rs` (no `**`) must match at least the top-level file.
#[test]
fn test_resolve_paths_with_globs_shallow_star() -> Result<()> {
let temp = TempDir::new()?;
make_rust_project(&temp)?;
let pattern = temp.path().join("*.rs");
let resolved = resolve_paths_with_globs(&[pattern]);
let rs_files: Vec<_> = resolved
.iter()
.filter(|p| p.extension().and_then(|e| e.to_str()) == Some("rs"))
.collect();
assert!(
!rs_files.is_empty(),
"expected at least 1 .rs file from *.rs, got 0: {resolved:?}"
);
Ok(())
}
// A bare `dir/**` pattern must resolve to at least one entry.
#[test]
fn test_resolve_paths_with_globs_dir_star_star() -> Result<()> {
let temp = TempDir::new()?;
make_rust_project(&temp)?;
let pattern = temp.path().join("src").join("**");
let resolved = resolve_paths_with_globs(&[pattern]);
assert!(
!resolved.is_empty(),
"expected src/** to resolve to at least one entry, got {resolved:?}"
);
Ok(())
}
// `src/**/*.rs` matches only files under src: lib.rs and nested/deep.rs.
#[test]
fn test_resolve_paths_with_globs_dir_star_star_ext() -> Result<()> {
let temp = TempDir::new()?;
make_rust_project(&temp)?;
let pattern = temp.path().join("src").join("**").join("*.rs");
let resolved = resolve_paths_with_globs(&[pattern]);
let rs_files: Vec<_> = resolved
.iter()
.filter(|p| p.extension().and_then(|e| e.to_str()) == Some("rs"))
.collect();
assert_eq!(
rs_files.len(),
2,
"expected 2 .rs files from src/**/*.rs (lib + deep), got {}: {resolved:?}",
rs_files.len()
);
Ok(())
}
// A plain path with no glob metacharacters passes through unchanged.
#[test]
fn test_resolve_paths_with_globs_plain_path_passthrough() -> Result<()> {
let temp = TempDir::new()?;
let (root, _, _) = make_rust_project(&temp)?;
let resolved = resolve_paths_with_globs(std::slice::from_ref(&root));
assert_eq!(resolved.len(), 1);
assert_eq!(resolved[0], root);
Ok(())
}
// A pattern matching nothing is dropped, not passed through verbatim.
#[test]
fn test_resolve_paths_with_globs_no_match_drops_pattern() {
let pattern = PathBuf::from("/tmp/pmat-r21-4-definitely-does-not-exist-*.rs");
let resolved = resolve_paths_with_globs(&[pattern]);
assert!(resolved.is_empty(), "expected empty, got {resolved:?}");
}
// End-to-end: glob patterns fed directly to the analyses expand before
// the directory walk.
#[tokio::test]
async fn test_analyze_complexity_with_double_star_glob() -> Result<()> {
let temp = TempDir::new()?;
make_rust_project(&temp)?;
let pattern = temp.path().join("**/*.rs");
let json = analyze_complexity(&[pattern], None, None).await?;
let total_files = json["results"]["total_files"].as_u64().unwrap_or(0);
assert!(
total_files >= 3,
"expected >=3 files from **/*.rs, got {total_files}; json={json}"
);
Ok(())
}
#[tokio::test]
async fn test_analyze_satd_with_double_star_glob() -> Result<()> {
// Two SATD markers, one at the root and one nested, so the count
// proves the glob reached both directory levels.
let temp = TempDir::new()?;
fs::write(
temp.path().join("root.rs"),
"// TODO: top-level debt\nfn root_fn() {}",
)?;
let src = temp.path().join("src");
fs::create_dir_all(&src)?;
fs::write(
src.join("nested.rs"),
"// FIXME: nested debt\nfn nested_fn() {}",
)?;
let pattern = temp.path().join("**/*.rs");
let json = analyze_satd(&[pattern], false).await?;
let total = json["results"]["total_satd"].as_u64().unwrap_or(0);
assert!(
total >= 2,
"expected >=2 SATD entries from **/*.rs walk, got {total}; json={json}"
);
Ok(())
}
#[tokio::test]
async fn test_analyze_complexity_with_dir_star_star_glob() -> Result<()> {
let temp = TempDir::new()?;
make_rust_project(&temp)?;
let pattern = temp.path().join("src").join("**").join("*.rs");
let json = analyze_complexity(&[pattern], None, None).await?;
let total_files = json["results"]["total_files"].as_u64().unwrap_or(0);
assert!(
total_files >= 2,
"expected >=2 files from src/**/*.rs, got {total_files}; json={json}"
);
Ok(())
}
}