use crate::config::{load_project_config, ProjectConfig};
use crate::detectors::{
default_detectors_with_config, DetectorEngine, Detector, IncrementalCache,
VotingEngine, VotingStrategy, ConfidenceMethod, SeverityResolution,
};
use crate::git;
use crate::graph::{GraphStore, CodeNode, CodeEdge, NodeKind};
use crate::models::{Finding, FindingsSummary, HealthReport, Severity};
use crate::parsers::{parse_file, ParseResult};
use crate::reporters;
use anyhow::{Context, Result};
use console::style;
use ignore::WalkBuilder;
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use std::collections::HashSet;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::sync::Arc;
use std::time::Instant;
/// File extensions the analysis pipeline knows how to parse.
/// Kept in sync with `detect_language` and the parser registry.
const SUPPORTED_EXTENSIONS: &[&str] = &[
    "py", "pyi", // Python
    "ts", "tsx", // TypeScript
    "js", "jsx", "mjs", // JavaScript
    "rs", // Rust
    "go", // Go
    "java", // Java
    "c", "h", // C
    "cpp", "hpp", "cc", // C++
    "cs", // C#
    "kt", "kts", // Kotlin
    "rb", // Ruby
    "php", // PHP
    "swift", // Swift
];
/// Run a full repository health analysis end-to-end:
/// discover files -> parse (parallel) -> build code graph -> git enrichment
/// (background thread) -> detectors -> voting/consolidation -> scoring ->
/// report output.
///
/// CLI options left at their built-in defaults can be overridden by the
/// project config. When `fail_on` is set and findings at or above that
/// severity exist, the process exits with status 1.
pub fn run(
    path: &Path,
    format: &str,
    output_path: Option<&Path>,
    severity: Option<String>,
    top: Option<usize>,
    page: usize,
    per_page: usize,
    skip_detector: Vec<String>,
    thorough: bool,
    no_git: bool,
    workers: usize,
    fail_on: Option<String>,
    no_emoji: bool,
    incremental: bool,
    since: Option<String>,
) -> Result<()> {
    let start_time = Instant::now();
    // Resolve to an absolute path and verify it is an existing directory.
    let repo_path = path
        .canonicalize()
        .with_context(|| format!("Repository path does not exist: {}", path.display()))?;
    if !repo_path.is_dir() {
        anyhow::bail!("Path is not a directory: {}", repo_path.display());
    }
    // Project config supplies defaults for flags the user did not set.
    let project_config = load_project_config(&repo_path);
    let no_emoji = no_emoji || project_config.defaults.no_emoji.unwrap_or(false);
    let thorough = thorough || project_config.defaults.thorough.unwrap_or(false);
    let no_git = no_git || project_config.defaults.no_git.unwrap_or(false);
    // NOTE(review): 8 and 20 appear to be the CLI defaults for workers and
    // per_page; a user explicitly passing those exact values is
    // indistinguishable from "unset" and will be overridden by config.
    let workers = if workers == 8 {
        project_config.defaults.workers.unwrap_or(workers)
    } else {
        workers
    };
    let per_page = if per_page == 20 {
        project_config.defaults.per_page.unwrap_or(per_page)
    } else {
        per_page
    };
    let fail_on = fail_on.or_else(|| project_config.defaults.fail_on.clone());
    // Emoji prefixes are dropped entirely in --no-emoji mode.
    let icon_analyze = if no_emoji { "" } else { "🎼 " };
    let icon_search = if no_emoji { "" } else { "🔍 " };
    println!("\n{}Repotoire Analysis\n", style(icon_analyze).bold());
    println!(
        "{}Analyzing: {}\n",
        style(icon_search).bold(),
        style(repo_path.display()).cyan()
    );
    // Per-repo cache directory holds the graph DB, incremental cache, and reports.
    let repotoire_dir = crate::cache::ensure_cache_dir(&repo_path)
        .with_context(|| "Failed to create cache directory")?;
    let incremental_cache_dir = repotoire_dir.join("incremental");
    let mut incremental_cache = IncrementalCache::new(&incremental_cache_dir);
    // Either --incremental or --since triggers the cached-findings path below.
    let is_incremental_mode = incremental || since.is_some();
    let db_path = repotoire_dir.join("graph_db");
    let icon_graph = if no_emoji { "" } else { "🕸️ " };
    println!("{}Initializing graph database...", style(icon_graph).bold());
    let graph = Arc::new(GraphStore::new(&db_path).with_context(|| "Failed to initialize graph database")?);
    // NOTE(review): `graph_ref` is never used below — candidate for removal.
    let graph_ref = &graph;
    // Shared progress UI: one spinner style for indeterminate phases, one bar
    // style for counted phases.
    let multi = MultiProgress::new();
    let spinner_style = ProgressStyle::default_spinner()
        .tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈ ")
        .template("{spinner:.green} {msg}")
        .unwrap();
    let bar_style = ProgressStyle::default_bar()
        .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
        .unwrap()
        .progress_chars("█▓▒░ ");
    let walk_spinner = multi.add(ProgressBar::new_spinner());
    walk_spinner.set_style(spinner_style.clone());
    // File discovery, three-way:
    //  - --since <commit>: parse only files changed since that commit,
    //  - --incremental:    parse only files changed since the last run,
    //  - otherwise:        parse everything.
    // In the first two cases, findings for unchanged files come from the
    // incremental cache.
    let (all_files, files_to_parse, cached_findings) = if let Some(ref commit) = since {
        walk_spinner.set_message(format!("Finding files changed since {}...", commit));
        walk_spinner.enable_steady_tick(std::time::Duration::from_millis(100));
        let changed = get_changed_files_since(&repo_path, commit)?;
        let all = collect_source_files(&repo_path)?;
        walk_spinner.finish_with_message(format!(
            "{}Found {} changed files (since {}) out of {} total",
            style("✓ ").green(),
            style(changed.len()).cyan(),
            style(commit).yellow(),
            style(all.len()).dim()
        ));
        let unchanged: Vec<_> = all.iter()
            .filter(|f| !changed.contains(f))
            .cloned()
            .collect();
        let mut cached: Vec<Finding> = Vec::new();
        for file in &unchanged {
            cached.extend(incremental_cache.get_cached_findings(file));
        }
        (all, changed, cached)
    } else if incremental {
        walk_spinner.set_message("Discovering source files (incremental mode)...");
        walk_spinner.enable_steady_tick(std::time::Duration::from_millis(100));
        let all = collect_source_files(&repo_path)?;
        // The cache decides which files changed (content hash / mtime live
        // in IncrementalCache — see its implementation).
        let changed = incremental_cache.get_changed_files(&all);
        let cache_stats = incremental_cache.get_stats();
        walk_spinner.finish_with_message(format!(
            "{}Found {} changed files out of {} total ({} cached)",
            style("✓ ").green(),
            style(changed.len()).cyan(),
            style(all.len()).dim(),
            style(cache_stats.cached_files).dim()
        ));
        let unchanged: Vec<_> = all.iter()
            .filter(|f| !changed.contains(f))
            .cloned()
            .collect();
        let mut cached: Vec<Finding> = Vec::new();
        for file in &unchanged {
            cached.extend(incremental_cache.get_cached_findings(file));
        }
        (all, changed, cached)
    } else {
        // Full analysis: every discovered file is (re)parsed.
        walk_spinner.set_message("Discovering source files...");
        walk_spinner.enable_steady_tick(std::time::Duration::from_millis(100));
        let files = collect_source_files(&repo_path)?;
        walk_spinner.finish_with_message(format!(
            "{}Found {} source files",
            style("✓ ").green(),
            style(files.len()).cyan()
        ));
        (files.clone(), files, Vec::new())
    };
    // From here on, `files` means "files to parse this run".
    let files = files_to_parse;
    if all_files.is_empty() {
        println!("\n{}No source files found to analyze.", style("⚠️ ").yellow());
        return Ok(());
    }
    if files.is_empty() && is_incremental_mode {
        println!("\n{}No files changed since last run. Using cached results.", style("✓ ").green());
    }
    // ---- Phase: parallel parse -------------------------------------------
    let parse_bar = multi.add(ProgressBar::new(files.len() as u64));
    parse_bar.set_style(bar_style.clone());
    let parse_msg = if is_incremental_mode {
        "Parsing changed files (parallel)..."
    } else {
        "Parsing files (parallel)..."
    };
    parse_bar.set_message(parse_msg);
    use rayon::prelude::*;
    use std::sync::atomic::{AtomicUsize, Ordering};
    let counter = AtomicUsize::new(0);
    let total_files = files.len();
    // Parse failures are logged and skipped rather than aborting the run.
    // The bar is only updated every 100 files to limit contention.
    let parse_results: Vec<(std::path::PathBuf, ParseResult)> = files
        .par_iter()
        .filter_map(|file_path| {
            let count = counter.fetch_add(1, Ordering::Relaxed);
            if count % 100 == 0 {
                parse_bar.set_position(count as u64);
            }
            match parse_file(file_path) {
                Ok(result) => Some((file_path.clone(), result)),
                Err(e) => {
                    tracing::warn!("Failed to parse {}: {}", file_path.display(), e);
                    None
                }
            }
        })
        .collect();
    let total_functions: usize = parse_results.iter().map(|(_, r)| r.functions.len()).sum();
    let total_classes: usize = parse_results.iter().map(|(_, r)| r.classes.len()).sum();
    parse_bar.finish_with_message(format!(
        "{}Parsed {} files ({} functions, {} classes)",
        style("✓ ").green(),
        style(total_files).cyan(),
        style(total_functions).cyan(),
        style(total_classes).cyan(),
    ));
    // ---- Phase: build the code graph -------------------------------------
    let graph_bar = multi.add(ProgressBar::new(parse_results.len() as u64));
    graph_bar.set_style(bar_style.clone());
    graph_bar.set_message("Building code graph...");
    let mut file_nodes = Vec::with_capacity(parse_results.len());
    let mut func_nodes = Vec::with_capacity(total_functions);
    let mut class_nodes = Vec::with_capacity(total_classes);
    let mut edges: Vec<(String, String, CodeEdge)> = Vec::new();
    // Fallback lookup: bare function name -> qualified name, across all files.
    // NOTE(review): on duplicate names the last parsed file wins, so
    // cross-file call edges for common names may point at the wrong function.
    let mut global_func_map: std::collections::HashMap<String, String> = std::collections::HashMap::new();
    for (_, result) in &parse_results {
        for func in &result.functions {
            global_func_map.insert(func.name.clone(), func.qualified_name.clone());
        }
    }
    for (file_path, result) in &parse_results {
        // Graph node ids use repo-relative paths.
        let relative_path = file_path.strip_prefix(&repo_path).unwrap_or(file_path);
        let relative_str = relative_path.display().to_string();
        let language = detect_language(file_path);
        let loc = count_lines(file_path).unwrap_or(0);
        file_nodes.push(
            CodeNode::new(NodeKind::File, &relative_str, &relative_str)
                .with_qualified_name(&relative_str)
                .with_language(&language)
                .with_property("loc", loc as i64)
        );
        for func in &result.functions {
            // Guard against bad line ranges from the parser.
            let loc = if func.line_end >= func.line_start {
                func.line_end - func.line_start + 1
            } else { 1 };
            let complexity = func.complexity.unwrap_or(1);
            func_nodes.push(
                CodeNode::new(NodeKind::Function, &func.name, &relative_str)
                    .with_qualified_name(&func.qualified_name)
                    .with_lines(func.line_start, func.line_end)
                    .with_property("is_async", func.is_async)
                    .with_property("complexity", complexity as i64)
                    .with_property("loc", loc as i64)
            );
            edges.push((relative_str.clone(), func.qualified_name.clone(), CodeEdge::contains()));
        }
        for class in &result.classes {
            class_nodes.push(
                CodeNode::new(NodeKind::Class, &class.name, &relative_str)
                    .with_qualified_name(&class.qualified_name)
                    .with_lines(class.line_start, class.line_end)
                    .with_property("methodCount", class.methods.len() as i64)
            );
            edges.push((relative_str.clone(), class.qualified_name.clone(), CodeEdge::contains()));
        }
        // Resolve call edges. Resolution order:
        //   1. a function with the same bare name in the current file,
        //   2. a file whose stem (or "/<module>.rs" path) matches the callee's
        //      module segment,
        //   3. the global bare-name map,
        // and if all fail the call edge is dropped.
        for (caller, callee) in &result.calls {
            // "a::b::name" -> parts = ["name", "a::b"].
            let parts: Vec<&str> = callee.rsplitn(2, "::").collect();
            let callee_name = parts[0];
            let callee_module = if parts.len() > 1 { Some(parts[1]) } else { None };
            // Also strip a trailing "obj.method" style prefix.
            let callee_name = callee_name.rsplit('.').next().unwrap_or(callee_name);
            let callee_qn = if let Some(callee_func) = result.functions.iter().find(|f| f.name == callee_name) {
                callee_func.qualified_name.clone()
            } else {
                let mut found = None;
                if let Some(module) = callee_module {
                    for (other_path, other_result) in &parse_results {
                        let other_relative = other_path.strip_prefix(&repo_path).unwrap_or(other_path);
                        let other_str = other_relative.display().to_string();
                        let file_stem = other_relative.file_stem()
                            .and_then(|s| s.to_str())
                            .unwrap_or("");
                        if file_stem == module || other_str.contains(&format!("/{}.rs", module)) {
                            if let Some(func) = other_result.functions.iter().find(|f| f.name == callee_name) {
                                found = Some(func.qualified_name.clone());
                                break;
                            }
                        }
                    }
                }
                if found.is_none() {
                    found = global_func_map.get(callee_name).cloned();
                }
                match found {
                    Some(qn) => qn,
                    None => continue,
                }
            };
            edges.push((caller.clone(), callee_qn, CodeEdge::calls()));
        }
        // Resolve import edges by heuristically matching the import path
        // against other files' paths (TS/JS, Rust, and Python layouts).
        // First match wins; unmatched imports (std/third-party) are skipped.
        for import_info in &result.imports {
            let clean_import = import_info.path
                .trim_start_matches("./")
                .trim_start_matches("../")
                .trim_start_matches("crate::")
                .trim_start_matches("super::");
            let module_parts: Vec<&str> = clean_import.split("::").collect();
            let first_module = module_parts.first().copied().unwrap_or("");
            for (other_file, _) in &parse_results {
                let other_relative = other_file.strip_prefix(&repo_path).unwrap_or(other_file);
                let other_str = other_relative.display().to_string();
                // Never self-import.
                if other_str == relative_str {
                    continue;
                }
                let other_name = other_relative.file_stem()
                    .and_then(|s| s.to_str())
                    .unwrap_or("");
                let python_path = clean_import.replace('.', "/");
                let matches =
                    other_str.contains(clean_import) ||
                    (clean_import == other_name) ||
                    other_str.ends_with(&format!("{}.ts", clean_import)) ||
                    other_str.ends_with(&format!("{}.tsx", clean_import)) ||
                    other_str.ends_with(&format!("{}.js", clean_import)) ||
                    other_str.ends_with(&format!("{}/index.ts", clean_import)) ||
                    other_str.ends_with(&format!("{}.rs", clean_import.replace("::", "/"))) ||
                    other_str.ends_with(&format!("{}/mod.rs", first_module)) ||
                    (other_name == first_module && other_str.ends_with(".rs")) ||
                    other_str.ends_with(&format!("{}.py", python_path)) ||
                    other_str.contains(&format!("{}/", python_path)) ||
                    other_str.ends_with(&format!("{}/__init__.py", python_path));
                if matches {
                    let import_edge = CodeEdge::imports()
                        .with_property("is_type_only", import_info.is_type_only);
                    edges.push((relative_str.clone(), other_str, import_edge));
                    break;
                }
            }
        }
        graph_bar.inc(1);
    }
    // Bulk-insert everything, then persist the graph to disk.
    graph_bar.set_message("Inserting nodes...");
    graph.add_nodes_batch(file_nodes);
    graph.add_nodes_batch(func_nodes);
    graph.add_nodes_batch(class_nodes);
    graph_bar.set_message("Inserting edges...");
    graph.add_edges_batch(edges);
    graph_bar.finish_with_message(format!("{}Built code graph", style("✓ ").green(),));
    graph.save().with_context(|| "Failed to save graph database")?;
    // Snapshot of graph-level stats for other commands to read.
    let graph_stats = serde_json::json!({
        "total_files": graph.get_files().len(),
        "total_functions": graph.get_functions().len(),
        "total_classes": graph.get_classes().len(),
        "total_nodes": graph.node_count(),
        "total_edges": graph.edge_count(),
        "calls": graph.get_calls().len(),
        "imports": graph.get_imports().len(),
    });
    let stats_path = crate::cache::get_graph_stats_path(&repo_path);
    std::fs::write(&stats_path, serde_json::to_string_pretty(&graph_stats)?)?;
    crate::cache::warm_global_cache(&repo_path, SUPPORTED_EXTENSIONS);
    // ---- Phase: git enrichment, run on a background thread so detectors
    // can proceed concurrently; joined after voting completes.
    let git_result = if !no_git {
        let git_spinner = multi.add(ProgressBar::new_spinner());
        git_spinner.set_style(spinner_style.clone());
        git_spinner.set_message("Enriching with git history (async)...");
        git_spinner.enable_steady_tick(std::time::Duration::from_millis(100));
        let repo_path_clone = repo_path.clone();
        let graph_clone = Arc::clone(&graph);
        let git_handle = std::thread::spawn(move || {
            git::enrichment::enrich_graph_with_git(&repo_path_clone, &graph_clone, None)
        });
        Some((git_handle, git_spinner))
    } else {
        println!("{}Skipping git enrichment (--no-git)", style("⏭ ").dim());
        None
    };
    // ---- Phase: detectors -------------------------------------------------
    println!("\n{}Running detectors...", style("🕵️ ").bold());
    let mut engine = DetectorEngine::new(workers);
    let skip_set: HashSet<&str> = skip_detector.iter().map(|s| s.as_str()).collect();
    for detector in default_detectors_with_config(&repo_path, &project_config) {
        let name = detector.name();
        if !skip_set.contains(name) {
            engine.register(detector);
        }
    }
    // --thorough adds external (slower) detectors; these bypass --skip-detector.
    if thorough {
        let external = crate::detectors::all_external_detectors(&repo_path);
        let external_count = external.len();
        for detector in external {
            engine.register(detector);
        }
        tracing::info!("Thorough mode: added {} external detectors ({} total)", external_count, engine.detector_count());
    }
    let detector_bar = multi.add(ProgressBar::new_spinner());
    detector_bar.set_style(spinner_style.clone());
    detector_bar.set_message("Running detectors...");
    detector_bar.enable_steady_tick(std::time::Duration::from_millis(100));
    let findings = engine.run(&graph)?;
    detector_bar.finish_with_message(format!(
        "{}Ran {} detectors, found {} raw issues",
        style("✓ ").green(),
        style(engine.detector_count()).cyan(),
        style(findings.len()).cyan(),
    ));
    // ---- Phase: voting — dedupe/merge raw findings across detectors -------
    let voting_spinner = multi.add(ProgressBar::new_spinner());
    voting_spinner.set_style(spinner_style.clone());
    voting_spinner.set_message("Consolidating findings with voting engine...");
    voting_spinner.enable_steady_tick(std::time::Duration::from_millis(100));
    // 0.5 = minimum confidence, 2 = consensus threshold.
    // NOTE(review): presumably — confirm against VotingEngine::with_config.
    let voting_engine = VotingEngine::with_config(
        VotingStrategy::Weighted,
        ConfidenceMethod::Bayesian,
        SeverityResolution::Highest,
        0.5, 2, );
    let (consolidated_findings, voting_stats) = voting_engine.vote(findings);
    let mut findings = consolidated_findings;
    let cached_findings_count = cached_findings.len();
    // In incremental mode, merge cached findings for unchanged files back in.
    if is_incremental_mode && !cached_findings.is_empty() {
        findings.extend(cached_findings);
        tracing::debug!(
            "Merged {} cached findings with {} new findings",
            cached_findings_count,
            voting_stats.total_output
        );
    }
    // Cache per-file findings for the files parsed this run (post-voting,
    // pre-severity-filter) so the next incremental run can reuse them.
    if is_incremental_mode {
        for file_path in &files {
            let file_findings: Vec<_> = findings
                .iter()
                .filter(|f| f.affected_files.iter().any(|af| af == file_path))
                .cloned()
                .collect();
            incremental_cache.cache_findings(file_path, &file_findings);
        }
        if let Err(e) = incremental_cache.save_cache() {
            tracing::warn!("Failed to save incremental cache: {}", e);
        }
    }
    voting_spinner.finish_with_message(format!(
        "{}Consolidated {} -> {} findings ({} merged, {} rejected{})",
        style("✓ ").green(),
        style(voting_stats.total_input).cyan(),
        style(voting_stats.total_output).cyan(),
        style(voting_stats.boosted_by_consensus).dim(),
        style(voting_stats.rejected_low_confidence).dim(),
        if cached_findings_count > 0 {
            format!(", {} from cache", style(cached_findings_count).dim())
        } else {
            String::new()
        }
    ));
    // Join the background git-enrichment thread; failures are non-fatal.
    if let Some((git_handle, git_spinner)) = git_result {
        match git_handle.join() {
            Ok(Ok(stats)) => {
                if stats.functions_enriched > 0 || stats.classes_enriched > 0 {
                    let cache_info = if stats.cache_hits > 0 {
                        format!(" ({} cached)", stats.cache_hits)
                    } else {
                        String::new()
                    };
                    git_spinner.finish_with_message(format!(
                        "{}Enriched {} functions, {} classes{}",
                        style("✓ ").green(),
                        style(stats.functions_enriched).cyan(),
                        style(stats.classes_enriched).cyan(),
                        style(cache_info).dim(),
                    ));
                } else {
                    git_spinner.finish_with_message(format!(
                        "{}No git history to enrich",
                        style("- ").dim(),
                    ));
                }
            }
            Ok(Err(e)) => {
                git_spinner.finish_with_message(format!(
                    "{}Git enrichment skipped: {}",
                    style("⚠ ").yellow(),
                    e
                ));
            }
            Err(_) => {
                // The enrichment thread panicked.
                git_spinner.finish_with_message(format!(
                    "{}Git enrichment failed",
                    style("⚠ ").yellow(),
                ));
            }
        }
    }
    // Per-detector config: drop findings from disabled detectors and apply
    // per-detector severity overrides.
    if !project_config.detectors.is_empty() {
        let detector_configs = &project_config.detectors;
        findings.retain(|f| {
            let detector_name = crate::config::normalize_detector_name(&f.detector);
            if let Some(config) = detector_configs.get(&detector_name) {
                if let Some(false) = config.enabled {
                    return false;
                }
            }
            true
        });
        for finding in &mut findings {
            let detector_name = crate::config::normalize_detector_name(&finding.detector);
            if let Some(config) = detector_configs.get(&detector_name) {
                if let Some(ref sev) = config.severity {
                    finding.severity = parse_severity(sev);
                }
            }
        }
    }
    // Scores and the grade-capping summary are computed over ALL findings,
    // before any --severity / --top display filtering.
    // NOTE(review): in incremental mode `files.len()` is the changed-file
    // count, not the repo-wide count — scores may differ from a full run;
    // confirm this is intended.
    let all_findings_summary = FindingsSummary::from_findings(&findings);
    let (overall_score, structure_score, quality_score, architecture_score) =
        calculate_health_scores(&findings, files.len(), total_functions, total_classes, &project_config);
    // Display-only filtering: minimum severity, then sort, then --top cap.
    if let Some(min_severity) = &severity {
        let min = parse_severity(min_severity);
        findings.retain(|f| f.severity >= min);
    }
    findings.sort_by(|a, b| b.severity.cmp(&a.severity));
    if let Some(n) = top {
        findings.truncate(n);
    }
    let display_summary = FindingsSummary::from_findings(&findings);
    let displayed_findings = findings.len();
    // Keep an unpaginated copy for the on-disk results cache.
    let all_findings = findings.clone();
    // Pagination: per_page == 0 disables it; page is clamped to valid range.
    let (paginated_findings, pagination_info) = if per_page > 0 {
        let total_pages = (displayed_findings + per_page - 1) / per_page;
        let page = page.max(1).min(total_pages.max(1));
        let start = (page - 1) * per_page;
        let end = (start + per_page).min(displayed_findings);
        let paginated: Vec<_> = findings[start..end].to_vec();
        (
            paginated,
            Some((page, total_pages, per_page, displayed_findings)),
        )
    } else {
        (findings, None)
    };
    // Grade caps: any critical finding caps the grade at C; otherwise any
    // high finding caps it at B.
    let mut grade = HealthReport::grade_from_score(overall_score);
    if all_findings_summary.critical > 0 {
        if grade == "A" || grade == "B" {
            grade = "C".to_string();
        }
    } else if all_findings_summary.high > 0 {
        if grade == "A" {
            grade = "B".to_string();
        }
    }
    let report = HealthReport {
        overall_score,
        grade: grade.clone(),
        structure_score,
        quality_score,
        architecture_score: Some(architecture_score),
        findings: paginated_findings,
        findings_summary: display_summary,
        total_files: files.len(),
        total_functions,
        total_classes,
    };
    // ---- Phase: report output --------------------------------------------
    let output = reporters::report(&report, format)?;
    // File-oriented formats always go to disk, defaulting into the cache dir.
    let write_to_file = output_path.is_some()
        || matches!(format, "html" | "sarif" | "markdown" | "md");
    if write_to_file {
        let out_path = if let Some(p) = output_path {
            p.to_path_buf()
        } else {
            let ext = match format {
                "html" => "html",
                "sarif" => "sarif.json",
                "markdown" | "md" => "md",
                "json" => "json",
                _ => "txt",
            };
            repotoire_dir.join(format!("report.{}", ext))
        };
        std::fs::write(&out_path, &output)?;
        println!(
            "\n{}Report written to: {}",
            style("📄 ").bold(),
            style(out_path.display()).cyan()
        );
    } else {
        println!();
        println!("{}", output);
    }
    // Persist results so follow-up commands can read the last run.
    cache_results(&repotoire_dir, &report, &all_findings)?;
    if let Some((current_page, total_pages, per_page, total)) = pagination_info {
        println!(
            "\n{}Showing page {} of {} ({} findings per page, {} total)",
            style("📑 ").bold(),
            style(current_page).cyan(),
            style(total_pages).cyan(),
            style(per_page).dim(),
            style(total).cyan(),
        );
        if current_page < total_pages {
            println!(
                " Use {} to see more",
                style(format!("--page {}", current_page + 1)).yellow()
            );
        }
    }
    let elapsed = start_time.elapsed();
    let icon_done = if no_emoji { "" } else { "✨ " };
    println!(
        "\n{}Analysis complete in {:.2}s",
        style(icon_done).bold(),
        elapsed.as_secs_f64()
    );
    // --fail-on: exit(1) when findings at or above the threshold exist.
    // Note this checks the DISPLAY summary, so --severity/--top filtering
    // affects the result; unknown thresholds never fail.
    if let Some(ref threshold) = fail_on {
        let should_fail = match threshold.to_lowercase().as_str() {
            "critical" => report.findings_summary.critical > 0,
            "high" => report.findings_summary.critical > 0 || report.findings_summary.high > 0,
            "medium" => {
                report.findings_summary.critical > 0
                    || report.findings_summary.high > 0
                    || report.findings_summary.medium > 0
            }
            "low" => {
                report.findings_summary.critical > 0
                    || report.findings_summary.high > 0
                    || report.findings_summary.medium > 0
                    || report.findings_summary.low > 0
            }
            _ => false,
        };
        if should_fail {
            eprintln!("Failing due to --fail-on={} threshold", threshold);
            std::process::exit(1);
        }
    }
    Ok(())
}
/// Walk the repository and return every file with a supported extension.
///
/// Honors .gitignore, global git excludes, hidden-file filtering, and a
/// project-specific `.repotoireignore`; works even outside a git repo
/// (`require_git(false)`).
fn collect_source_files(repo_path: &Path) -> Result<Vec<std::path::PathBuf>> {
    let mut builder = WalkBuilder::new(repo_path);
    builder
        .hidden(true)
        .git_ignore(true)
        .git_global(true)
        .git_exclude(true)
        .require_git(false)
        .add_custom_ignore_filename(".repotoireignore");
    let mut sources = Vec::new();
    for entry in builder.build().flatten() {
        let candidate = entry.path();
        if !candidate.is_file() {
            continue;
        }
        let supported = candidate
            .extension()
            .and_then(|e| e.to_str())
            .map(|ext| SUPPORTED_EXTENSIONS.contains(&ext))
            .unwrap_or(false);
        if supported {
            sources.push(candidate.to_path_buf());
        }
    }
    Ok(sources)
}
/// Map a file's extension to a human-readable language name
/// ("Unknown" for anything not in `SUPPORTED_EXTENSIONS`).
fn detect_language(path: &Path) -> String {
    // Extension groups paired with their display names.
    const LANGUAGE_BY_EXT: &[(&[&str], &str)] = &[
        (&["py", "pyi"], "Python"),
        (&["ts", "tsx"], "TypeScript"),
        (&["js", "jsx", "mjs"], "JavaScript"),
        (&["rs"], "Rust"),
        (&["go"], "Go"),
        (&["java"], "Java"),
        (&["c", "h"], "C"),
        (&["cpp", "hpp", "cc"], "C++"),
        (&["cs"], "C#"),
        (&["kt", "kts"], "Kotlin"),
        (&["rb"], "Ruby"),
        (&["php"], "PHP"),
        (&["swift"], "Swift"),
    ];
    let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
    LANGUAGE_BY_EXT
        .iter()
        .find(|(exts, _)| exts.contains(&ext))
        .map(|(_, lang)| *lang)
        .unwrap_or("Unknown")
        .to_string()
}
/// Count the lines in a file, reading it as UTF-8.
/// Errors if the file cannot be read or is not valid UTF-8.
fn count_lines(path: &Path) -> Result<usize> {
    Ok(std::fs::read_to_string(path)?.lines().count())
}
/// Render a path for display, abbreviating long ones from the left with
/// a `...` prefix so the result is at most `max_len` bytes.
///
/// Fixes two panics in the original:
/// - `max_len < 3` underflowed `s.len() - max_len + 3` (index out of range);
/// - byte slicing could split a multi-byte UTF-8 character.
/// The kept suffix is now snapped forward to the next char boundary, so the
/// result may be slightly shorter than `max_len` for non-ASCII paths.
fn truncate_path(path: &Path, max_len: usize) -> String {
    let s = path.display().to_string();
    if s.len() <= max_len {
        return s;
    }
    // Reserve 3 bytes for the "..." marker; keep the tail of the path.
    let keep = max_len.saturating_sub(3);
    let mut start = s.len() - keep;
    // Advance to a valid UTF-8 boundary so slicing cannot panic.
    while start < s.len() && !s.is_char_boundary(start) {
        start += 1;
    }
    format!("...{}", &s[start..])
}
/// Escape a string for embedding in a quoted Cypher literal: backslashes,
/// both quote styles, and the common control characters are backslash-escaped.
fn escape_cypher(s: &str) -> String {
    let mut escaped = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            '\\' => escaped.push_str("\\\\"),
            '\'' => escaped.push_str("\\'"),
            '"' => escaped.push_str("\\\""),
            '\n' => escaped.push_str("\\n"),
            '\r' => escaped.push_str("\\r"),
            '\t' => escaped.push_str("\\t"),
            other => escaped.push(other),
        }
    }
    escaped
}
/// Parse a user-supplied severity name (case-insensitive).
/// Anything unrecognized falls back to `Severity::Info`.
fn parse_severity(s: &str) -> Severity {
    let lowered = s.to_lowercase();
    if lowered == "critical" {
        Severity::Critical
    } else if lowered == "high" {
        Severity::High
    } else if lowered == "medium" {
        Severity::Medium
    } else if lowered == "low" {
        Severity::Low
    } else {
        Severity::Info
    }
}
/// Compute the three pillar scores (structure, quality, architecture) plus
/// the weighted overall score from a finding list.
///
/// Each score starts at 100; every finding deducts an amount based on its
/// severity, dampened by the square root of repository size so large repos
/// are not penalized linearly. Security-related findings hit the quality
/// pillar at the configured multiplier; unclassified findings spread their
/// deduction evenly over all three pillars. Pillars are floored at 25,
/// the overall score at 5.
fn calculate_health_scores(
    findings: &[crate::models::Finding],
    total_files: usize,
    total_functions: usize,
    _total_classes: usize,
    project_config: &ProjectConfig,
) -> (f64, f64, f64, f64) {
    let mut structure_score: f64 = 100.0;
    let mut quality_score: f64 = 100.0;
    let mut architecture_score: f64 = 100.0;
    // sqrt dampening with a floor of 5 so tiny repos aren't over-penalized.
    let size_factor = ((total_files + total_functions) as f64).sqrt().max(5.0);
    let config_security_multiplier = project_config.scoring.security_multiplier;
    // Detector-name substrings that mark a finding as security-related.
    let security_markers = [
        "sql", "xss", "secret", "credential", "command", "path_traversal", "ssrf",
    ];
    for finding in findings {
        let severity_cost: f64 = match finding.severity {
            Severity::Critical => 10.0,
            Severity::High => 5.0,
            Severity::Medium => 1.5,
            Severity::Low => 0.3,
            Severity::Info => 0.0,
        };
        let size_adjusted = severity_cost / size_factor;
        let category = finding.category.as_deref().unwrap_or("");
        let detector = finding.detector.to_lowercase();
        let is_security = category.contains("security")
            || category.contains("inject")
            || security_markers.iter().any(|marker| detector.contains(marker))
            || finding.cwe_id.is_some();
        let security_multiplier = if is_security { config_security_multiplier } else { 1.0 };
        let effective_deduction = size_adjusted * security_multiplier;
        if is_security {
            quality_score -= effective_deduction;
        } else if category.contains("architect") || category.contains("bottleneck") || category.contains("circular") {
            architecture_score -= effective_deduction;
        } else if category.contains("complex") || category.contains("naming") || category.contains("readab") {
            structure_score -= effective_deduction;
        } else {
            // Unclassified: split evenly across the three pillars.
            quality_score -= effective_deduction / 3.0;
            structure_score -= effective_deduction / 3.0;
            architecture_score -= effective_deduction / 3.0;
        }
    }
    structure_score = structure_score.clamp(25.0, 100.0);
    quality_score = quality_score.clamp(25.0, 100.0);
    architecture_score = architecture_score.clamp(25.0, 100.0);
    // Overall is a configurable weighted blend of the pillars.
    let weights = &project_config.scoring.pillar_weights;
    let overall = structure_score * weights.structure
        + quality_score * weights.quality
        + architecture_score * weights.architecture;
    let overall = overall.max(5.0);
    (overall, structure_score, quality_score, architecture_score)
}
/// Strip machine-specific prefixes from a path for stable, shareable output:
/// `/tmp/<scratch-dir>/rest` becomes `rest`, and a `$HOME` prefix is removed.
/// Anything else is returned unchanged.
fn normalize_path(path: &Path) -> String {
    let raw = path.display().to_string();
    // /tmp/<first-component>/rest -> rest (only when there IS a rest).
    let tmp_relative = raw
        .strip_prefix("/tmp/")
        .and_then(|rest| rest.find('/').map(|i| rest[i + 1..].to_string()));
    if let Some(p) = tmp_relative {
        return p;
    }
    // $HOME/rest -> rest (leading slashes trimmed).
    let home_relative = std::env::var("HOME").ok().and_then(|home| {
        raw.strip_prefix(&home)
            .map(|rest| rest.trim_start_matches('/').to_string())
    });
    home_relative.unwrap_or(raw)
}
/// Persist the last run's results into the cache directory so follow-up
/// commands can reuse them without re-analyzing:
/// `last_health.json` (scores + counts) and `last_findings.json` (all
/// findings, with affected paths normalized via `normalize_path`).
fn cache_results(repotoire_dir: &Path, report: &HealthReport, all_findings: &[Finding]) -> Result<()> {
    // Health summary: scores, grade, and entity counts.
    let health_json = serde_json::json!({
        "health_score": report.overall_score,
        "structure_score": report.structure_score,
        "quality_score": report.quality_score,
        "architecture_score": report.architecture_score,
        "grade": report.grade,
        "total_files": report.total_files,
        "total_functions": report.total_functions,
        "total_classes": report.total_classes,
    });
    std::fs::write(
        repotoire_dir.join("last_health.json"),
        serde_json::to_string_pretty(&health_json)?,
    )?;
    // Serialize every finding; paths are normalized to be machine-independent.
    let serialized_findings: Vec<_> = all_findings
        .iter()
        .map(|f| {
            serde_json::json!({
                "id": f.id,
                "detector": f.detector,
                "title": f.title,
                "description": f.description,
                "severity": f.severity.to_string(),
                "affected_files": f.affected_files.iter().map(|p| normalize_path(p)).collect::<Vec<_>>(),
                "line_start": f.line_start,
                "line_end": f.line_end,
                "suggested_fix": f.suggested_fix,
                "category": f.category,
                "cwe_id": f.cwe_id,
                "why_it_matters": f.why_it_matters,
                "confidence": f.confidence,
            })
        })
        .collect();
    let findings_json = serde_json::json!({ "findings": serialized_findings });
    std::fs::write(
        repotoire_dir.join("last_findings.json"),
        serde_json::to_string_pretty(&findings_json)?,
    )?;
    tracing::debug!("Cached analysis results to {}", repotoire_dir.display());
    Ok(())
}
/// Convert a detector type name into the kebab-case key used in project
/// config: `GodClassDetector` -> `god-class`, `dead_code_detector` ->
/// `dead-code`. A dash is inserted before an uppercase letter that starts a
/// new word (not at position 0, not inside an acronym run), underscores
/// become dashes, and any trailing `-detector` suffix is trimmed.
fn normalize_detector_name_for_config(name: &str) -> String {
    let chars: Vec<char> = name.chars().collect();
    let mut kebab = String::with_capacity(name.len() + 4);
    for (i, c) in chars.iter().enumerate() {
        if *c == '_' {
            kebab.push('-');
        } else if c.is_uppercase() {
            // Word boundary: previous char exists and is not uppercase.
            if i > 0 && !chars[i - 1].is_uppercase() {
                kebab.push('-');
            }
            // Push only the first lowercase char, matching prior behavior for
            // the rare chars whose lowercase form expands to multiple chars.
            kebab.push(c.to_lowercase().next().unwrap());
        } else {
            kebab.push(*c);
        }
    }
    kebab.trim_end_matches("-detector").to_string()
}
/// List files changed between `since` and HEAD, plus any untracked files,
/// restricted to supported source extensions and to paths that still exist
/// (deleted files show up in `git diff --name-only` but cannot be parsed).
///
/// # Errors
/// Fails when `git diff` cannot be spawned or exits non-zero; the untracked
/// listing is best-effort and its failures are ignored.
fn get_changed_files_since(repo_path: &Path, since: &str) -> Result<Vec<PathBuf>> {
    let diff = Command::new("git")
        .args(["diff", "--name-only", since, "HEAD"])
        .current_dir(repo_path)
        .output()
        .with_context(|| format!("Failed to run git diff since '{}'", since))?;
    if !diff.status.success() {
        let stderr = String::from_utf8_lossy(&diff.stderr);
        anyhow::bail!("git diff failed: {}", stderr.trim());
    }
    let listing = String::from_utf8_lossy(&diff.stdout);
    let mut files: Vec<PathBuf> = listing
        .lines()
        .filter(|line| !line.is_empty())
        .map(|line| repo_path.join(line))
        .filter(|candidate| candidate.exists())
        .collect();
    // Brand-new (untracked) files are also "changed since" the commit.
    if let Ok(out) = Command::new("git")
        .args(["ls-files", "--others", "--exclude-standard"])
        .current_dir(repo_path)
        .output()
    {
        if out.status.success() {
            let untracked_listing = String::from_utf8_lossy(&out.stdout);
            for line in untracked_listing.lines().filter(|l| !l.is_empty()) {
                let path = repo_path.join(line);
                if path.exists() && !files.contains(&path) {
                    files.push(path);
                }
            }
        }
    }
    // Keep only files the analyzer can parse.
    files.retain(|p| {
        p.extension()
            .and_then(|e| e.to_str())
            .map(|ext| SUPPORTED_EXTENSIONS.contains(&ext))
            .unwrap_or(false)
    });
    Ok(files)
}
fn get_uncommitted_files(repo_path: &Path) -> Result<Vec<PathBuf>> {
let output = Command::new("git")
.args(["diff", "--name-only", "HEAD"])
.current_dir(repo_path)
.output()
.with_context(|| "Failed to run git diff HEAD")?;
let stdout = String::from_utf8_lossy(&output.stdout);
let mut files: Vec<PathBuf> = stdout
.lines()
.filter(|l| !l.is_empty())
.map(|l| repo_path.join(l))
.filter(|p| p.exists())
.collect();
let staged = Command::new("git")
.args(["diff", "--name-only", "--cached"])
.current_dir(repo_path)
.output();
if let Ok(out) = staged {
let staged_files = String::from_utf8_lossy(&out.stdout);
for line in staged_files.lines().filter(|l| !l.is_empty()) {
let path = repo_path.join(line);
if path.exists() && !files.contains(&path) {
files.push(path);
}
}
}
let untracked = Command::new("git")
.args(["ls-files", "--others", "--exclude-standard"])
.current_dir(repo_path)
.output();
if let Ok(out) = untracked {
let new_files = String::from_utf8_lossy(&out.stdout);
for line in new_files.lines().filter(|l| !l.is_empty()) {
let path = repo_path.join(line);
if path.exists() && !files.contains(&path) {
files.push(path);
}
}
}
files.retain(|p| {
p.extension()
.and_then(|e| e.to_str())
.map(|ext| SUPPORTED_EXTENSIONS.contains(&ext))
.unwrap_or(false)
});
Ok(files)
}