#![cfg_attr(coverage_nightly, coverage(off))]
use crate::cli::colors as c;
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
/// Run `cargo test` limited to the modules whose source files changed in git.
///
/// Returns `Ok(true)` when the tests pass or when no Rust files changed,
/// `Ok(false)` when tests fail, and `Err` only if `cargo` could not be run.
fn run_changed_module_tests(project_path: &PathBuf) -> Result<bool> {
    use std::process::Command;
    println!(" {}", c::dim("Running tests..."));
    let modules =
        crate::services::git_test_filter::extract_test_modules_from_changed_files(project_path)?;
    if modules.is_empty() {
        println!(" {}", c::skip("No Rust files changed, skipping tests"));
        return Ok(true);
    }
    let module_list = modules.join(", ");
    // Truncate long lists for display. Walk back to a UTF-8 char boundary so
    // slicing never panics and "..." is only appended when text was actually
    // cut (the previous `get(..60)` silently fell back to the full list when
    // byte 60 landed inside a multi-byte character).
    let display = if module_list.len() > 60 {
        let mut cut = 60;
        while !module_list.is_char_boundary(cut) {
            cut -= 1;
        }
        format!("{}...", &module_list[..cut])
    } else {
        module_list
    };
    println!(
        " {} {}",
        c::label("Testing changed modules:"),
        c::path(&display)
    );
    // `--quiet` is appended unconditionally below, so the fallback must not
    // carry it as well — otherwise the flag would be passed to cargo twice.
    let test_cmd = crate::services::git_test_filter::build_test_command(&modules)
        .unwrap_or_else(|| vec!["test".into(), "--lib".into()]);
    let status = Command::new("cargo")
        .args(&test_cmd)
        .arg("--quiet")
        .current_dir(project_path)
        .status()
        .context("Failed to run cargo test")?;
    if status.success() {
        println!(" {}", c::pass("Tests passed"));
        Ok(true)
    } else {
        println!(" {}", c::fail("Tests failed"));
        Ok(false)
    }
}
fn run_rust_project_checks(project_path: &PathBuf) -> Result<bool> {
use std::process::Command;
if !project_path.join("Cargo.toml").exists() {
return Ok(true);
}
println!(" {}", c::dim("Rust project detected..."));
let mut passed = true;
let examples_dir = project_path.join("examples");
if examples_dir.exists() && examples_dir.is_dir() {
println!(" {}", c::dim("Checking examples..."));
let status = Command::new("cargo")
.args(["test", "--examples", "--no-run"])
.current_dir(project_path)
.status()
.context("Failed to run cargo test --examples")?;
if status.success() {
println!(" {}", c::pass("Examples compile"));
} else {
println!(" {}", c::fail("Examples failed to compile"));
passed = false;
}
}
println!(" {}", c::dim("Capturing rust-project-score..."));
if let Ok(output) = Command::new("pmat")
.args(["rust-project-score", "--format", "json"])
.current_dir(project_path)
.output()
{
if output.status.success() {
if let Ok(json) = serde_json::from_slice::<serde_json::Value>(&output.stdout) {
if let Some(score) = json.get("total_earned").and_then(|v| v.as_f64()) {
println!(
" {}",
c::pass(&format!(
"Rust Project Score: {}",
c::score(score, 134.0, 80.0, 60.0)
))
);
}
}
} else {
println!(
" {}",
c::warn("Failed to capture rust-project-score (continuing)")
);
}
}
Ok(passed)
}
fn run_golden_trace_validation(project_path: &PathBuf) -> Result<bool> {
use std::process::Command;
if !project_path.join("renacer.toml").exists() {
return Ok(true);
}
let baseline_dir = project_path.join("golden_traces").join("baseline");
if !baseline_dir.exists() {
println!(" {}", c::skip("Golden traces config found, no baseline yet (run: renacer validate --generate golden_traces/baseline -- ./target/release/pmat --help)"));
return Ok(true);
}
println!(" {}", c::dim("Golden traces detected..."));
match Command::new("renacer")
.args([
"validate",
"--baseline",
baseline_dir.to_str().unwrap_or("golden_traces/baseline"),
"--ignore-timing",
"--",
"./target/release/pmat",
"--help",
])
.current_dir(project_path)
.status()
{
Ok(status) if status.success() => {
println!(" {}", c::pass("Golden traces match"));
Ok(true)
}
Ok(status) if status.code() == Some(2) => {
println!(" {}", c::skip("No golden baseline yet"));
Ok(true)
}
Ok(_) => {
println!(" {}", c::fail("Golden traces diverged"));
Ok(false)
}
Err(_) => {
println!(
" {}",
c::warn("renacer not installed (skipping golden trace validation)")
);
Ok(true)
}
}
}
fn run_clippy_check(project_path: &PathBuf) -> Result<bool> {
use std::process::Command;
println!(" {}", c::dim("Running clippy..."));
let status = Command::new("cargo")
.args(["clippy", "--lib", "--quiet", "--", "-D", "warnings"])
.current_dir(project_path)
.status()
.context("Failed to run cargo clippy")?;
if status.success() {
println!(" {}", c::pass("No clippy warnings"));
Ok(true)
} else {
println!(" {}", c::fail("Clippy warnings found"));
Ok(false)
}
}
/// Run every quality gate for the project at `project_path`.
///
/// All gates execute even when an earlier one fails, so the user sees the
/// full set of problems in a single pass; the returned bool is the
/// conjunction of the individual gate results.
#[provable_contracts_macros::contract("pmat-core.yaml", equation = "path_exists")]
pub async fn run_quality_gates(project_path: &PathBuf) -> Result<bool> {
    // Array literals evaluate left-to-right, preserving the gate order.
    let gate_results = [
        run_changed_module_tests(project_path)?,
        run_rust_project_checks(project_path)?,
        run_golden_trace_validation(project_path)?,
        run_clippy_check(project_path)?,
    ];
    // The index refresh is informational only; it never affects the outcome.
    refresh_agent_context_index(project_path);
    println!();
    Ok(gate_results.iter().all(|passed| *passed))
}
/// Rebuild the agent context index and persist it to `.pmat/context.idx`.
///
/// Purely best-effort: build or save failures are reported on stderr and
/// never abort or fail the surrounding quality-gate run.
fn refresh_agent_context_index(project_path: &PathBuf) {
    use crate::services::agent_context::AgentContextIndex;
    let index_path = project_path.join(".pmat/context.idx");
    // Guard clause: bail out with a warning if the index cannot be built.
    let index = match AgentContextIndex::build(project_path) {
        Ok(built) => built,
        Err(e) => {
            eprintln!(
                " {}",
                c::warn(&format!("Agent context index build failed: {}", e))
            );
            return;
        }
    };
    if let Err(e) = index.save(&index_path) {
        eprintln!(
            " {}",
            c::warn(&format!("Agent context index save failed: {}", e))
        );
        return;
    }
    let manifest = index.manifest();
    println!(
        " {} {} functions in {} files",
        c::pass("Agent context index refreshed:"),
        c::number(&format!("{}", manifest.function_count)),
        c::number(&format!("{}", manifest.file_count))
    );
}
/// Aggregate outcome of a Popper falsification run
/// (produced by `run_popper_falsification`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FalsificationResult {
    /// Hypothesis 1: the library test suite passed.
    pub tests_passed: bool,
    /// Hypothesis 2: coverage did not drop versus the previous trend entry
    /// (also set when no trend history exists).
    pub coverage_maintained: bool,
    /// Coverage percentage from the previous trend entry, when available.
    pub coverage_before: Option<f32>,
    /// Coverage percentage from the latest trend entry, when available.
    pub coverage_after: Option<f32>,
    /// Hypothesis 3: release binary stayed within the size limit.
    pub binary_size_ok: bool,
    /// True only when all hypotheses held.
    pub passed: bool,
    /// Human-readable one-line summary of the run.
    pub summary: String,
}
impl Default for FalsificationResult {
fn default() -> Self {
Self {
tests_passed: false,
coverage_maintained: false,
coverage_before: None,
coverage_after: None,
binary_size_ok: true,
passed: false,
summary: String::new(),
}
}
}
fn falsify_test_regression(
project_path: &PathBuf,
step: usize,
total: usize,
) -> Result<(bool, Vec<String>)> {
use std::process::Command;
println!(
" {} Hypothesis: No regressions introduced",
c::label(&format!("[{}/{}]", step, total))
);
println!(" {}", c::dim("Falsification: Running tests..."));
let status = Command::new("cargo")
.args(["test", "--lib", "--quiet"])
.current_dir(project_path)
.status()
.context("Failed to run cargo test")?;
if status.success() {
println!(
" {}",
c::pass(&format!("Hypothesis holds ({}/{} validated)", step, total))
);
Ok((true, vec![]))
} else {
println!(" {}", c::fail("Hypothesis falsified: Tests fail"));
Ok((false, vec!["Tests failed - regressions detected".into()]))
}
}
fn falsify_coverage_regression(
project_path: &PathBuf,
result: &mut FalsificationResult,
step: usize,
total: usize,
) -> (bool, Vec<String>) {
println!();
println!(
" {} Hypothesis: Coverage maintained or improved",
c::label(&format!("[{}/{}]", step, total))
);
println!(
" {}",
c::dim("Falsification: Checking coverage trends...")
);
let trend_file = project_path.join(".pmat-metrics/trends/test-coverage.json");
let coverage = parse_coverage_trend(&trend_file);
match coverage {
Some((previous, current)) => {
result.coverage_before = Some(previous);
result.coverage_after = Some(current);
if current >= previous {
result.coverage_maintained = true;
let delta = current - previous;
let msg = if delta > 0.0 {
format!("+{:.2}%", delta)
} else {
format!("at {:.2}%", current)
};
println!(
" {}",
c::pass(&format!(
"Hypothesis holds: Coverage {} ({}/{} validated)",
msg, step, total
))
);
(true, vec![])
} else {
let delta = previous - current;
println!(
" {}",
c::fail(&format!("Hypothesis falsified: Coverage -{:.2}%", delta))
);
(false, vec![format!("Coverage dropped by {:.2}%", delta)])
}
}
None => {
result.coverage_maintained = true;
println!(
" {}",
c::warn(&format!(
"No coverage history ({}/{} validated)",
step, total
))
);
(true, vec![])
}
}
}
/// Read the last two coverage data points from a trend JSON file.
///
/// Returns `(previous, current)` values, or `None` when the file is
/// missing, unparsable, not an array, holds fewer than two entries, or an
/// entry lacks a numeric `"value"` field.
fn parse_coverage_trend(path: &std::path::Path) -> Option<(f32, f32)> {
    let raw = std::fs::read_to_string(path).ok()?;
    let json: serde_json::Value = serde_json::from_str(&raw).ok()?;
    // Slice pattern requires at least two elements, binding the final pair.
    let [.., prev_entry, curr_entry] = json.as_array()?.as_slice() else {
        return None;
    };
    let value_of = |entry: &serde_json::Value| -> Option<f32> {
        Some(entry.get("value")?.as_f64()? as f32)
    };
    Some((value_of(prev_entry)?, value_of(curr_entry)?))
}
fn falsify_binary_bloat(project_path: &PathBuf, step: usize, total: usize) -> (bool, Vec<String>) {
println!();
println!(
" {} Hypothesis: No dependency bloat",
c::label(&format!("[{}/{}]", step, total))
);
let release_binary = project_path.join("target/release/pmat");
if !release_binary.exists() {
println!(
" {}",
c::warn(&format!("No release binary ({}/{} validated)", step, total))
);
return (true, vec![]);
}
if let Ok(metadata) = std::fs::metadata(&release_binary) {
let size_mb = metadata.len() as f64 / (1024.0 * 1024.0);
if size_mb <= 50.0 {
println!(
" {}",
c::pass(&format!(
"Hypothesis holds: {}MB < 50MB ({}/{} validated)",
c::number(&format!("{:.1}", size_mb)),
step,
total
))
);
(true, vec![])
} else {
println!(
" {}",
c::fail(&format!(
"Hypothesis falsified: {}MB > 50MB limit",
c::number(&format!("{:.1}", size_mb))
))
);
(
false,
vec![format!("Binary size {:.1}MB exceeds 50MB limit", size_mb)],
)
}
} else {
(true, vec![])
}
}
/// Run the three-step Karl Popper falsification suite: test regressions,
/// coverage trend, and binary-size bloat.
///
/// All three hypotheses are evaluated regardless of earlier failures so
/// every issue is reported; the returned result aggregates the outcomes.
#[provable_contracts_macros::contract("pmat-core.yaml", equation = "path_exists")]
pub async fn run_popper_falsification(project_path: &PathBuf) -> Result<FalsificationResult> {
    let total = 3;
    let mut outcome = FalsificationResult::default();
    // Session banner.
    println!();
    println!(
        "{} (0/{} complete)",
        c::header("Karl Popper Falsification Validation"),
        total
    );
    println!(
        " {}",
        c::dim("(Scientific method: attempting to falsify your work)")
    );
    println!();
    // Hypothesis 1 (tests) can genuinely error; 2 and 3 are infallible.
    let (tests_ok, test_issues) = falsify_test_regression(project_path, 1, total)?;
    outcome.tests_passed = tests_ok;
    let (cov_ok, cov_issues) = falsify_coverage_regression(project_path, &mut outcome, 2, total);
    let (size_ok, size_issues) = falsify_binary_bloat(project_path, 3, total);
    outcome.binary_size_ok = size_ok;
    outcome.passed = tests_ok && cov_ok && size_ok;
    let validated = [tests_ok, cov_ok, size_ok].iter().filter(|ok| **ok).count();
    let mut all_issues = test_issues;
    all_issues.extend(cov_issues);
    all_issues.extend(size_issues);
    println!();
    if outcome.passed {
        outcome.summary = format!(
            "{}/{} hypotheses validated - work is valid",
            validated, total
        );
        println!(
            " {}",
            c::pass(&format!(
                "FALSIFICATION RESULT: PASSED ({}/{})",
                validated, total
            ))
        );
    } else {
        outcome.summary = format!(
            "{}/{} validated, {} falsified: {}",
            validated,
            total,
            total - validated,
            all_issues.join(", ")
        );
        println!(
            " {}",
            c::fail(&format!(
                "FALSIFICATION RESULT: FAILED ({}/{} validated)",
                validated, total
            ))
        );
        for issue in &all_issues {
            println!(" - {}", c::fail(issue));
        }
    }
    println!();
    Ok(outcome)
}