//! pmat 3.14.0
//!
//! PMAT - Zero-config AI context generation and code quality toolkit (CLI, MCP, HTTP)
/// Print validation result as human-readable colored text.
///
/// Renders one line per category with a pass/warn/fail marker, one line per
/// item with a colored status glyph, then the overall score, any outstanding
/// manual checks, and a final PASSED/FAILED verdict.
#[provable_contracts_macros::contract("pmat-core.yaml", equation = "check_compliance")]
pub fn print_validation_text(result: &QaValidationResult) {
    use crate::cli::colors as c;
    println!("Validating {}...\n", c::label(&result.task_id));

    // Sort category names so output is deterministic; HashMap iteration
    // order is randomized between runs.
    let mut names: Vec<_> = result.categories.keys().collect();
    names.sort();

    for name in names {
        let category = &result.categories[name];
        // Category marker: all items passed / some passed / none passed.
        let status = if category.passed == category.total {
            c::pass("✓")
        } else if category.passed > 0 {
            c::warn("⚠")
        } else {
            c::fail("✗")
        };

        println!(
            "{} {} ({}/{})",
            status, c::subheader(&category.name), category.passed, category.total
        );

        for item in &category.items {
            // Per-item glyph, colored by status.
            let item_status = match item.status {
                ValidationStatus::Passed => format!("  {}✓{}", c::GREEN, c::RESET),
                ValidationStatus::Failed => format!("  {}✗{}", c::RED, c::RESET),
                ValidationStatus::Warning => format!("  {}⚠{}", c::YELLOW, c::RESET),
                ValidationStatus::Skipped => format!("  {}-{}", c::DIM, c::RESET),
                ValidationStatus::Manual => format!("  {}?{}", c::BLUE, c::RESET),
            };
            println!("{}  {}: {}", item_status, item.id, item.description);
        }
        println!();
    }

    println!("{}: {:.1}%", c::label("Overall Score"), result.overall_score);
    println!();

    // Items a human must still verify by hand.
    if !result.manual_checks_required.is_empty() {
        println!("{}:", c::warn("Manual Checks Required"));
        for check in &result.manual_checks_required {
            println!("  - {}", check);
        }
        println!();
    }

    // Final verdict line.
    if result.passed {
        println!("{}", c::pass("QA Validation PASSED"));
    } else {
        println!("{}", c::fail("QA Validation FAILED"));
    }
}

/// Print validation result as a markdown report.
///
/// Emits a title, metadata (date, score), one `##` section per category with
/// a checkbox per item, and a trailing section listing any checks that still
/// require manual verification.
#[provable_contracts_macros::contract("pmat-core.yaml", equation = "check_compliance")]
pub fn print_validation_markdown(result: &QaValidationResult) {
    println!("# QA Validation Report: {}\n", result.task_id);
    println!(
        "**Date**: {}",
        result.timestamp.format("%Y-%m-%d %H:%M:%S UTC")
    );
    println!("**Score**: {:.1}%\n", result.overall_score);

    // Sort category names so the report is deterministic; HashMap iteration
    // order changes between runs.
    let mut names: Vec<_> = result.categories.keys().collect();
    names.sort();

    for name in names {
        let category = &result.categories[name];
        println!(
            "## {} ({}/{})\n",
            category.name, category.passed, category.total
        );

        for item in &category.items {
            // Checkbox per item; non-passed states carry an inline annotation.
            let checkbox = match item.status {
                ValidationStatus::Passed => "[x]",
                ValidationStatus::Failed => "[ ] **FAILED**",
                ValidationStatus::Warning => "[ ] *warning*",
                ValidationStatus::Skipped => "[ ] *(skipped)*",
                ValidationStatus::Manual => "[ ] *(manual)*",
            };
            println!("- {} {}: {}", checkbox, item.id, item.description);
        }
        println!();
    }

    if !result.manual_checks_required.is_empty() {
        println!("## Manual Checks Required\n");
        for check in &result.manual_checks_required {
            println!("- [ ] {}", check);
        }
    }
}

/// Generate QA report for audit trail.
///
/// Runs every check category against `project_path`, computes the overall
/// score and pass/fail verdict, renders the result in the requested
/// `format`, and either writes it to `output` or prints it to stdout.
///
/// # Errors
///
/// Returns an error if serialization fails or the report file cannot be
/// written.
#[provable_contracts_macros::contract("pmat-core.yaml", equation = "path_exists")]
pub async fn handle_report(
    task_id: &str,
    project_path: &Path,
    with_evidence: bool,
    output: Option<&Path>,
    format: QaOutputFormat,
) -> anyhow::Result<()> {
    use crate::cli::colors as c;
    println!("{} {}", c::label("Generating QA report for task:"), task_id);

    // First run validation across all check categories.
    let mut result = QaValidationResult {
        task_id: task_id.to_string(),
        timestamp: Utc::now(),
        categories: HashMap::new(),
        overall_score: 0.0,
        passed: true,
        manual_checks_required: vec![],
    };

    result.categories.insert(
        "code_quality".into(),
        run_code_quality_checks(project_path).await,
    );
    result
        .categories
        .insert("testing".into(), run_testing_checks(project_path).await);
    result.categories.insert(
        "documentation".into(),
        run_documentation_checks(project_path, task_id).await,
    );
    result.categories.insert(
        "process".into(),
        run_process_checks(project_path, task_id).await,
    );

    // Calculate score and the overall verdict. `passed` was previously left
    // at its initial `true` and never updated, so the report always claimed
    // PASSED; derive it from the actual check results instead.
    let (total_passed, total_items) = result
        .categories
        .values()
        .fold((0, 0), |(p, t), cat| (p + cat.passed, t + cat.total));
    result.overall_score = if total_items > 0 {
        (total_passed as f64 / total_items as f64) * 100.0
    } else {
        0.0
    };
    result.passed = total_passed == total_items;

    // Generate report content. Text shares the markdown renderer since the
    // report is meant to be archived.
    let report = match format {
        QaOutputFormat::Json => serde_json::to_string_pretty(&result)?,
        QaOutputFormat::Yaml => serde_yaml_ng::to_string(&result)?,
        QaOutputFormat::Markdown | QaOutputFormat::Text => {
            let mut md = String::new();
            md.push_str(&format!("# QA Report: {}\n\n", task_id));
            md.push_str("## Summary\n\n");
            md.push_str(&format!("- **Task**: {}\n", task_id));
            md.push_str(&format!(
                "- **Status**: {}\n",
                if result.passed {
                    "PASSED"
                } else {
                    "NEEDS ATTENTION"
                }
            ));
            md.push_str(&format!("- **Score**: {:.1}%\n", result.overall_score));
            md.push_str(&format!(
                "- **Date**: {}\n\n",
                result.timestamp.format("%Y-%m-%d")
            ));

            md.push_str("## Checklist Results\n\n");
            // Sort category names so the archived report is deterministic;
            // HashMap iteration order changes between runs.
            let mut names: Vec<_> = result.categories.keys().collect();
            names.sort();
            for name in names {
                let category = &result.categories[name];
                md.push_str(&format!(
                    "### {} ({}/{})\n\n",
                    category.name, category.passed, category.total
                ));
                for item in &category.items {
                    let status_icon = match item.status {
                        ValidationStatus::Passed => "✅",
                        ValidationStatus::Failed => "❌",
                        ValidationStatus::Warning => "⚠️",
                        ValidationStatus::Skipped => "⏭️",
                        ValidationStatus::Manual => "📝",
                    };
                    md.push_str(&format!(
                        "- {} **{}**: {}\n",
                        status_icon, item.id, item.description
                    ));
                }
                md.push('\n');
            }

            if with_evidence {
                md.push_str("## Evidence\n\n");
                md.push_str("- Coverage Report: `target/llvm-cov/html/index.html`\n");
                md.push_str("- Test Results: See CI/CD logs\n");
                md.push_str("- Complexity: Run `pmat analyze complexity`\n");
            }

            md
        }
    };

    // Output: write to file when a path was given, otherwise print to stdout.
    if let Some(output_path) = output {
        fs::write(output_path, &report)?;
        println!(
            "{} Report saved to: {}",
            c::pass("✓"),
            c::path(&output_path.display().to_string())
        );
    } else {
        println!("{}", report);
    }

    Ok(())
}