//! pmat 3.11.0
//!
//! PMAT - Zero-config AI context generation and code quality toolkit (CLI, MCP, HTTP)
#![cfg_attr(coverage_nightly, coverage(off))]
// Score aggregation and recommendation generation

use crate::services::repo_score::error::Result;
use crate::services::repo_score::models::*;
use crate::services::repo_score::scorers::*;
use std::path::Path;
use std::time::Instant;

/// Stateless orchestrator that runs every category scorer against a
/// repository and folds the results into a single `RepoScore`.
pub struct ScoreAggregator;

impl ScoreAggregator {
    /// Creates a new aggregator. The type carries no state, so this is free.
    pub fn new() -> Self {
        Self
    }

    /// Aggregate all scores for a repository.
    ///
    /// Runs each category scorer in sequence against `repo_path`, combines
    /// the per-category results, derives the total score and letter grade,
    /// generates prioritized recommendations, and records execution time
    /// plus best-effort git context (branch/commit) in the metadata.
    ///
    /// # Errors
    /// Propagates the first error returned by any individual scorer.
    pub async fn aggregate(&self, repo_path: &Path, config: &ScorerConfig) -> Result<RepoScore> {
        tracing::debug!("ScoreAggregator::aggregate START");
        let started_at = Instant::now();

        // All scorers are stateless; construct them once up front.
        let readme = ReadmeScorer::new();
        let precommit = PrecommitScorer::new();
        let hygiene = HygieneScorer::new();
        let makefile = MakefileScorer::new();
        let ci = CiScorer::new();
        let pmat = PmatScorer::new();

        // Scorers run one after another; each failure aborts aggregation.
        tracing::debug!("Starting readme_scorer");
        let documentation = readme.score(repo_path, config).await?;
        tracing::debug!("Finished readme_scorer");

        tracing::debug!("Starting precommit_scorer");
        let precommit_hooks = precommit.score(repo_path, config).await?;
        tracing::debug!("Finished precommit_scorer");

        tracing::debug!("Starting hygiene_scorer");
        let repository_hygiene = hygiene.score(repo_path, config).await?;
        tracing::debug!("Finished hygiene_scorer");

        tracing::debug!("Starting makefile_scorer");
        let build_test_automation = makefile.score(repo_path, config).await?;
        tracing::debug!("Finished makefile_scorer");

        tracing::debug!("Starting ci_scorer");
        let continuous_integration = ci.score(repo_path, config).await?;
        tracing::debug!("Finished ci_scorer");

        tracing::debug!("Starting pmat_scorer");
        let pmat_compliance = pmat.score(repo_path, config).await?;
        tracing::debug!("Finished pmat_scorer");

        let categories = CategoryScores {
            documentation,
            precommit_hooks,
            repository_hygiene,
            build_test_automation,
            continuous_integration,
            pmat_compliance,
        };

        // Final score (0-100) and its letter grade.
        let total_score = categories.total();
        let grade = Grade::from_score(total_score);

        // Actionable fixes for failing categories, sorted by priority.
        let recommendations = self.generate_recommendations(&categories);

        // Timing + best-effort git context; git lookups are optional and
        // never fail the aggregation.
        let mut metadata = ScoreMetadata::new(repo_path.to_path_buf());
        metadata.execution_time_ms = started_at.elapsed().as_millis() as u64;
        if let Ok(branch) = self.get_git_branch(repo_path) {
            metadata.git_branch = Some(branch);
        }
        if let Ok(commit) = self.get_git_commit(repo_path) {
            metadata.git_commit = Some(commit);
        }

        Ok(RepoScore {
            total_score,
            grade,
            categories,
            recommendations,
            metadata,
        })
    }

    /// Generate recommendations based on findings.
    ///
    /// Emits one fix-it recommendation per failing category (hygiene also
    /// fires on a low-scoring warning), then sorts Critical-first.
    fn generate_recommendations(&self, categories: &CategoryScores) -> Vec<Recommendation> {
        let mut recs: Vec<Recommendation> = Vec::new();

        // Missing/incomplete README.
        if matches!(categories.documentation.status, ScoreStatus::Fail) {
            recs.push(Recommendation {
                priority: Priority::Critical,
                category: "Documentation".to_string(),
                title: "Add comprehensive README.md".to_string(),
                description: "Your repository is missing a complete README.md with required sections (Overview, Installation, Usage, License, Contributing).".to_string(),
                // Points recoverable = category maximum minus current score.
                impact_points: 15.0 - categories.documentation.score,
                estimated_effort: "30 minutes".to_string(),
                commands: vec![
                    "# Create README.md with all required sections".to_string(),
                    "touch README.md".to_string(),
                ],
            });
        }

        // No usable pre-commit hook.
        if matches!(categories.precommit_hooks.status, ScoreStatus::Fail) {
            recs.push(Recommendation {
                priority: Priority::High,
                category: "Pre-commit Hooks".to_string(),
                title: "Install pre-commit hooks".to_string(),
                description: "Add a pre-commit hook to run linting before commits.".to_string(),
                impact_points: 20.0 - categories.precommit_hooks.score,
                estimated_effort: "15 minutes".to_string(),
                commands: vec![
                    "mkdir -p .git/hooks".to_string(),
                    "cat > .git/hooks/pre-commit << 'EOF'\n#!/bin/bash\ncargo clippy -- -D warnings\nEOF".to_string(),
                    "chmod +x .git/hooks/pre-commit".to_string(),
                ],
            });
        }

        // Hygiene fires on Fail OR on a Warning that still scores low.
        let hygiene = &categories.repository_hygiene;
        if matches!(hygiene.status, ScoreStatus::Fail | ScoreStatus::Warning) && hygiene.score < 15.0
        {
            recs.push(Recommendation {
                priority: Priority::Medium,
                category: "Repository Hygiene".to_string(),
                title: "Clean up repository files".to_string(),
                description: "Remove cruft files (.tmp, .bak) and team-specific files (.idea/, .vscode/). Add them to .gitignore.".to_string(),
                impact_points: 15.0 - hygiene.score,
                estimated_effort: "10 minutes".to_string(),
                commands: vec![
                    "# Remove temporary files".to_string(),
                    "find . -name '*.tmp' -delete".to_string(),
                    "find . -name '*.bak' -delete".to_string(),
                    "# Add to .gitignore".to_string(),
                    "echo '.idea/' >> .gitignore".to_string(),
                    "echo '.vscode/' >> .gitignore".to_string(),
                ],
            });
        }

        // Missing Makefile / required targets.
        if matches!(categories.build_test_automation.status, ScoreStatus::Fail) {
            recs.push(Recommendation {
                priority: Priority::Critical,
                category: "Build & Test".to_string(),
                title: "Create Makefile with required targets".to_string(),
                description: "Add a Makefile with targets: test-fast, test, lint, coverage".to_string(),
                impact_points: 25.0 - categories.build_test_automation.score,
                estimated_effort: "1 hour".to_string(),
                commands: vec![
                    "# Create Makefile".to_string(),
                    "cat > Makefile << 'EOF'\n.PHONY: test-fast test lint coverage\n\ntest-fast:\n\tcargo test --lib\n\ntest:\n\tcargo test\n\nlint:\n\tcargo clippy -- -D warnings\n\ncoverage:\n\tcargo llvm-cov --html\nEOF".to_string(),
                ],
            });
        }

        // No CI workflow.
        if matches!(categories.continuous_integration.status, ScoreStatus::Fail) {
            recs.push(Recommendation {
                priority: Priority::High,
                category: "CI/CD".to_string(),
                title: "Add GitHub Actions workflow".to_string(),
                description: "Create a CI workflow to run tests and linting on every push"
                    .to_string(),
                impact_points: 20.0 - categories.continuous_integration.score,
                estimated_effort: "30 minutes".to_string(),
                commands: vec![
                    "mkdir -p .github/workflows".to_string(),
                    "# Create ci.yml workflow file".to_string(),
                ],
            });
        }

        // No PMAT quality-gates config.
        if matches!(categories.pmat_compliance.status, ScoreStatus::Fail) {
            recs.push(Recommendation {
                priority: Priority::Medium,
                category: "PMAT Compliance".to_string(),
                title: "Add PMAT quality gates configuration".to_string(),
                description: "Create .pmat-gates.toml with quality thresholds".to_string(),
                impact_points: 5.0 - categories.pmat_compliance.score,
                estimated_effort: "15 minutes".to_string(),
                commands: vec![
                    "cat > .pmat-gates.toml << 'EOF'\n[complexity]\nmax_complexity = 10\n\n[coverage]\nminimum_coverage = 80.0\nEOF".to_string(),
                ],
            });
        }

        // Descending priority: Critical > High > Medium > Low.
        recs.sort_by(|x, y| y.priority.cmp(&x.priority));

        recs
    }

    /// Best-effort branch name from `.git/HEAD`.
    ///
    /// Returns `"unknown"` when there is no HEAD file or HEAD is detached
    /// (i.e. not a `ref: refs/heads/…` line); only I/O failures are errors.
    fn get_git_branch(&self, repo_path: &Path) -> Result<String> {
        let head = repo_path.join(".git/HEAD");
        if !head.exists() {
            return Ok("unknown".to_string());
        }
        let content = std::fs::read_to_string(head)?;
        match content.strip_prefix("ref: refs/heads/") {
            Some(branch) => Ok(branch.trim().to_string()),
            None => Ok("unknown".to_string()),
        }
    }

    /// Best-effort commit hash from `.git/HEAD`.
    ///
    /// A detached HEAD holds the hash directly; otherwise follow the symbolic
    /// ref to its file. Returns `"unknown"` when the ref cannot be resolved
    /// (e.g. packed refs — the loose ref file does not exist).
    fn get_git_commit(&self, repo_path: &Path) -> Result<String> {
        let head = repo_path.join(".git/HEAD");
        if !head.exists() {
            return Ok("unknown".to_string());
        }
        let content = std::fs::read_to_string(head)?;
        if !content.starts_with("ref:") {
            // Detached HEAD: the file contains the commit hash itself.
            return Ok(content.trim().to_string());
        }
        if let Some(ref_path) = content.strip_prefix("ref: ") {
            let ref_file = repo_path.join(".git").join(ref_path.trim());
            if ref_file.exists() {
                let commit = std::fs::read_to_string(ref_file)?;
                return Ok(commit.trim().to_string());
            }
        }
        Ok("unknown".to_string())
    }
}

impl Default for ScoreAggregator {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    // Fresh empty directory standing in for a repository; cleaned up on drop.
    fn create_temp_repo() -> TempDir {
        TempDir::new().unwrap()
    }

    // Writes `content` to `relative_path` under the repo, creating parent
    // directories as needed. Fixture bytes are exactly what scorers parse.
    fn create_file(repo_path: &Path, relative_path: &str, content: &str) {
        let file_path = repo_path.join(relative_path);
        if let Some(parent) = file_path.parent() {
            fs::create_dir_all(parent).unwrap();
        }
        fs::write(file_path, content).unwrap();
    }

    // An empty directory should fail every category: low total, F grade,
    // and at least one recommendation.
    #[tokio::test]
    async fn test_aggregator_empty_repo() {
        let temp_dir = create_temp_repo();
        let repo_path = temp_dir.path();

        let aggregator = ScoreAggregator::new();
        let config = ScorerConfig::default();

        let result = aggregator.aggregate(repo_path, &config).await.unwrap();

        // Empty repo should score very low
        assert!(result.total_score < 20.0);
        assert_eq!(result.grade, Grade::F);
        assert!(!result.recommendations.is_empty());
    }

    // A repo with every expected artifact (README sections, executable
    // pre-commit hook, Makefile targets, CI workflow, PMAT gates, tooling
    // configs) should score near the top.
    #[tokio::test]
    async fn test_aggregator_perfect_repo() {
        let temp_dir = create_temp_repo();
        let repo_path = temp_dir.path();

        // Create perfect repository structure
        create_file(
            repo_path,
            "README.md",
            "# Project\n## Overview\n## Installation\n## Usage\n## License\n## Contributing",
        );
        create_file(
            repo_path,
            ".git/hooks/pre-commit",
            "#!/bin/bash\ncargo clippy",
        );
        create_file(repo_path, "Makefile", ".PHONY: test-fast test lint coverage\ntest-fast:\n\tcargo test\ntest:\n\tcargo test\nlint:\n\tcargo clippy\ncoverage:\n\tcargo llvm-cov");
        create_file(repo_path, ".github/workflows/ci.yml", "name: CI\non: push\njobs:\n  test:\n    runs-on: ubuntu-latest\n    steps:\n      - run: cargo test");
        create_file(
            repo_path,
            ".pmat-gates.toml",
            "[complexity]\nmax_complexity = 10",
        );
        create_file(
            repo_path,
            "Cargo.toml",
            "[dependencies]\nproptest = \"1.0\"",
        );
        create_file(repo_path, "book.toml", "[book]");
        fs::create_dir_all(repo_path.join("fuzz")).unwrap();
        create_file(repo_path, "mutants.toml", "[mutants]");

        // The hook must be executable to count; permission bits only exist
        // on unix, so the chmod is cfg-gated.
        #[cfg(unix)]
        {
            use std::os::unix::fs::PermissionsExt;
            let hook_path = repo_path.join(".git/hooks/pre-commit");
            let mut perms = fs::metadata(&hook_path).unwrap().permissions();
            perms.set_mode(0o755);
            fs::set_permissions(&hook_path, perms).unwrap();
        }

        let aggregator = ScoreAggregator::new();
        let config = ScorerConfig::default();

        let result = aggregator.aggregate(repo_path, &config).await.unwrap();

        // Should score very high (100 points max)
        assert!(result.total_score >= 80.0);
    }

    // Grade mapping sanity check: an empty repo lands at F.
    #[tokio::test]
    async fn test_aggregator_grade_assignment() {
        let temp_dir = create_temp_repo();
        let repo_path = temp_dir.path();

        // Minimal repo for F grade
        let aggregator = ScoreAggregator::new();
        let config = ScorerConfig::default();

        let result = aggregator.aggregate(repo_path, &config).await.unwrap();

        // Should be F grade
        assert_eq!(result.grade, Grade::F);
    }

    // An empty repo must yield recommendations, including one for the
    // missing README (Documentation category).
    #[tokio::test]
    async fn test_aggregator_recommendations_generated() {
        let temp_dir = create_temp_repo();
        let repo_path = temp_dir.path();

        let aggregator = ScoreAggregator::new();
        let config = ScorerConfig::default();

        let result = aggregator.aggregate(repo_path, &config).await.unwrap();

        // Should have recommendations for missing components
        assert!(!result.recommendations.is_empty());
        assert!(result
            .recommendations
            .iter()
            .any(|r| r.category.contains("Documentation")));
    }

    // Metadata should record the scored path and the spec version.
    #[tokio::test]
    async fn test_aggregator_metadata_populated() {
        let temp_dir = create_temp_repo();
        let repo_path = temp_dir.path();

        let aggregator = ScoreAggregator::new();
        let config = ScorerConfig::default();

        let result = aggregator.aggregate(repo_path, &config).await.unwrap();

        // Metadata should be populated
        assert_eq!(result.metadata.repository_path, repo_path);
        assert_eq!(result.metadata.spec_version, "1.0.0");
    }

    // Recommendations come back highest-priority first.
    #[tokio::test]
    async fn test_aggregator_recommendation_priority() {
        let temp_dir = create_temp_repo();
        let repo_path = temp_dir.path();

        let aggregator = ScoreAggregator::new();
        let config = ScorerConfig::default();

        let result = aggregator.aggregate(repo_path, &config).await.unwrap();

        // Recommendations should be sorted by priority (Critical first)
        if result.recommendations.len() > 1 {
            assert!(result.recommendations[0].priority >= result.recommendations[1].priority);
        }
    }

    // total_score must equal the sum computed from the category breakdown.
    #[tokio::test]
    async fn test_aggregator_score_calculation() {
        let temp_dir = create_temp_repo();
        let repo_path = temp_dir.path();

        let aggregator = ScoreAggregator::new();
        let config = ScorerConfig::default();

        let result = aggregator.aggregate(repo_path, &config).await.unwrap();

        // Verify score calculation
        let calculated_total = result.categories.total();
        assert_eq!(result.total_score, calculated_total);
    }
}