//! pmat 3.11.0
//!
//! PMAT - Zero-config AI context generation and code quality toolkit (CLI, MCP, HTTP)
impl IncrementalCoverageAnalyzer {
    /// Looks up a previously stored `FileCoverage` record for `file_id`.
    ///
    /// Returns `Ok(None)` on a cache miss; propagates deserialization
    /// errors from `bincode` via `?`.
    fn load_cached_coverage(&self, file_id: &FileId) -> Result<Option<FileCoverage>> {
        let key = self.coverage_key(file_id);

        match self.coverage_cache.get(&key) {
            Some(data) => {
                let coverage: FileCoverage = bincode::deserialize(&data)?;
                Ok(Some(coverage))
            }
            None => Ok(None),
        }
    }

    /// Serializes `coverage` with `bincode` and inserts it into the cache,
    /// overwriting any existing entry for the same file.
    fn store_coverage(&self, file_id: &FileId, coverage: &FileCoverage) -> Result<()> {
        let key = self.coverage_key(file_id);
        let data = bincode::serialize(coverage)?;
        self.coverage_cache.insert(key, data);
        Ok(())
    }

    /// Builds the cache key for a file: the literal prefix `coverage:`
    /// followed by the file's content hash, so distinct file versions
    /// get distinct entries.
    fn coverage_key(&self, file_id: &FileId) -> Vec<u8> {
        let mut key = b"coverage:".to_vec();
        key.extend_from_slice(&file_id.hash);
        key
    }

    /// Aggregates per-file coverage into project-wide percentages.
    ///
    /// Line coverage is weighted by line count (total covered lines over
    /// total lines); branch and function coverage are unweighted means of
    /// the per-file percentages. All three metrics report `0.0` for an
    /// empty input instead of dividing by zero (the unguarded division
    /// previously produced `NaN` for branch/function percentages).
    fn calculate_aggregate_coverage(
        &self,
        file_coverage: &HashMap<FileId, FileCoverage>,
    ) -> Result<AggregateCoverage> {
        let total_files = file_coverage.len();
        let covered_files = file_coverage
            .values()
            .filter(|c| c.line_coverage > 0.0)
            .count();

        let total_lines: usize = file_coverage.values().map(|c| c.total_lines).sum();

        let covered_lines: usize = file_coverage.values().map(|c| c.covered_lines.len()).sum();

        // Mean of a per-file metric, guarded so an empty map yields 0.0
        // rather than NaN.
        let average = |metric: fn(&FileCoverage) -> f64| -> f64 {
            if total_files == 0 {
                0.0
            } else {
                file_coverage.values().map(metric).sum::<f64>() / total_files as f64
            }
        };

        Ok(AggregateCoverage {
            line_percentage: if total_lines > 0 {
                covered_lines as f64 / total_lines as f64 * 100.0
            } else {
                0.0
            },
            branch_percentage: average(|c| c.branch_coverage),
            function_percentage: average(|c| c.function_coverage),
            total_files,
            covered_files,
        })
    }

    /// Estimates coverage of lines introduced by `changeset`.
    ///
    /// Heuristic placeholder: treats 10% of each modified file's lines as
    /// "new" and assumes they are covered at the file's overall line-coverage
    /// rate. A real implementation would diff the file to find the actual
    /// added lines. Reports 100% when no new lines exist, so an empty
    /// changeset never fails a delta-coverage gate.
    fn calculate_delta_coverage(
        &self,
        changeset: &ChangeSet,
        file_coverage: &HashMap<FileId, FileCoverage>,
    ) -> Result<DeltaCoverage> {
        let mut new_lines_total = 0;
        let mut new_lines_covered = 0;

        for file_id in &changeset.modified_files {
            if let Some(coverage) = file_coverage.get(file_id) {
                // In production, would diff to find actual new lines.
                // For now, assume 10% of lines are new.
                let new_lines = coverage.total_lines / 10;
                new_lines_total += new_lines;
                // line_coverage is a percentage (0-100); scale back to a fraction.
                new_lines_covered += (new_lines as f64 * coverage.line_coverage / 100.0) as usize;
            }
        }

        Ok(DeltaCoverage {
            new_lines_covered,
            new_lines_total,
            percentage: if new_lines_total > 0 {
                new_lines_covered as f64 / new_lines_total as f64 * 100.0
            } else {
                100.0
            },
        })
    }
}

// Field-wise clone of the analyzer. Every field is cloned directly, so if
// all four field types implement `Clone` (they must, for this to compile),
// `#[derive(Clone)]` on the struct definition would replace this impl —
// NOTE(review): the struct is declared elsewhere in the file; confirm there
// is no reason (e.g. a manual impl kept for a doc comment) before deriving.
// Caches/semaphore are presumably shared handles (Arc/DashMap-style), making
// this a cheap handle copy rather than a deep copy — verify against the
// struct definition.
impl Clone for IncrementalCoverageAnalyzer {
    fn clone(&self) -> Self {
        Self {
            coverage_cache: self.coverage_cache.clone(),
            ast_cache: self.ast_cache.clone(),
            call_graph: self.call_graph.clone(),
            semaphore: self.semaphore.clone(),
        }
    }
}

impl CallGraph {
    /// Creates an empty call graph with no forward or reverse edges.
    fn new() -> Self {
        Self {
            edges: DashMap::new(),
            reverse_edges: DashMap::new(),
        }
    }

    /// Returns the modules that depend on `module`, i.e. the reverse-edge
    /// set for that key. An unknown module yields an empty `Vec`.
    fn get_dependents(&self, module: &str) -> Vec<String> {
        match self.reverse_edges.get(module) {
            Some(entry) => entry.iter().cloned().collect(),
            None => Vec::new(),
        }
    }
}