// heal-cli 0.2.1
//
// Hook-driven Evaluation & Autonomous Loop — a code-health harness CLI for AI coding agents.
// Documentation:
//! Package observer reports into a `MetricsSnapshot` for the commit hook
//! to persist. Pure glue over `crate::observers`.

use std::collections::BTreeSet;
use std::path::Path;

use crate::core::calibration::Calibration;
use crate::core::config::{Config, MetricsConfig};
use crate::core::eventlog::EventLog;
use crate::core::finding::Finding;
use crate::core::snapshot::{
    ChangeCouplingDelta, ChurnDelta, ComplexityDelta, DuplicationDelta, HotspotDelta,
    MetricsSnapshot, SnapshotDelta, METRICS_SNAPSHOT_VERSION,
};
use crate::core::HealPaths;
use crate::observer::change_coupling::ChangeCouplingReport;
use crate::observer::churn::ChurnReport;
use crate::observer::complexity::{ComplexityMetric, ComplexityReport};
use crate::observer::duplication::DuplicationReport;
use crate::observer::hotspot::HotspotReport;

use crate::observers::{classify, tally_severity, ObserverReports};

/// `pack` plus a best-effort delta against the prior snapshot. The
/// caller runs the observers, both to populate the snapshot and (in
/// `heal hook commit`) to classify the same `reports` for the
/// post-commit nudge — passing them through here keeps a single pass.
/// Build the snapshot via [`pack`], then attach a best-effort delta
/// against the most recent snapshot in the event log. The caller runs
/// the observers once, both to populate the snapshot and (in
/// `heal hook commit`) to classify the same `reports` for the
/// post-commit nudge — threading them through keeps a single pass.
pub(crate) fn pack_with_delta(
    project: &Path,
    paths: &HealPaths,
    cfg: &Config,
    reports: &ObserverReports,
    findings: &[Finding],
) -> MetricsSnapshot {
    let mut snapshot = pack(project, paths, cfg, reports, findings);
    let log = EventLog::new(paths.snapshots_dir());
    // No prior snapshot (or an unreadable log) simply means no delta.
    if let Some((prev_event, prev_metrics)) = MetricsSnapshot::latest_in(&log).ok().flatten() {
        let delta = compute_delta(prev_event.timestamp, &prev_metrics, reports, &cfg.metrics);
        let encoded =
            serde_json::to_value(&delta).expect("SnapshotDelta serialization is infallible");
        snapshot.delta = Some(encoded);
    }
    snapshot
}

/// Classify `reports` once against `paths.calibration()`, returning
/// both the Findings (for the snapshot's `severity_counts`, the
/// post-commit nudge, and `heal check`'s renderer) and the loaded
/// calibration. Centralised so callers don't load + classify twice.
/// Classify `reports` once against `paths.calibration()`, returning
/// both the Findings (for the snapshot's `severity_counts`, the
/// post-commit nudge, and `heal check`'s renderer) and the loaded
/// calibration. Centralised so callers don't load + classify twice.
pub(crate) fn classify_with_calibration(
    paths: &HealPaths,
    cfg: &Config,
    reports: &ObserverReports,
) -> (Option<Calibration>, Vec<Finding>) {
    // Calibration is optional: a missing/unreadable file yields None,
    // and in that case there is nothing to classify against.
    let calibration = match Calibration::load(&paths.calibration()) {
        Ok(loaded) => Some(loaded.with_overrides(cfg)),
        Err(_) => None,
    };
    let findings = match calibration.as_ref() {
        Some(cal) => classify(reports, cal, cfg),
        None => Vec::new(),
    };
    (calibration, findings)
}

/// Build a `MetricsSnapshot` from already-computed observer reports
/// and Findings. The caller threads in the same `findings` it'll use
/// elsewhere (post-commit nudge, `heal check` renderer) so we don't
/// run `classify` twice on a single command.
/// Build a `MetricsSnapshot` from already-computed observer reports
/// and Findings. The caller threads in the same `findings` it'll use
/// elsewhere (post-commit nudge, `heal check` renderer) so we don't
/// run `classify` twice on a single command.
pub(crate) fn pack(
    project: &Path,
    paths: &HealPaths,
    _cfg: &Config,
    reports: &ObserverReports,
    findings: &[Finding],
) -> MetricsSnapshot {
    // Take the larger of the two observers' file counts; either may
    // have seen files the other skipped.
    let file_count = reports
        .complexity
        .totals
        .files
        .max(reports.loc.total_files());
    let codebase_files = u32::try_from(file_count).ok();
    // Severity tallies only make sense once a calibration file exists.
    let severity_counts = if paths.calibration().exists() {
        Some(tally_severity(findings))
    } else {
        None
    };
    MetricsSnapshot {
        version: METRICS_SNAPSHOT_VERSION,
        git_sha: crate::observer::git::head_sha(project),
        loc: Some(to_value(&reports.loc)),
        complexity: Some(to_value(&reports.complexity)),
        churn: reports.churn.as_ref().map(to_value),
        change_coupling: reports.change_coupling.as_ref().map(to_value),
        duplication: reports.duplication.as_ref().map(to_value),
        hotspot: reports.hotspot.as_ref().map(to_value),
        lcom: reports.lcom.as_ref().map(to_value),
        severity_counts,
        codebase_files,
        delta: None,
    }
}

/// Serialize an observer report to an opaque JSON `Value`.
/// Panics only if serialization fails, which report types never do.
fn to_value<T>(value: &T) -> serde_json::Value
where
    T: serde::Serialize,
{
    serde_json::to_value(value).expect("observer report serialization is infallible")
}

/// Decode a previously-persisted opaque `Value` back into a typed
/// report. `None` on an absent value or a shape mismatch.
fn decode<T>(v: Option<&serde_json::Value>) -> Option<T>
where
    T: serde::de::DeserializeOwned,
{
    let payload = v?;
    serde_json::from_value(payload.clone()).ok()
}

/// Compose every per-metric delta. `metrics` supplies the per-metric `top_n`
/// values that drive "entered / dropped from top-N" comparisons.
/// Compose every per-metric delta. `metrics` supplies the per-metric `top_n`
/// values that drive "entered / dropped from top-N" comparisons.
fn compute_delta(
    prev_ts: chrono::DateTime<chrono::Utc>,
    prev: &MetricsSnapshot,
    curr: &ObserverReports,
    metrics: &MetricsConfig,
) -> SnapshotDelta {
    // Each metric's delta is independently best-effort: a missing or
    // shape-mismatched previous payload just yields None for that metric.
    let complexity = decode::<ComplexityReport>(prev.complexity.as_ref())
        .map(|old| complexity_delta(&old, &curr.complexity, metrics.top_n_complexity()));
    let churn = pair_curr(prev.churn.as_ref(), curr.churn.as_ref())
        .map(|(old, new)| churn_delta(&old, new));
    let hotspot = pair_curr(prev.hotspot.as_ref(), curr.hotspot.as_ref())
        .map(|(old, new)| hotspot_delta(&old, new, metrics.top_n_hotspot()));
    let duplication = pair_curr(prev.duplication.as_ref(), curr.duplication.as_ref())
        .map(|(old, new)| duplication_delta(&old, new));
    let change_coupling = pair_curr(prev.change_coupling.as_ref(), curr.change_coupling.as_ref())
        .map(|(old, new)| change_coupling_delta(&old, new));
    SnapshotDelta {
        from_sha: prev.git_sha.clone(),
        from_timestamp: Some(prev_ts),
        complexity,
        churn,
        hotspot,
        duplication,
        change_coupling,
    }
}

/// Pair a previous opaque `Value` with the current typed report, decoding
/// only the previous side. Returns `None` if either side is absent or the
/// previous payload's shape no longer matches.
/// Pair a previous opaque `Value` with the current typed report, decoding
/// only the previous side. Returns `None` if either side is absent or the
/// previous payload's shape no longer matches.
fn pair_curr<'a, T: serde::de::DeserializeOwned>(
    prev: Option<&serde_json::Value>,
    curr: Option<&'a T>,
) -> Option<(T, &'a T)> {
    match (decode::<T>(prev), curr) {
        (Some(old), Some(new)) => Some((old, new)),
        _ => None,
    }
}

/// Signed difference of two `usize` counts, saturating each side at
/// `i64::MAX` on (practically impossible) overflow so the subtraction
/// itself cannot wrap.
fn delta_i64(curr: usize, prev: usize) -> i64 {
    let clamp = |n: usize| i64::try_from(n).unwrap_or(i64::MAX);
    clamp(curr) - clamp(prev)
}

/// Names of the `n` worst functions in `report` under `metric`,
/// in the ranking order `worst_n` produces.
fn top_names(report: &ComplexityReport, n: usize, metric: ComplexityMetric) -> Vec<String> {
    let ranked = report.worst_n(n, metric);
    let mut names = Vec::with_capacity(ranked.len());
    for func in ranked {
        names.push(func.name);
    }
    names
}

fn complexity_delta(
    prev: &ComplexityReport,
    curr: &ComplexityReport,
    top_n: usize,
) -> ComplexityDelta {
    let prev_ccn: BTreeSet<String> = top_names(prev, top_n, ComplexityMetric::Ccn)
        .into_iter()
        .collect();
    let curr_ccn = top_names(curr, top_n, ComplexityMetric::Ccn);
    let new_top_ccn: Vec<String> = curr_ccn
        .into_iter()
        .filter(|n| !prev_ccn.contains(n))
        .collect();

    let prev_cog: BTreeSet<String> = top_names(prev, top_n, ComplexityMetric::Cognitive)
        .into_iter()
        .collect();
    let curr_cog = top_names(curr, top_n, ComplexityMetric::Cognitive);
    let new_top_cognitive: Vec<String> = curr_cog
        .into_iter()
        .filter(|n| !prev_cog.contains(n))
        .collect();

    ComplexityDelta {
        max_ccn: i64::from(curr.totals.max_ccn) - i64::from(prev.totals.max_ccn),
        max_cognitive: i64::from(curr.totals.max_cognitive) - i64::from(prev.totals.max_cognitive),
        functions: delta_i64(curr.totals.functions, prev.totals.functions),
        files: delta_i64(curr.totals.files, prev.totals.files),
        new_top_ccn,
        new_top_cognitive,
    }
}

/// Delta of churn totals, plus whether the single most-churned file
/// changed between the two reports.
fn churn_delta(prev: &ChurnReport, curr: &ChurnReport) -> ChurnDelta {
    // The most-churned file, rendered as a displayable path (None when
    // the report is empty).
    fn top_file(report: &ChurnReport) -> Option<String> {
        report
            .worst_n(1)
            .into_iter()
            .next()
            .map(|f| f.path.display().to_string())
    }
    let previous_top_file = top_file(prev);
    let current_top_file = top_file(curr);
    ChurnDelta {
        commits_in_window: i64::from(curr.totals.commits) - i64::from(prev.totals.commits),
        top_file_changed: previous_top_file != current_top_file,
        previous_top_file,
        current_top_file,
    }
}

/// Delta of the max hotspot score plus which files entered or left the
/// top-N hotspot set.
fn hotspot_delta(prev: &HotspotReport, curr: &HotspotReport, top_n: usize) -> HotspotDelta {
    // Top-N hotspot paths as a set; BTreeSet keeps the added/dropped
    // lists in a deterministic order.
    fn top_paths(report: &HotspotReport, n: usize) -> BTreeSet<String> {
        report
            .worst_n(n)
            .into_iter()
            .map(|entry| entry.path.display().to_string())
            .collect()
    }
    let before = top_paths(prev, top_n);
    let after = top_paths(curr, top_n);
    HotspotDelta {
        max_score: curr.totals.max_score - prev.totals.max_score,
        top_files_added: after.difference(&before).cloned().collect(),
        top_files_dropped: before.difference(&after).cloned().collect(),
    }
}

/// Delta of duplication totals between two reports.
fn duplication_delta(prev: &DuplicationReport, curr: &DuplicationReport) -> DuplicationDelta {
    let (before, after) = (&prev.totals, &curr.totals);
    DuplicationDelta {
        duplicate_blocks: delta_i64(after.duplicate_blocks, before.duplicate_blocks),
        duplicate_tokens: delta_i64(after.duplicate_tokens, before.duplicate_tokens),
        files_affected: delta_i64(after.files_affected, before.files_affected),
    }
}

/// Delta of change-coupling totals plus the movement of the single
/// heaviest pair count.
fn change_coupling_delta(
    prev: &ChangeCouplingReport,
    curr: &ChangeCouplingReport,
) -> ChangeCouplingDelta {
    // Heaviest co-change count in a report; 0 when there are no pairs.
    let heaviest =
        |report: &ChangeCouplingReport| report.pairs.iter().map(|p| p.count).max().unwrap_or(0);
    ChangeCouplingDelta {
        pairs: delta_i64(curr.totals.pairs, prev.totals.pairs),
        files: delta_i64(curr.totals.files, prev.totals.files),
        max_pair_count: i64::from(heaviest(curr)) - i64::from(heaviest(prev)),
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::observer::churn::{ChurnTotals, FileChurn};
    use crate::observer::complexity::{ComplexityTotals, FileComplexity, FunctionMetric};
    use crate::observer::hotspot::{HotspotEntry, HotspotTotals};

    /// Single-file report where every function carries the given maxima.
    fn complexity_with(max_ccn: u32, max_cog: u32, functions: usize) -> ComplexityReport {
        let metrics: Vec<FunctionMetric> = (0..functions)
            .map(|i| FunctionMetric {
                name: format!("f{i}"),
                start_line: 1,
                end_line: 1,
                ccn: max_ccn,
                cognitive: max_cog,
            })
            .collect();
        let file = FileComplexity {
            path: "src/lib.rs".into(),
            language: "rust".into(),
            functions: metrics,
        };
        ComplexityReport {
            files: vec![file],
            totals: ComplexityTotals {
                files: 1,
                functions,
                max_ccn,
                max_cognitive: max_cog,
            },
        }
    }

    #[test]
    fn complexity_delta_captures_max_movement() {
        let before = complexity_with(10, 5, 3);
        let after = complexity_with(13, 4, 5);
        let delta = complexity_delta(&before, &after, 5);
        assert_eq!(delta.max_ccn, 3);
        assert_eq!(delta.max_cognitive, -1);
        assert_eq!(delta.functions, 2);
    }

    #[test]
    fn churn_delta_flags_top_file_change() {
        // Top file moves from a.rs (3 commits) to b.rs (5 commits).
        let before = ChurnReport {
            files: vec![FileChurn {
                path: "a.rs".into(),
                commits: 3,
                lines_added: 0,
                lines_deleted: 0,
            }],
            totals: ChurnTotals {
                files: 1,
                commits: 3,
                lines_added: 0,
                lines_deleted: 0,
            },
            since_days: 30,
        };
        let after = ChurnReport {
            files: vec![FileChurn {
                path: "b.rs".into(),
                commits: 5,
                lines_added: 0,
                lines_deleted: 0,
            }],
            totals: ChurnTotals {
                files: 1,
                commits: 5,
                lines_added: 0,
                lines_deleted: 0,
            },
            since_days: 30,
        };
        let delta = churn_delta(&before, &after);
        assert_eq!(delta.commits_in_window, 2);
        assert!(delta.top_file_changed);
        assert_eq!(delta.previous_top_file.as_deref(), Some("a.rs"));
        assert_eq!(delta.current_top_file.as_deref(), Some("b.rs"));
    }

    #[test]
    fn hotspot_delta_tracks_top_n_membership() {
        // Build a report from (path, score) pairs; the first entry is
        // assumed to carry the max score, mirroring worst-first order.
        let report_from = |entries: &[(&str, f64)]| HotspotReport {
            entries: entries
                .iter()
                .map(|(path, score)| HotspotEntry {
                    path: (*path).into(),
                    ccn_sum: 1,
                    churn_commits: 1,
                    score: *score,
                })
                .collect(),
            totals: HotspotTotals {
                files: entries.len(),
                max_score: entries.first().map_or(0.0, |(_, score)| *score),
            },
        };
        let before = report_from(&[("a.rs", 10.0), ("b.rs", 8.0)]);
        let after = report_from(&[("b.rs", 12.0), ("c.rs", 9.0)]);
        let delta = hotspot_delta(&before, &after, 2);
        assert!((delta.max_score - 2.0).abs() < f64::EPSILON);
        assert_eq!(delta.top_files_added, vec!["c.rs".to_string()]);
        assert_eq!(delta.top_files_dropped, vec!["a.rs".to_string()]);
    }
}