use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::path::Path;
use anyhow::Result;
use serde::Serialize;
use crate::paths::state::StateLayout;
use crate::state::compiled as compiled_state;
use crate::state::projection_metadata;
use crate::state::reference_paths::{collect_prose_path_candidates, extract_key_files_candidates};
use crate::state::runtime as runtime_state;
/// Axis id: surfaced path references must resolve to repo-local files.
pub(crate) const REFERENTIAL_INTEGRITY_AXIS: &str = "referential_integrity";
/// Axis id: the latest recorded projection observation must match the
/// current canonical source fingerprint.
pub(crate) const PROJECTION_FRESHNESS_AXIS: &str = "projection_freshness";
/// Axis id: fresh recorded projection digests must agree with the currently
/// computed continuity surfaces.
pub(crate) const CROSS_ARTIFACT_AGREEMENT_AXIS: &str = "cross_artifact_agreement";
/// Axis id: remembered memory-automation guidance must be contradiction-free.
pub(crate) const MEMORY_COHERENCE_AXIS: &str = "memory_coherence";
/// Verdict for a single consistency axis.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) enum ConsistencyStatus {
    /// The axis was checked and found consistent.
    Aligned,
    /// The axis detected drift that needs explicit repair.
    Drift,
    /// The axis had nothing deterministic to check.
    NoSignal,
}
impl ConsistencyStatus {
    /// True only for the `Drift` variant.
    pub(crate) fn is_drift(self) -> bool {
        match self {
            Self::Drift => true,
            Self::Aligned | Self::NoSignal => false,
        }
    }
}
/// Aggregate verdict folded from all per-axis statuses.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) enum ConsistencyAggregateStatus {
    /// No axis reported drift and at least one had signal.
    Aligned,
    /// At least one axis reported drift.
    Drift,
    /// Every axis reported no signal.
    NoSignal,
}
/// One evaluated consistency axis, with human-readable evidence.
#[derive(Clone, Debug, Serialize)]
pub(crate) struct ConsistencyAxis {
    /// Stable axis identifier (one of the `*_AXIS` constants above).
    pub(crate) id: &'static str,
    pub(crate) status: ConsistencyStatus,
    pub(crate) summary: String,
    /// Supporting detail lines for the verdict.
    pub(crate) evidence: Vec<String>,
    /// Present only when `status` is `Drift`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) recommended_correction: Option<String>,
}
/// Aggregate result of evaluating every consistency axis.
#[derive(Clone, Debug, Serialize)]
pub(crate) struct ConsistencyEvaluation {
    pub(crate) status: ConsistencyAggregateStatus,
    pub(crate) summary: String,
    /// Evidence collected from drifted axes (empty when nothing drifted).
    pub(crate) evidence: Vec<String>,
    /// De-duplicated corrections recommended by drifted axes.
    pub(crate) recommended_corrections: Vec<String>,
    /// All evaluated axes, including aligned and no-signal ones.
    pub(crate) axes: Vec<ConsistencyAxis>,
}
impl ConsistencyEvaluation {
    /// True when the aggregate status is `Drift`.
    pub(crate) fn has_drift(&self) -> bool {
        match self.status {
            ConsistencyAggregateStatus::Drift => true,
            ConsistencyAggregateStatus::Aligned | ConsistencyAggregateStatus::NoSignal => false,
        }
    }
}
/// Borrowed inputs for `evaluate_with_inputs`, letting callers supply
/// pre-loaded state instead of re-reading it from disk.
pub(crate) struct ConsistencyInputs<'a> {
    pub(crate) repo_root: &'a Path,
    pub(crate) layout: &'a StateLayout,
    /// Which locality's runtime state to audit.
    pub(crate) locality_id: &'a str,
    pub(crate) handoff: &'a runtime_state::RuntimeHandoffState,
    pub(crate) checkpoint: Option<&'a runtime_state::RuntimeCheckpointState>,
    /// When false, the referential-integrity axis skips recovery-checkpoint
    /// references entirely.
    pub(crate) include_checkpoint_references: bool,
    pub(crate) source_fingerprint: &'a str,
    pub(crate) current_digests: &'a compiled_state::ProjectionDigests,
    pub(crate) latest_observation: Option<&'a projection_metadata::ProjectionObservation>,
}
/// Loads the most recent projection observation for this layout, if any.
///
/// # Errors
/// Propagates failures from loading the projection metadata.
pub(crate) fn load_latest_projection_observation(
    layout: &StateLayout,
) -> Result<Option<projection_metadata::ProjectionObservation>> {
    let metadata = projection_metadata::load_for_layout(layout)?;
    // The newest observation is the final entry of the recorded sequence.
    Ok(match metadata {
        Some(metadata) => metadata.observations.into_iter().last(),
        None => None,
    })
}
/// Runs the full consistency evaluation for one locality: loads runtime
/// state, previews the session-target projection, then delegates to
/// `evaluate_with_inputs` with the assembled snapshot.
///
/// # Errors
/// Propagates failures from runtime-state loading, projection preview, and
/// projection-metadata loading.
pub(crate) fn evaluate(
    repo_root: &Path,
    layout: &StateLayout,
    locality_id: &str,
) -> Result<ConsistencyEvaluation> {
    let runtime = runtime_state::load_runtime_state(repo_root, layout, locality_id)?;
    let compiled =
        compiled_state::preview_for_target(&runtime, compiled_state::ProjectionTarget::Session)?;
    // Prefer digests already attached to the preview; recompute only when absent.
    let current_digests = compiled
        .projection_digests
        .clone()
        .unwrap_or_else(|| compiled_state::compute_projection_digests(&compiled));
    let latest_observation = load_latest_projection_observation(layout)?;
    evaluate_with_inputs(ConsistencyInputs {
        repo_root,
        layout,
        locality_id,
        handoff: &runtime.state.handoff,
        checkpoint: runtime.recovery.state.checkpoint.as_ref(),
        include_checkpoint_references: true,
        source_fingerprint: &compiled.source_fingerprint,
        // FIX: this was mojibake `¤t_digests` (a mangled `&current_digests`,
        // `&curren` decoded as the HTML entity for ¤), which does not compile.
        current_digests: &current_digests,
        latest_observation: latest_observation.as_ref(),
    })
}
/// Evaluates every consistency axis against the supplied inputs and folds
/// the per-axis verdicts into one aggregate evaluation.
///
/// # Errors
/// Propagates failures from the memory-coherence axis, which reads runtime
/// state ledgers from disk.
pub(crate) fn evaluate_with_inputs(inputs: ConsistencyInputs<'_>) -> Result<ConsistencyEvaluation> {
    let referential = evaluate_referential_integrity(
        inputs.repo_root,
        inputs.handoff,
        inputs.checkpoint,
        inputs.include_checkpoint_references,
    );
    let freshness = evaluate_projection_freshness(
        inputs.layout,
        inputs.source_fingerprint,
        inputs.latest_observation,
    );
    let agreement = evaluate_cross_artifact_agreement(
        inputs.layout,
        inputs.source_fingerprint,
        inputs.current_digests,
        inputs.latest_observation,
    );
    let memory = evaluate_memory_coherence(inputs.layout, inputs.locality_id)?;
    summarize_axes(vec![referential, freshness, agreement, memory])
}
fn summarize_axes(axes: Vec<ConsistencyAxis>) -> Result<ConsistencyEvaluation> {
let drifted = axes
.iter()
.filter(|axis| axis.status == ConsistencyStatus::Drift)
.collect::<Vec<_>>();
if !drifted.is_empty() {
let mut recommended_corrections = Vec::new();
for correction in drifted
.iter()
.filter_map(|axis| axis.recommended_correction.clone())
{
if !recommended_corrections.contains(&correction) {
recommended_corrections.push(correction);
}
}
return Ok(ConsistencyEvaluation {
status: ConsistencyAggregateStatus::Drift,
summary: format!(
"{} consistency axis(es) need explicit repair before CCD continuity state is treated as clean.",
drifted.len()
),
evidence: drifted
.iter()
.flat_map(|axis| axis.evidence.clone())
.collect(),
recommended_corrections,
axes,
});
}
if axes
.iter()
.all(|axis| axis.status == ConsistencyStatus::NoSignal)
{
return Ok(ConsistencyEvaluation {
status: ConsistencyAggregateStatus::NoSignal,
summary: "CCD has no deterministic artifact-consistency signal beyond the currently available local state."
.to_owned(),
evidence: Vec::new(),
recommended_corrections: Vec::new(),
axes,
});
}
Ok(ConsistencyEvaluation {
status: ConsistencyAggregateStatus::Aligned,
summary: "No confirmed artifact-consistency drift was detected from the available CCD-local signals."
.to_owned(),
evidence: Vec::new(),
recommended_corrections: Vec::new(),
axes,
})
}
/// Referential-integrity axis: verifies that every surfaced repo-local path
/// reference in the handoff (and optionally the recovery checkpoint)
/// resolves to a file or directory under the repo root.
fn evaluate_referential_integrity(
    repo_root: &Path,
    handoff: &runtime_state::RuntimeHandoffState,
    checkpoint: Option<&runtime_state::RuntimeCheckpointState>,
    include_checkpoint_references: bool,
) -> ConsistencyAxis {
    // Canonicalize the root once so symlink escapes can be detected later;
    // if even the root cannot be canonicalized, nothing can be validated.
    let canonical_root = match repo_root.canonicalize() {
        Ok(root) => root,
        Err(_) => {
            return drift_axis(
                REFERENTIAL_INTEGRITY_AXIS,
                "Surfaced continuity references could not be validated against the repo root.",
                vec![format!(
                    "Could not canonicalize repo root for referential-integrity validation: {}",
                    repo_root.display()
                )],
                "Re-run the consistency audit from a valid repo checkout so CCD can validate surfaced path references against the repo root.",
            );
        }
    };
    let mut audit = ReferenceAuditSummary::default();
    audit_handoff_references(repo_root, &canonical_root, handoff, &mut audit);
    if include_checkpoint_references {
        audit_checkpoint_references(repo_root, &canonical_root, checkpoint, &mut audit);
    }
    let scope_label = if include_checkpoint_references {
        "current handoff or recovery checkpoint"
    } else {
        "current handoff"
    };
    // No reference contained a repo-local path candidate: nothing to judge.
    if audit.checked == 0 {
        return no_signal_axis(
            REFERENTIAL_INTEGRITY_AXIS,
            format!("No surfaced repo-local path references were present in the {scope_label}."),
            Vec::new(),
        );
    }
    let mut evidence = vec![format!(
        "Audited {} surfaced path reference(s) against repo root `{}`.",
        audit.checked,
        repo_root.display()
    )];
    if audit.missing.is_empty() && audit.outside.is_empty() {
        evidence.push(
            "All audited path references resolved to repo-local files or directories.".to_owned(),
        );
        return aligned_axis(
            REFERENTIAL_INTEGRITY_AXIS,
            "Every surfaced path reference resolves to a repo-local path under the repo root.",
            evidence,
        );
    }
    // Report missing references per artifact first, then non-local ones with
    // the reason each escaped the repo.
    for (artifact, entries) in &audit.missing {
        evidence.push(format!(
            "{}: {} missing path reference(s):",
            reference_artifact_label(artifact),
            entries.len()
        ));
        evidence.extend(
            entries
                .iter()
                .map(|path| format!("- {}", markdown_list_literal(path))),
        );
    }
    for (artifact, entries) in &audit.outside {
        evidence.push(format!(
            "{}: {} non-local path reference(s):",
            reference_artifact_label(artifact),
            entries.len()
        ));
        evidence.extend(
            entries
                .iter()
                .map(|(path, reason)| format!("- {} ({reason})", markdown_list_literal(path))),
        );
    }
    drift_axis(
        REFERENTIAL_INTEGRITY_AXIS,
        "Surfaced continuity artifacts still reference paths that are missing or live outside the repo root.",
        evidence,
        // Tailor the correction to whichever artifacts were in scope.
        if include_checkpoint_references {
            "Rewrite the surfaced handoff or recovery state so its file references stay repo-local and still exist on disk, then persist the corrected continuity state before the next session inherits it."
        } else {
            "Rewrite the surfaced handoff so its file references stay repo-local and still exist on disk, then persist the corrected continuity state before the next session inherits it."
        },
    )
}
/// Projection-freshness axis: checks whether the most recent projection
/// observation was recorded from the same canonical source fingerprint that
/// is current now.
fn evaluate_projection_freshness(
    layout: &StateLayout,
    current_source_fingerprint: &str,
    latest_observation: Option<&projection_metadata::ProjectionObservation>,
) -> ConsistencyAxis {
    let metadata_path = layout.state_db_path().display().to_string();
    let observation = match latest_observation {
        Some(observation) => observation,
        // Never recorded anything: there is no freshness to judge.
        None => {
            return no_signal_axis(
                PROJECTION_FRESHNESS_AXIS,
                "No projection observation has been recorded yet for this clone-local state DB.",
                vec![format!("Projection metadata is absent at {metadata_path}.")],
            );
        }
    };
    let evidence = vec![
        format!("Projection metadata path: {metadata_path}"),
        format!(
            "Latest recorded source fingerprint: {}",
            observation.source_fingerprint
        ),
        format!("Current source fingerprint: {current_source_fingerprint}"),
    ];
    if observation.source_fingerprint == current_source_fingerprint {
        aligned_axis(
            PROJECTION_FRESHNESS_AXIS,
            "The latest recorded projection observation matches the current canonical source fingerprint.",
            evidence,
        )
    } else {
        drift_axis(
            PROJECTION_FRESHNESS_AXIS,
            "The latest recorded projection observation is stale relative to the current canonical continuity state.",
            evidence,
            "Record a fresh projection observation before relying on delta or projection telemetry derived from older continuity state.",
        )
    }
}
/// Cross-artifact-agreement axis: compares the digests recorded with the
/// latest (fresh) projection observation against the currently computed
/// continuity-surface digests, surface by surface.
fn evaluate_cross_artifact_agreement(
    layout: &StateLayout,
    current_source_fingerprint: &str,
    current_digests: &compiled_state::ProjectionDigests,
    latest_observation: Option<&projection_metadata::ProjectionObservation>,
) -> ConsistencyAxis {
    let metadata_path = layout.state_db_path().display().to_string();
    // Without any recorded observation there is nothing to compare against.
    let Some(latest_observation) = latest_observation else {
        return no_signal_axis(
            CROSS_ARTIFACT_AGREEMENT_AXIS,
            "No projection observation is available yet, so cross-artifact agreement cannot be checked.",
            vec![format!("Projection metadata is absent at {metadata_path}.")],
        );
    };
    // A stale observation is already reported by the freshness axis; comparing
    // its digests here would double-report the same drift.
    if latest_observation.source_fingerprint != current_source_fingerprint {
        return no_signal_axis(
            CROSS_ARTIFACT_AGREEMENT_AXIS,
            "Cross-artifact agreement is deferred until projection freshness is restored.",
            vec![format!(
                "Latest recorded fingerprint `{}` does not match current fingerprint `{}`.",
                latest_observation.source_fingerprint, current_source_fingerprint
            )],
        );
    }
    let Some(recorded_digests) = latest_observation.projection_digests.as_ref() else {
        return no_signal_axis(
            CROSS_ARTIFACT_AGREEMENT_AXIS,
            "The latest projection observation did not record projection digests, so cross-artifact agreement cannot be checked.",
            vec![format!("Projection metadata path: {metadata_path}")],
        );
    };
    // FIX: the current-digest arguments were mojibake (`¤t_digests`, a
    // mangled `&current_digests` where `&curren` decoded as the HTML entity
    // for ¤), which does not compile. The seven repetitive per-surface calls
    // are also collapsed into one data-driven loop.
    let surfaces = [
        (
            "effective_memory",
            &recorded_digests.effective_memory,
            &current_digests.effective_memory,
        ),
        ("handoff", &recorded_digests.handoff, &current_digests.handoff),
        (
            "execution_gates",
            &recorded_digests.execution_gates,
            &current_digests.execution_gates,
        ),
        (
            "escalation",
            &recorded_digests.escalation,
            &current_digests.escalation,
        ),
        ("recovery", &recorded_digests.recovery, &current_digests.recovery),
        ("git_state", &recorded_digests.git_state, &current_digests.git_state),
        (
            "session_state",
            &recorded_digests.session_state,
            &current_digests.session_state,
        ),
    ];
    let mut compared = 0usize;
    let mut mismatches = Vec::new();
    for (surface, recorded, current) in surfaces {
        compare_digest_surface(surface, recorded, current, &mut compared, &mut mismatches);
    }
    // Every surface pair had an empty side: no comparable signal at all.
    if compared == 0 {
        return no_signal_axis(
            CROSS_ARTIFACT_AGREEMENT_AXIS,
            "The latest projection observation did not carry any non-empty surface digests that can be compared for agreement.",
            vec![format!("Projection metadata path: {metadata_path}")],
        );
    }
    let mut evidence = vec![
        format!("Projection metadata path: {metadata_path}"),
        format!("Compared {compared} recorded projection digest surface(s)."),
    ];
    if mismatches.is_empty() {
        return aligned_axis(
            CROSS_ARTIFACT_AGREEMENT_AXIS,
            "Fresh recorded projection digests agree with the current computed continuity surfaces.",
            evidence,
        );
    }
    evidence.extend(mismatches);
    drift_axis(
        CROSS_ARTIFACT_AGREEMENT_AXIS,
        "A fresh recorded projection disagrees with the current computed continuity surfaces.",
        evidence,
        "Rebuild the derived projection from canonical state and replace the conflicting recorded digests before relying on artifact agreement.",
    )
}
/// Memory-coherence axis: scans profile- and locality-scoped
/// memory-automation ledgers for records whose contradiction count is
/// non-zero.
///
/// # Errors
/// Propagates failures from loading either ledger or resolving the locality
/// runtime-state path.
fn evaluate_memory_coherence(layout: &StateLayout, locality_id: &str) -> Result<ConsistencyAxis> {
    let profile_ledger = runtime_state::load_profile_memory_automation_ledger(layout)?;
    let locality_ledger = runtime_state::load_repo_memory_automation_ledger(layout, locality_id)?;
    let total_records = profile_ledger.len() + locality_ledger.len();
    // With no recorded automation signal at all, report where state would live.
    if total_records == 0 {
        let evidence = vec![
            format!(
                "Profile runtime state path: {}",
                layout.profile_runtime_state_path().display()
            ),
            format!(
                "Locality runtime state path: {}",
                layout.locality_runtime_state_path(locality_id)?.display()
            ),
        ];
        return Ok(no_signal_axis(
            MEMORY_COHERENCE_AXIS,
            "No recorded memory-automation signal is available yet for memory-coherence validation.",
            evidence,
        ));
    }
    let mut contradictions = collect_memory_contradictions("profile", &profile_ledger);
    contradictions.extend(collect_memory_contradictions("locality", &locality_ledger));
    let mut evidence = vec![format!(
        "Audited {} memory-automation record(s) across profile and locality runtime state.",
        total_records
    )];
    if contradictions.is_empty() {
        return Ok(aligned_axis(
            MEMORY_COHERENCE_AXIS,
            "Recorded memory-automation signals show no active contradiction count in remembered guidance.",
            evidence,
        ));
    }
    evidence.extend(contradictions);
    Ok(drift_axis(
        MEMORY_COHERENCE_AXIS,
        "Recorded memory-automation signals detected contradictory remembered guidance.",
        evidence,
        "Review the contradicting memory entries and compact, supersede, or rewrite them so surfaced continuity does not inherit conflicting guidance.",
    ))
}
/// Compares one recorded/current digest pair, bumping `compared` and pushing
/// a mismatch line when the digests differ. Pairs where either side is empty
/// carry no signal and are skipped entirely.
fn compare_digest_surface(
    surface: &str,
    recorded: &str,
    current: &str,
    compared: &mut usize,
    mismatches: &mut Vec<String>,
) {
    let has_signal = !recorded.is_empty() && !current.is_empty();
    if !has_signal {
        return;
    }
    *compared += 1;
    if recorded == current {
        return;
    }
    mismatches.push(format!(
        "Surface `{surface}` digest mismatch: recorded `{recorded}`, current `{current}`."
    ));
}
/// Formats one evidence line per ledger record whose contradiction count is
/// above zero, prefixed with the given scope label ("profile"/"locality").
fn collect_memory_contradictions(
    scope: &str,
    ledger: &runtime_state::RuntimeMemoryAutomationLedger,
) -> Vec<String> {
    let mut lines = Vec::new();
    for (dedupe_key, record) in ledger.iter() {
        if record.contradiction_count > 0 {
            lines.push(format!(
                "{scope}: `{dedupe_key}` contradiction_count={} observations={} sessions={} localities={} structural_signal={}",
                record.contradiction_count,
                record.observation_count,
                record.observed_session_count(),
                record.observed_locality_count(),
                record.has_structural_signal
            ));
        }
    }
    lines
}
/// Running tally of the referential-integrity audit.
#[derive(Default)]
struct ReferenceAuditSummary {
    // Total references examined, including ones that resolved cleanly.
    checked: usize,
    // artifact label -> set of reference texts whose paths do not exist.
    // BTree containers keep the evidence output deterministically ordered.
    missing: BTreeMap<&'static str, BTreeSet<String>>,
    // artifact label -> (reference text -> reason it escapes the repo).
    outside: BTreeMap<&'static str, BTreeMap<String, &'static str>>,
}
impl ReferenceAuditSummary {
    /// Records a reference whose target path does not exist on disk.
    fn record_missing(&mut self, artifact: &'static str, path: &str) {
        self.missing
            .entry(artifact)
            .or_default()
            .insert(path.to_owned());
    }
    /// Records a reference that resolves outside the repo root, with why.
    fn record_outside(&mut self, artifact: &'static str, path: &str, reason: &'static str) {
        self.outside
            .entry(artifact)
            .or_default()
            .insert(path.to_owned(), reason);
    }
}
/// Audits every active handoff item for repo-local path references,
/// accumulating results into `audit` under the "handoff" artifact label.
fn audit_handoff_references(
    repo_root: &Path,
    canonical_root: &Path,
    handoff: &runtime_state::RuntimeHandoffState,
    audit: &mut ReferenceAuditSummary,
) {
    // key_files items: each item counts as ONE checked reference, and resolves
    // if any of its extracted path candidates resolves.
    for item in handoff
        .key_files
        .iter()
        .filter(|item| item.lifecycle.is_active())
    {
        let candidates = extract_key_files_candidates(&item.text);
        if candidates.is_empty() {
            continue;
        }
        audit.checked = audit.checked.saturating_add(1);
        match classify_reference_entry(repo_root, canonical_root, &candidates) {
            ReferenceEntryOutcome::Resolved => {}
            ReferenceEntryOutcome::Missing => audit.record_missing("handoff", &item.text),
            ReferenceEntryOutcome::OutsideRepo(reason) => {
                audit.record_outside("handoff", &item.text, reason)
            }
        }
    }
    // Prose fields: each distinct path token counts as one checked reference;
    // prose_seen de-duplicates tokens repeated across items/fields.
    let mut prose_seen: HashSet<String> = HashSet::new();
    let prose_fields: [&[runtime_state::RuntimeHandoffItem]; 3] = [
        &handoff.immediate_actions,
        &handoff.operational_guardrails,
        &handoff.definition_of_done,
    ];
    for field in prose_fields {
        for item in field.iter().filter(|item| item.lifecycle.is_active()) {
            let mut tokens = Vec::new();
            collect_prose_path_candidates(&item.text, &mut tokens);
            for token in tokens {
                if !prose_seen.insert(token.clone()) {
                    continue;
                }
                audit.checked = audit.checked.saturating_add(1);
                match classify_reference(repo_root, canonical_root, &token) {
                    ReferenceOutcome::Resolved => {}
                    ReferenceOutcome::Missing => audit.record_missing("handoff", &token),
                    ReferenceOutcome::OutsideRepo { reason } => {
                        audit.record_outside("handoff", &token, reason)
                    }
                }
            }
        }
    }
}
/// Audits the recovery checkpoint (when present) for repo-local path
/// references, accumulating into `audit` under "recovery_checkpoint".
/// Mirrors `audit_handoff_references`: key_files entries are classified as a
/// whole; immediate_actions prose is classified token by token.
fn audit_checkpoint_references(
    repo_root: &Path,
    canonical_root: &Path,
    checkpoint: Option<&runtime_state::RuntimeCheckpointState>,
    audit: &mut ReferenceAuditSummary,
) {
    // No checkpoint means nothing to audit.
    let Some(checkpoint) = checkpoint else {
        return;
    };
    for entry in &checkpoint.key_files {
        let candidates = extract_key_files_candidates(entry);
        if candidates.is_empty() {
            continue;
        }
        audit.checked = audit.checked.saturating_add(1);
        match classify_reference_entry(repo_root, canonical_root, &candidates) {
            ReferenceEntryOutcome::Resolved => {}
            ReferenceEntryOutcome::Missing => {
                audit.record_missing("recovery_checkpoint", entry);
            }
            ReferenceEntryOutcome::OutsideRepo(reason) => {
                audit.record_outside("recovery_checkpoint", entry, reason);
            }
        }
    }
    // De-duplicates tokens repeated across actions, scoped to this checkpoint.
    let mut prose_seen: HashSet<String> = HashSet::new();
    for action in &checkpoint.immediate_actions {
        let mut tokens = Vec::new();
        collect_prose_path_candidates(action, &mut tokens);
        for token in tokens {
            if !prose_seen.insert(token.clone()) {
                continue;
            }
            audit.checked = audit.checked.saturating_add(1);
            match classify_reference(repo_root, canonical_root, &token) {
                ReferenceOutcome::Resolved => {}
                ReferenceOutcome::Missing => {
                    audit.record_missing("recovery_checkpoint", &token);
                }
                ReferenceOutcome::OutsideRepo { reason } => {
                    audit.record_outside("recovery_checkpoint", &token, reason);
                }
            }
        }
    }
}
/// Maps an internal artifact key to its display label; unknown keys fall
/// back to "Artifact".
fn reference_artifact_label(artifact: &str) -> &'static str {
    if artifact == "handoff" {
        "Handoff"
    } else if artifact == "recovery_checkpoint" {
        "Recovery checkpoint"
    } else {
        "Artifact"
    }
}
/// Classification of a single path reference against the repo root.
#[derive(Clone, Debug)]
enum ReferenceOutcome {
    /// Exists and canonicalizes to a path under the repo root.
    Resolved,
    /// Does not exist (or could not be canonicalized) under the repo root.
    Missing,
    /// Can never be repo-local; `reason` explains why.
    OutsideRepo { reason: &'static str },
}
/// Classifies one path candidate relative to the repo root.
///
/// Absolute paths and paths containing `..` are rejected up front; otherwise
/// the candidate is joined onto `repo_root`, and the canonicalized target
/// must stay under `canonical_root` (catches symlink escapes).
fn classify_reference(
    repo_root: &Path,
    canonical_root: &Path,
    candidate: &str,
) -> ReferenceOutcome {
    let relative = Path::new(candidate);
    if relative.is_absolute() {
        return ReferenceOutcome::OutsideRepo {
            reason: "absolute path",
        };
    }
    let traverses_parent = relative
        .components()
        .any(|component| component == std::path::Component::ParentDir);
    if traverses_parent {
        return ReferenceOutcome::OutsideRepo {
            reason: "parent-directory traversal escapes the repo root",
        };
    }
    let joined = repo_root.join(relative);
    if !joined.exists() {
        return ReferenceOutcome::Missing;
    }
    match joined.canonicalize() {
        // canonicalize failing on an existing path is treated as missing.
        Err(_) => ReferenceOutcome::Missing,
        Ok(resolved) if resolved.starts_with(canonical_root) => ReferenceOutcome::Resolved,
        Ok(_) => ReferenceOutcome::OutsideRepo {
            reason: "symlink target escapes the repo root",
        },
    }
}
/// Classification of a whole reference entry, which may carry several path
/// candidates; one resolved candidate resolves the entry.
enum ReferenceEntryOutcome {
    Resolved,
    Missing,
    OutsideRepo(&'static str),
}
/// Classifies an entry with multiple path candidates: any resolved candidate
/// wins immediately; otherwise an outside-repo reason (the last one seen)
/// beats plain missing.
fn classify_reference_entry(
    repo_root: &Path,
    canonical_root: &Path,
    candidates: &[String],
) -> ReferenceEntryOutcome {
    let mut outside_reason: Option<&'static str> = None;
    for candidate in candidates {
        match classify_reference(repo_root, canonical_root, candidate) {
            ReferenceOutcome::Resolved => return ReferenceEntryOutcome::Resolved,
            ReferenceOutcome::OutsideRepo { reason } => outside_reason = Some(reason),
            ReferenceOutcome::Missing => {}
        }
    }
    outside_reason
        .map(ReferenceEntryOutcome::OutsideRepo)
        .unwrap_or(ReferenceEntryOutcome::Missing)
}
/// Wraps `text` in backticks for Markdown evidence lines, unless it already
/// contains a backtick (avoiding broken inline-code spans).
fn markdown_list_literal(text: &str) -> String {
    match text.contains('`') {
        true => text.to_owned(),
        false => format!("`{text}`"),
    }
}
/// Builds a `ConsistencyAxis` with `Aligned` status and no correction.
fn aligned_axis(
    id: &'static str,
    summary: impl Into<String>,
    evidence: Vec<String>,
) -> ConsistencyAxis {
    ConsistencyAxis {
        id,
        status: ConsistencyStatus::Aligned,
        summary: summary.into(),
        evidence,
        recommended_correction: None,
    }
}
/// Builds a `ConsistencyAxis` with `NoSignal` status and no correction.
fn no_signal_axis(
    id: &'static str,
    summary: impl Into<String>,
    evidence: Vec<String>,
) -> ConsistencyAxis {
    ConsistencyAxis {
        id,
        status: ConsistencyStatus::NoSignal,
        summary: summary.into(),
        evidence,
        recommended_correction: None,
    }
}
/// Builds a `ConsistencyAxis` with `Drift` status; drifted axes always carry
/// a recommended correction.
fn drift_axis(
    id: &'static str,
    summary: impl Into<String>,
    evidence: Vec<String>,
    recommended_correction: impl Into<String>,
) -> ConsistencyAxis {
    ConsistencyAxis {
        id,
        status: ConsistencyStatus::Drift,
        summary: summary.into(),
        evidence,
        recommended_correction: Some(recommended_correction.into()),
    }
}