use crate::config::Config;
use crate::db;
use crate::errors::CoreError;
use crate::models::{Pattern, PatternStatus, Projection};
use crate::projection::claude_md;
use crate::util::backup_file;
use chrono::{Duration, Utc};
use rusqlite::Connection;
use std::path::Path;
/// A stale item discovered by the curator.
///
/// Pairs a still-active [`Pattern`] with the [`Projection`] that shows it was
/// retro-generated, plus a human-readable explanation of why it is stale.
#[derive(Debug, Clone)]
pub struct StaleItem {
/// The pattern judged stale (status was `active` at detection time).
pub pattern: Pattern,
/// The projection linking the pattern to its generated artifact
/// (CLAUDE.md rule, skill file, or agent file).
pub projection: Projection,
/// Human-readable reason, e.g. "not seen in N days (threshold: M days)".
pub reason: String,
}
/// Result of a clean operation.
#[derive(Debug)]
pub struct CleanResult {
/// Number of patterns whose status was set to archived.
pub archived_count: usize,
/// Number of SKILL.md files removed from disk.
pub skills_removed: usize,
/// Number of rules removed from the CLAUDE.md managed section.
pub claude_md_rules_removed: usize,
/// Number of global agent files removed from disk.
pub agents_removed: usize,
}
/// Detect stale patterns that should be archived.
///
/// A pattern/projection is stale if ALL of these are true:
/// - last_seen is older than staleness_days
/// - The pattern was generated by retro (tracked via projections table)
/// - Pattern is currently active
///
/// # Errors
/// Propagates any database error from loading patterns or projections.
pub fn detect_stale(
    conn: &Connection,
    config: &Config,
) -> Result<Vec<StaleItem>, CoreError> {
    let staleness_days = config.analysis.staleness_days as i64;
    // Capture a single timestamp so the cutoff comparison and the reported
    // day count agree; calling Utc::now() twice (as before) could straddle
    // a day boundary and report days_stale < staleness_days.
    let now = Utc::now();
    let cutoff = now - Duration::days(staleness_days);
    // Get active patterns that have projections (i.e., were generated by retro)
    let active_patterns = db::get_patterns(conn, &["active"], None)?;
    let projections = db::get_projections_for_active_patterns(conn)?;
    let mut stale_items = Vec::new();
    for pattern in &active_patterns {
        if pattern.last_seen >= cutoff {
            continue;
        }
        // Must have a projection (retro-generated); patterns without one
        // are skipped even if old.
        if let Some(proj) = projections.iter().find(|p| p.pattern_id == pattern.id) {
            let days_stale = (now - pattern.last_seen).num_days();
            stale_items.push(StaleItem {
                pattern: pattern.clone(),
                projection: proj.clone(),
                reason: format!(
                    "not seen in {} days (threshold: {} days)",
                    days_stale, staleness_days
                ),
            });
        }
    }
    Ok(stale_items)
}
/// Archive stale items: backup files, remove projections, update DB status.
///
/// For each item, the backing artifact (CLAUDE.md rule, skill file, or agent
/// file) is backed up under `<retro_dir>/backups` and removed, then the
/// pattern's status is set to archived. CLAUDE.md removals are batched per
/// target file so each file is read, backed up, and written exactly once.
///
/// # Errors
/// Fails if the backup directory cannot be created, a backup or CLAUDE.md
/// write fails, or a pattern status update fails. Skill/agent file removals
/// themselves are best-effort (`remove_file` results are ignored).
pub fn archive_stale_items(
    conn: &Connection,
    _config: &Config,
    items: &[StaleItem],
) -> Result<CleanResult, CoreError> {
    let backup_dir = crate::config::retro_dir().join("backups");
    std::fs::create_dir_all(&backup_dir)
        .map_err(|e| CoreError::Io(format!("creating backup dir: {e}")))?;
    let mut result = CleanResult {
        archived_count: 0,
        skills_removed: 0,
        claude_md_rules_removed: 0,
        agents_removed: 0,
    };
    // Group CLAUDE.md items by target file. The previous implementation used
    // only the FIRST item's target_path: rules living in any other CLAUDE.md
    // file were silently left in place while their patterns were still
    // archived, and the removed-rules count was over-reported.
    let mut claude_md_groups: std::collections::HashMap<&String, Vec<&StaleItem>> =
        std::collections::HashMap::new();
    for item in items {
        if item.projection.target_type == "claude_md" {
            claude_md_groups
                .entry(&item.projection.target_path)
                .or_default()
                .push(item);
        }
    }
    for (target_path, group) in claude_md_groups {
        // Best-effort: an unreadable file or a missing managed section means
        // there is nothing to remove from this target.
        if let Ok(content) = std::fs::read_to_string(target_path) {
            if let Some(current_rules) = claude_md::read_managed_section(&content) {
                // Drop any managed rule whose content matches a stale projection.
                let stale_contents: Vec<&str> = group
                    .iter()
                    .map(|i| i.projection.content.as_str())
                    .collect();
                let remaining_rules: Vec<String> = current_rules
                    .into_iter()
                    .filter(|rule| !stale_contents.contains(&rule.as_str()))
                    .collect();
                // Backup before modification
                backup_file(target_path, &backup_dir)?;
                let updated = claude_md::update_claude_md_content(&content, &remaining_rules);
                std::fs::write(target_path, &updated)
                    .map_err(|e| CoreError::Io(format!("writing {target_path}: {e}")))?;
                result.claude_md_rules_removed += group.len();
            }
        }
    }
    // Handle skill and agent file removals individually
    for item in items {
        match item.projection.target_type.as_str() {
            "skill" => {
                let skill_path = Path::new(&item.projection.target_path);
                if skill_path.exists() {
                    backup_file(&item.projection.target_path, &backup_dir)?;
                    // Remove the SKILL.md file
                    let _ = std::fs::remove_file(skill_path);
                    // Try to remove the parent directory if it's now empty
                    if let Some(parent) = skill_path.parent() {
                        let _ = std::fs::remove_dir(parent); // Only succeeds if empty
                    }
                    result.skills_removed += 1;
                }
            }
            "global_agent" => {
                let agent_path = Path::new(&item.projection.target_path);
                if agent_path.exists() {
                    backup_file(&item.projection.target_path, &backup_dir)?;
                    let _ = std::fs::remove_file(agent_path);
                    result.agents_removed += 1;
                }
            }
            "claude_md" => {
                // Already handled above in the batched per-file pass.
            }
            _ => {}
        }
        // Every stale item's pattern is archived, even when its backing file
        // was already gone.
        db::update_pattern_status(conn, &item.pattern.id, &PatternStatus::Archived)?;
        result.archived_count += 1;
    }
    Ok(result)
}
/// JSON schema for constrained decoding of context audit responses.
///
/// Handed verbatim to the model, so it is kept as a single raw-string
/// literal. It must stay in lockstep with [`AuditResponse`] / [`AuditFinding`]:
/// same field names, the `finding_type` enum values listed here, and
/// `additionalProperties: false` at both levels.
pub const AUDIT_RESPONSE_SCHEMA: &str = r#"{"type":"object","properties":{"findings":{"type":"array","items":{"type":"object","properties":{"finding_type":{"type":"string","enum":["redundant","contradictory","oversized","stale"]},"description":{"type":"string"},"affected_items":{"type":"array","items":{"type":"string"}},"suggestion":{"type":"string"}},"required":["finding_type","description","affected_items","suggestion"],"additionalProperties":false}}},"required":["findings"],"additionalProperties":false}"#;
/// Context audit finding from AI analysis.
///
/// Mirrors one element of the `findings` array in [`AUDIT_RESPONSE_SCHEMA`].
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct AuditFinding {
/// One of: "redundant", "contradictory", "oversized", "stale"
/// (the enum values in the schema).
pub finding_type: String,
/// Human-readable description of the problem found.
pub description: String,
/// Identifiers of the context items this finding applies to.
pub affected_items: Vec<String>,
/// Suggested remediation for the finding.
pub suggestion: String,
}
/// Response from context audit AI call.
///
/// Top-level object described by [`AUDIT_RESPONSE_SCHEMA`]; may contain an
/// empty `findings` list when the audit found nothing.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct AuditResponse {
/// All findings reported by the audit.
pub findings: Vec<AuditFinding>,
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The schema must be parseable and must stay in lockstep with
    /// `AuditFinding`: previously only the top-level shape was checked, so a
    /// drifting `required` list or a dropped `additionalProperties` would
    /// have gone unnoticed.
    #[test]
    fn test_audit_response_schema_is_valid_json() {
        let value: serde_json::Value = serde_json::from_str(AUDIT_RESPONSE_SCHEMA)
            .expect("AUDIT_RESPONSE_SCHEMA must be valid JSON");
        assert_eq!(value["type"], "object");
        assert!(value["properties"]["findings"].is_object());
        assert_eq!(value["additionalProperties"], serde_json::json!(false));
        // Per-finding object: required keys match AuditFinding's fields.
        let item = &value["properties"]["findings"]["items"];
        assert_eq!(item["type"], "object");
        assert_eq!(
            item["required"],
            serde_json::json!(["finding_type", "description", "affected_items", "suggestion"])
        );
        assert_eq!(item["additionalProperties"], serde_json::json!(false));
    }

    /// An AuditResponse serialized by serde must deserialize back with the
    /// same field values (the round trip the AI-call path relies on).
    #[test]
    fn test_audit_response_serde_round_trip() {
        let resp = AuditResponse {
            findings: vec![AuditFinding {
                finding_type: "stale".to_string(),
                description: "old rule".to_string(),
                affected_items: vec!["item-1".to_string()],
                suggestion: "remove it".to_string(),
            }],
        };
        let json = serde_json::to_string(&resp).expect("serialize");
        let back: AuditResponse = serde_json::from_str(&json).expect("deserialize");
        assert_eq!(back.findings.len(), 1);
        assert_eq!(back.findings[0].finding_type, "stale");
        assert_eq!(back.findings[0].affected_items, vec!["item-1".to_string()]);
    }
}