use std::collections::BTreeSet;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::ExitCode;
use anyhow::{bail, Context, Result};
use serde::Serialize;
use sha2::{Digest, Sha256};
use crate::output::CommandReport;
use crate::paths::state::SyncConfig;
use crate::repo::marker as repo_marker;
use crate::shipped_skills::{
self, collect_relative_files, load_repo_canonical_skill_files, render_skill_for_runtime,
HostSkillHeaders,
};
/// Version stamp embedded in the `GENERATED:` marker line of every mirror.
const FORMAT_VERSION: &str = "1";
/// (runtime name, repo-relative output directory) for each skill mirror target.
const SKILL_OUTPUT_TARGETS: &[(&str, &str)] = &[
    ("codex", ".agents/skills"),
    ("claude", ".claude/skills"),
    ("gemini", ".gemini/skills"),
];
/// Tunable limits and boilerplate text used when rendering generated mirrors.
///
/// Defaults come from the `Default` impl; per-repo overrides are layered on
/// via `SyncProfile::merge_config`.
#[derive(Debug, Clone, PartialEq)]
pub struct SyncProfile {
    // Warn when a policy mirror exceeds this many lines.
    pub soft_line_limit: usize,
    // Error out when a policy mirror exceeds this many lines.
    pub hard_line_limit: usize,
    // Same pair of budgets, applied to the `.claude/CLAUDE.md` stub.
    pub stub_soft_line_limit: usize,
    pub stub_hard_line_limit: usize,
    // When true, strip `\r` so generated files use LF line endings only.
    pub enforce_lf: bool,
    // Optional host-specific header quoted into the CLAUDE.md / GEMINI.md mirrors.
    pub claude_header: String,
    pub gemini_header: String,
    // Body text of the `.claude/CLAUDE.md` compatibility stub.
    pub stub_body: String,
}
impl SyncProfile {
pub fn merge_config(&mut self, config: &SyncConfig) {
if let Some(val) = config.soft_line_limit {
self.soft_line_limit = val;
}
if let Some(val) = config.hard_line_limit {
self.hard_line_limit = val;
}
if let Some(val) = config.stub_soft_line_limit {
self.stub_soft_line_limit = val;
}
if let Some(val) = config.stub_hard_line_limit {
self.stub_hard_line_limit = val;
}
if let Some(val) = config.enforce_lf {
self.enforce_lf = val;
}
if let Some(ref val) = config.claude_header {
self.claude_header = val.clone();
}
if let Some(ref val) = config.gemini_header {
self.gemini_header = val.clone();
}
if let Some(ref val) = config.stub_body {
self.stub_body = val.clone();
}
}
}
impl Default for SyncProfile {
    /// Baseline profile used when a repo supplies no overrides.
    fn default() -> Self {
        Self {
            // Policy mirrors: warn past 120 lines, fail past 140.
            soft_line_limit: 120,
            hard_line_limit: 140,
            // The stub is expected to stay tiny; soft and hard limits match.
            stub_soft_line_limit: 25,
            stub_hard_line_limit: 25,
            enforce_lf: false,
            // No extra Claude header by default; Gemini gets the shipped one.
            claude_header: String::new(),
            gemini_header: shipped_skills::DEFAULT_GEMINI_HEADER.to_owned(),
            stub_body:
                "This compatibility stub exists for Claude variants that look for `.claude/CLAUDE.md`.\n\n\
                 1. Read `../AGENTS.md` first.\n\
                 2. Treat `../AGENTS.md` as the canonical repo policy.\n\
                 3. If guidance conflicts, `../AGENTS.md` wins."
                    .to_owned(),
        }
    }
}
/// A single detected problem with a generated mirror, serialized into reports.
#[derive(Serialize, Clone)]
pub struct SyncIssue {
    // Machine-readable category: "out_of_sync" or "missing_file".
    pub kind: String,
    // Repo-relative label of the generated file (e.g. "CLAUDE.md").
    pub label: String,
    // Display-formatted path of the file on disk.
    pub path: String,
    // Remediation text; rewritten per surface by `with_surface_messages`.
    pub message: String,
}
/// Which command surfaced the issue; controls the wording of its message.
#[derive(Clone, Copy)]
pub enum SyncIssueSurface {
    SyncCheck,
    Doctor,
    Drift,
}
/// Result of comparing rendered content against what is on disk.
pub struct SyncCheck {
    pub ok: bool,
    pub issues: Vec<SyncIssue>,
}
/// One rendered output file: where it goes and what it should contain.
#[derive(Clone)]
pub struct GeneratedFile {
    pub label: String,
    pub path: PathBuf,
    pub contents: String,
}
/// The complete set of files produced by one render pass.
pub struct GeneratedFiles {
    pub files: Vec<GeneratedFile>,
}
/// JSON-serializable report produced by the `sync` command.
#[derive(Serialize)]
pub struct SyncReport {
    command: &'static str,
    ok: bool,
    repo_root: String,
    check: bool,
    // One of "in_sync", "drift", or "written".
    status: &'static str,
    files: Vec<String>,
    warnings: Vec<String>,
    issues: Vec<SyncIssue>,
}
impl CommandReport for SyncReport {
    /// Exit 1 only for `--check` runs that detected drift; success otherwise.
    fn exit_code(&self) -> ExitCode {
        match (self.check, self.ok) {
            (true, false) => ExitCode::from(1),
            _ => ExitCode::SUCCESS,
        }
    }

    /// Human-readable rendering: warnings to stderr, results to stdout.
    fn render_text(&self) {
        for warning in &self.warnings {
            eprintln!("warning: {warning}");
        }
        if !self.check {
            // Write mode: just list what was written.
            for path in &self.files {
                println!("Wrote {path}");
            }
            return;
        }
        if self.ok {
            println!("Generated files are in sync.");
        } else {
            for issue in &self.issues {
                eprintln!("{}", issue.message);
            }
        }
    }
}
/// Renders all generated mirrors from `AGENTS.md`, then either writes them
/// (default) or reports drift (`check == true`).
///
/// # Errors
/// Fails when the repo marker or `AGENTS.md` cannot be loaded, when a hard
/// line budget is exceeded, or when writing a generated file fails.
pub fn run(repo_root: &Path, check: bool, profile: &SyncProfile) -> Result<SyncReport> {
    // Ensure we are inside a ccd-managed repo before touching anything.
    let _ = repo_marker::load(repo_root)?;
    let agents_path = repo_root.join("AGENTS.md");
    let agents = read_agents(&agents_path)?;
    let mut warnings = Vec::new();
    if let Some(warning) = check_budget(
        "AGENTS.md",
        line_count(&agents),
        profile.soft_line_limit,
        profile.hard_line_limit,
    )? {
        warnings.push(warning);
    }
    let hash = sha256(&agents);
    // Render every mirror exactly once. The policy files are part of
    // `render_all`'s output, so there is no need for the separate
    // `render_policy_files` call the previous version made just for the
    // budget checks below.
    let generated = render_all(repo_root, &agents, &hash, profile)?;
    let files = generated_file_paths(&generated);
    // Budget-check the three policy mirrors, looked up by label so this
    // does not depend on the ordering of `render_all`'s output.
    let budgets = [
        ("CLAUDE.md", profile.soft_line_limit, profile.hard_line_limit),
        ("GEMINI.md", profile.soft_line_limit, profile.hard_line_limit),
        (
            ".claude/CLAUDE.md",
            profile.stub_soft_line_limit,
            profile.stub_hard_line_limit,
        ),
    ];
    for (label, soft_limit, hard_limit) in budgets {
        let mirror = generated
            .files
            .iter()
            .find(|file| file.label == label)
            .with_context(|| format!("generated mirror {label} missing from render output"))?;
        if let Some(warning) =
            check_budget(label, line_count(&mirror.contents), soft_limit, hard_limit)?
        {
            warnings.push(warning);
        }
    }
    if check {
        let result = check_generated(&generated);
        let issues = with_surface_messages(result.issues, SyncIssueSurface::SyncCheck, repo_root);
        return Ok(SyncReport {
            command: "sync",
            ok: issues.is_empty(),
            repo_root: repo_root.display().to_string(),
            check: true,
            status: if issues.is_empty() { "in_sync" } else { "drift" },
            files,
            warnings,
            issues,
        });
    }
    write_generated(repo_root, &generated)?;
    Ok(SyncReport {
        command: "sync",
        ok: true,
        repo_root: repo_root.display().to_string(),
        check: false,
        status: "written",
        files,
        warnings,
        issues: Vec::new(),
    })
}
pub fn check_generated(generated: &GeneratedFiles) -> SyncCheck {
let mut issues = Vec::new();
for target in &generated.files {
match fs::read_to_string(&target.path) {
Ok(actual) => {
if actual != target.contents {
issues.push(SyncIssue {
kind: "out_of_sync".to_owned(),
label: target.label.clone(),
path: target.path.display().to_string(),
message: format!("out of sync: {}", target.label),
});
}
}
Err(_) => issues.push(SyncIssue {
kind: "missing_file".to_owned(),
label: target.label.clone(),
path: target.path.display().to_string(),
message: format!("missing generated file: {}", target.path.display()),
}),
}
}
SyncCheck {
ok: issues.is_empty(),
issues,
}
}
/// Builds the user-facing remediation message for a sync issue, phrased for
/// the surface (`sync --check`, `doctor`, or `drift`) that detected it.
pub fn issue_message(issue: &SyncIssue, surface: SyncIssueSurface, repo_root: &Path) -> String {
    let sync_command = format!("ccd sync --path {}", repo_root.display());
    let mirrors_clause = "generated mirrors no longer match AGENTS.md and any canonical skills";
    // Each surface names itself; all but `drift` also name a command to
    // rerun after regenerating.
    let (surface_name, rerun_command) = match surface {
        SyncIssueSurface::SyncCheck => (
            "ccd sync --check",
            Some(format!("ccd sync --check --path {}", repo_root.display())),
        ),
        SyncIssueSurface::Doctor => (
            "ccd doctor",
            Some(format!("ccd doctor --path {}", repo_root.display())),
        ),
        SyncIssueSurface::Drift => ("ccd drift", None),
    };
    // Missing files are reported by path; drift is reported by label.
    let finding = if issue.kind == "missing_file" {
        format!("found a missing generated mirror at {}", issue.path)
    } else {
        format!("found generated mirror drift in {}", issue.label)
    };
    let remedy = match rerun_command {
        Some(rerun) => {
            format!("Run `{sync_command}` to regenerate them, then rerun `{rerun}`.")
        }
        None => format!("Run `{sync_command}` to regenerate it."),
    };
    format!("{surface_name} {finding}; {mirrors_clause}. {remedy}")
}
/// Rewrites each issue's message for the given surface, consuming and
/// returning the issue list.
pub fn with_surface_messages(
    issues: Vec<SyncIssue>,
    surface: SyncIssueSurface,
    repo_root: &Path,
) -> Vec<SyncIssue> {
    let mut updated = issues;
    for issue in &mut updated {
        issue.message = issue_message(issue, surface, repo_root);
    }
    updated
}
/// Renders every generated file: the three policy mirrors first, followed by
/// the per-runtime skill mirrors.
pub fn render_all(
    repo_root: &Path,
    agents: &str,
    hash: &str,
    profile: &SyncProfile,
) -> Result<GeneratedFiles> {
    let skill_files = render_skill_files(repo_root, profile)?;
    let mut files = render_policy_files(repo_root, agents, hash, profile);
    files.extend(skill_files);
    Ok(GeneratedFiles { files })
}
/// Number of lines in `contents`; the empty string has zero lines.
///
/// `str::lines` already yields nothing for an empty string, so the previous
/// explicit empty-string branch was redundant and has been removed.
pub fn line_count(contents: &str) -> usize {
    contents.lines().count()
}
/// Compares a line count against soft and hard budgets.
///
/// # Errors
/// Fails when `lines` exceeds `hard_limit`. Returns `Ok(Some(warning))` when
/// it only exceeds `soft_limit`, and `Ok(None)` when it is within budget.
pub fn check_budget(
    label: &str,
    lines: usize,
    soft_limit: usize,
    hard_limit: usize,
) -> Result<Option<String>> {
    if lines > hard_limit {
        bail!("{label} is {lines} lines (hard limit: {hard_limit})");
    }
    let warning = (lines > soft_limit)
        .then(|| format!("{label} is {lines} lines (soft limit: {soft_limit})"));
    Ok(warning)
}
/// Reads the canonical `AGENTS.md`, mapping any failure to a "not found" error.
fn read_agents(path: &Path) -> Result<String> {
    let contents = fs::read_to_string(path);
    contents.with_context(|| format!("{} not found", path.display()))
}
/// Lowercase hex-encoded SHA-256 digest of `contents`.
pub(crate) fn sha256(contents: &str) -> String {
    // One-shot digest; equivalent to new() + update() + finalize().
    format!("{:x}", Sha256::digest(contents.as_bytes()))
}
fn render_claude(agents: &str, hash: &str, header: &str) -> String {
let header_line = if header.is_empty() {
String::new()
} else {
format!("> {header}\n")
};
format!(
"<!-- GENERATED: source=AGENTS.md format-version={FORMAT_VERSION} sha256={hash} -->\n\n\
> Generated from `AGENTS.md`. Edit `AGENTS.md`, then rerun `scripts/sync-ai-docs.sh --write`.\n\
> If this file conflicts with `AGENTS.md`, `AGENTS.md` wins.\n\
{header_line}\n\
{agents}"
)
}
fn render_gemini(agents: &str, hash: &str, header: &str) -> String {
let header_line = if header.is_empty() {
String::new()
} else {
format!("> {header}\n")
};
format!(
"<!-- GENERATED: source=AGENTS.md format-version={FORMAT_VERSION} sha256={hash} -->\n\n\
> Generated from `AGENTS.md`. Edit `AGENTS.md`, then rerun `scripts/sync-ai-docs.sh --write`.\n\
> If this file conflicts with `AGENTS.md`, `AGENTS.md` wins.\n\
{header_line}\n\
{agents}"
)
}
/// Renders the `.claude/CLAUDE.md` compatibility stub around `body`.
fn render_claude_stub(hash: &str, body: &str) -> String {
    let mut stub = format!(
        "<!-- GENERATED: source=AGENTS.md format-version={FORMAT_VERSION} sha256={hash} -->\n\n"
    );
    stub.push_str("# CLAUDE.md\n\n");
    stub.push_str(body);
    stub.push('\n');
    stub
}
/// Removes every carriage return so the result uses LF-only line endings.
fn strip_cr(s: &str) -> String {
    s.chars().filter(|&ch| ch != '\r').collect()
}
/// Prunes stale skill mirrors, then writes every generated file to disk,
/// creating parent directories as needed.
fn write_generated(repo_root: &Path, generated: &GeneratedFiles) -> Result<()> {
    // Remove outputs from earlier runs that this render no longer produces.
    prune_generated_skill_files(repo_root, generated)?;
    for entry in &generated.files {
        if let Some(dir) = entry.path.parent() {
            fs::create_dir_all(dir)
                .with_context(|| format!("failed to create {}", dir.display()))?;
        }
        write_file(&entry.path, &entry.contents)?;
    }
    Ok(())
}
/// Writes `contents` to `path`, attaching the path to any I/O error.
fn write_file(path: &Path, contents: &str) -> Result<()> {
    let describe = || format!("failed to write {}", path.display());
    fs::write(path, contents).with_context(describe)
}
/// Display-formatted paths of every generated file, for report output.
fn generated_file_paths(generated: &GeneratedFiles) -> Vec<String> {
    let mut paths = Vec::with_capacity(generated.files.len());
    for file in &generated.files {
        paths.push(file.path.display().to_string());
    }
    paths
}
/// Deletes previously generated skill files that are no longer part of the
/// current render, then removes any directories left empty.
fn prune_generated_skill_files(repo_root: &Path, generated: &GeneratedFiles) -> Result<()> {
    // Full set of skill-mirror paths this render is about to write; only
    // files under one of SKILL_OUTPUT_TARGETS' directories count.
    let expected: BTreeSet<PathBuf> = generated
        .files
        .iter()
        .filter_map(|file| {
            let relative = file.path.strip_prefix(repo_root).ok()?;
            // Normalize to forward slashes so the prefix test also works on Windows.
            let label = relative.to_string_lossy().replace('\\', "/");
            SKILL_OUTPUT_TARGETS
                .iter()
                .any(|(_, output_dir)| label.starts_with(&format!("{output_dir}/")))
                .then(|| file.path.clone())
        })
        .collect();
    // No skill mirrors rendered at all: leave existing output untouched
    // rather than deleting everything under the target directories.
    if expected.is_empty() {
        return Ok(());
    }
    for (_, output_dir) in SKILL_OUTPUT_TARGETS {
        let root = repo_root.join(output_dir);
        if !root.is_dir() {
            continue;
        }
        // Remove any file on disk that is not in the expected set.
        for relative_path in collect_relative_files(&root, &root)? {
            let existing = root.join(relative_path);
            if !expected.contains(&existing) {
                fs::remove_file(&existing)
                    .with_context(|| format!("failed to remove {}", existing.display()))?;
            }
        }
        // Clean up directories emptied by the removals, keeping the root itself.
        prune_empty_directories(&root, false)?;
    }
    Ok(())
}
/// Renders every canonical skill file once per runtime target, yielding one
/// `GeneratedFile` per (skill, runtime) pair.
fn render_skill_files(repo_root: &Path, profile: &SyncProfile) -> Result<Vec<GeneratedFile>> {
    let canonical_files = load_repo_canonical_skill_files(repo_root)?;
    let headers = HostSkillHeaders {
        claude: &profile.claude_header,
        gemini: &profile.gemini_header,
    };
    let mut outputs = Vec::new();
    for skill in canonical_files {
        // Forward-slash label so output is stable across platforms.
        let label_path = skill.relative_path.to_string_lossy().replace('\\', "/");
        for (runtime, output_dir) in SKILL_OUTPUT_TARGETS {
            outputs.push(GeneratedFile {
                label: format!("{output_dir}/{label_path}"),
                path: repo_root.join(output_dir).join(&skill.relative_path),
                contents: render_skill_for_runtime(
                    &skill.relative_path,
                    &skill.contents,
                    runtime,
                    headers,
                ),
            });
        }
    }
    Ok(outputs)
}
/// Builds the three policy mirrors (`CLAUDE.md`, `GEMINI.md`, and the
/// `.claude/CLAUDE.md` stub), normalizing line endings when the profile
/// requests LF-only output.
fn render_policy_files(
    repo_root: &Path,
    agents: &str,
    hash: &str,
    profile: &SyncProfile,
) -> Vec<GeneratedFile> {
    // Apply LF normalization at construction time instead of a second pass.
    let finalize = |text: String| {
        if profile.enforce_lf {
            strip_cr(&text)
        } else {
            text
        }
    };
    vec![
        GeneratedFile {
            label: "CLAUDE.md".to_owned(),
            path: repo_root.join("CLAUDE.md"),
            contents: finalize(render_claude(agents, hash, &profile.claude_header)),
        },
        GeneratedFile {
            label: "GEMINI.md".to_owned(),
            path: repo_root.join("GEMINI.md"),
            contents: finalize(render_gemini(agents, hash, &profile.gemini_header)),
        },
        GeneratedFile {
            label: ".claude/CLAUDE.md".to_owned(),
            path: repo_root.join(".claude/CLAUDE.md"),
            contents: finalize(render_claude_stub(hash, &profile.stub_body)),
        },
    ]
}
/// Recursively removes empty directories beneath `dir`.
///
/// Returns `Ok(true)` only when `dir` itself was removed, which requires
/// `remove_self == true` and no surviving entries after recursion.
fn prune_empty_directories(dir: &Path, remove_self: bool) -> Result<bool> {
    if !dir.is_dir() {
        return Ok(false);
    }
    let mut entries: Vec<_> = fs::read_dir(dir)
        .with_context(|| format!("failed to read directory {}", dir.display()))?
        .collect::<std::result::Result<Vec<_>, _>>()
        .with_context(|| format!("failed to list directory {}", dir.display()))?;
    // Deterministic traversal order for stable behavior and error reporting.
    entries.sort_by_key(|entry| entry.file_name());
    let mut has_entries = false;
    for entry in entries {
        let path = entry.path();
        let file_type = entry
            .file_type()
            .with_context(|| format!("failed to inspect {}", path.display()))?;
        if file_type.is_dir() {
            // A subdirectory that could not itself be removed keeps `dir` non-empty.
            if !prune_empty_directories(&path, true)? {
                has_entries = true;
            }
        } else {
            // Any non-directory entry keeps the directory alive.
            has_entries = true;
        }
    }
    if remove_self && !has_entries {
        fs::remove_dir(dir).with_context(|| format!("failed to remove {}", dir.display()))?;
        return Ok(true);
    }
    Ok(false)
}