use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::{Path, PathBuf};
use crate::agent::{self, frontmatter::join_frontmatter};
use crate::spec::{
AgentName, Deployment, Lock, OnTargetModified, Ownership, Rule, RuleKind, Source, Spec,
};
use super::fetch::{GithubFetchError, describe_github_source};
use super::{
AmbiguityReport, DeploymentStatus, ExpandedItem, ExpandedKind, Plan, PlanReport,
PlannedDeployment, RenderedTarget, SkippedGithubFetch, builtin, effective_policy, expand_rule,
hash_content,
};
/// Compute the repository-relative target path for `kind` when deploying to `agent`.
///
/// System prompts have dedicated per-agent locations (see
/// `system_file_path_for`); skills and agents live under the agent's
/// configuration directory, or at the repo root for `plain`.
fn target_path_for(agent: &AgentName, kind: &ExpandedKind) -> anyhow::Result<PathBuf> {
    if matches!(kind, ExpandedKind::System(_)) {
        return system_file_path_for(agent);
    }
    let base_dir = match agent.as_str() {
        "copilot" => ".github",
        "claude-code" => ".claude",
        "codex" => ".codex",
        "pi" => ".pi",
        "opencode" => ".opencode",
        "plain" => "",
        unknown => anyhow::bail!("unknown target agent: {}", unknown),
    };
    let prefix = match base_dir {
        "" => String::new(),
        d => format!("{d}/"),
    };
    match kind {
        ExpandedKind::Skill(skill) => Ok(PathBuf::from(format!(
            "{prefix}skills/{}/SKILL.md",
            skill.name
        ))),
        ExpandedKind::Agent(sub_agent) => Ok(PathBuf::from(format!(
            "{prefix}agents/{}.agent.md",
            sub_agent.name
        ))),
        ExpandedKind::System(_) => unreachable!("system kinds return early above"),
    }
}
/// Map a target agent to the fixed path of its system-prompt file.
fn system_file_path_for(agent: &AgentName) -> anyhow::Result<PathBuf> {
    let path = match agent.as_str() {
        "copilot" => ".github/copilot-instructions.md",
        "claude-code" => "CLAUDE.md",
        // These three agents all read the conventional AGENTS.md file.
        "codex" | "pi" | "opencode" => "AGENTS.md",
        "plain" => "system/AGENTS.md",
        unknown => anyhow::bail!("unknown target agent: {}", unknown),
    };
    Ok(PathBuf::from(path))
}
/// Render one expanded item into the concrete content and target path for
/// `target_agent`.
///
/// System prompts are emitted verbatim. Skills and agents have their front
/// matter re-serialized by the target agent's frontmatter writer and joined
/// back onto the body (or the body alone when serialization yields nothing).
pub fn render_for_agent(
    _root: &Path,
    item: &ExpandedItem,
    _schema_agent: &AgentName,
    target_agent: &AgentName,
) -> anyhow::Result<RenderedTarget> {
    let content = match &item.kind {
        ExpandedKind::System(sys) => sys.body.clone(),
        ExpandedKind::Skill(_) | ExpandedKind::Agent(_) => {
            let writer = agent::frontmatter_writer(target_agent)
                .ok_or_else(|| anyhow::anyhow!("unknown target agent: {}", target_agent))?;
            let (front, body) = match &item.kind {
                ExpandedKind::Skill(skill) => (&skill.front_matter, &skill.body),
                ExpandedKind::Agent(sub) => (&sub.front_matter, &sub.body),
                ExpandedKind::System(_) => unreachable!("system handled in outer match"),
            };
            let serialized = writer.format_frontmatter(front);
            if serialized.is_empty() {
                body.clone()
            } else {
                join_frontmatter(&serialized, body)
            }
        }
    };
    let target_path = target_path_for(target_agent, &item.kind)?;
    let content_hash = hash_content(&content);
    Ok(RenderedTarget {
        rule_id: item.rule_id.clone(),
        agent: target_agent.clone(),
        source: item.source.clone(),
        source_hash: item.source_hash.clone(),
        target_path,
        content,
        content_hash,
    })
}
/// Find the lock deployment matching the exact (rule id, agent, target path)
/// triple, if one exists.
fn find_lock_entry<'a>(
    lock: &'a Lock,
    rule_id: &str,
    agent: &AgentName,
    target_path: &Path,
) -> Option<&'a Deployment> {
    lock.deployments.iter().find(|dep| {
        dep.rule_id == rule_id && dep.agent == *agent && dep.content == target_path
    })
}
/// Find a lock deployment for the same agent and target path regardless of
/// which rule originally deployed it — a candidate for adoption when a rule
/// id changed but the file on disk did not.
fn find_reparent_candidate<'a>(
    lock: &'a Lock,
    agent: &AgentName,
    target_path: &Path,
) -> Option<&'a Deployment> {
    lock.deployments
        .iter()
        .find(|dep| dep.agent == *agent && dep.content == target_path)
}
/// One spec rule after expansion: the rule's position in `spec.rules`
/// together with the items it expanded into.
struct ExpandedRule {
    // Index into `spec.rules` identifying the originating rule.
    rule_index: usize,
    // Skills / agents / system prompts produced by expanding that rule.
    items: Vec<ExpandedItem>,
}
/// Result of expanding every rule in a spec: the rules that expanded
/// successfully, plus any GitHub fetches that were skipped (only populated
/// when fetch failures are being ignored).
struct ExpansionOutcome {
    expanded_rules: Vec<ExpandedRule>,
    skipped_fetches: Vec<SkippedGithubFetch>,
}
/// Compute a deployment plan with fetch failures treated as hard errors.
///
/// Convenience wrapper around [`compute_plan_with_fetch_failures`] that
/// discards the skipped-fetch report.
pub fn compute_plan(
    root: &Path,
    spec: &Spec,
    lock: &Lock,
    force: bool,
    inject_builtins: bool,
) -> anyhow::Result<Plan> {
    let report =
        compute_plan_with_fetch_failures(root, spec, lock, force, inject_builtins, false)?;
    Ok(report.plan)
}
pub fn compute_plan_with_fetch_failures(
root: &Path,
spec: &Spec,
lock: &Lock,
force: bool,
inject_builtins: bool,
ignore_fetch_failures: bool,
) -> anyhow::Result<PlanReport> {
let expansion = expand_rules(root, spec, ignore_fetch_failures)?;
let plan = compute_plan_from_expanded_rules(
root,
spec,
lock,
force,
inject_builtins,
&expansion.expanded_rules,
)?;
Ok(PlanReport {
plan,
skipped_fetches: expansion.skipped_fetches,
})
}
/// Core planner: turn already-expanded rules into a concrete [`Plan`].
///
/// Steps, in order:
/// 1. collect every path claimed by any rule and resolve a single owner per path;
/// 2. for each owned item, render it per target agent and classify the
///    deployment (create / update / up-to-date / conflict) against the lock
///    and the file currently on disk;
/// 3. mark lock deployments not re-planned this run as orphans;
/// 4. optionally append built-in rendered targets that don't collide with
///    user-produced files.
fn compute_plan_from_expanded_rules(
    root: &Path,
    spec: &Spec,
    lock: &Lock,
    force: bool,
    inject_builtins: bool,
    expanded_rules: &[ExpandedRule],
) -> anyhow::Result<Plan> {
    let default_policy = spec.defaults.on_target_modified;
    let mut items = Vec::new();
    // Map of every claimed path -> set of rule ids claiming it.
    let candidates = collect_candidates_from_expanded_rules(root, spec, expanded_rules)?;
    // Exactly one winning rule id per claimed path (errors on unresolvable conflicts).
    let owners = resolve_owners(spec, &candidates, lock)?;
    // Only decisions for contested paths (>1 claimant) are worth recording in the plan.
    let contested_owners: Vec<Ownership> = candidates
        .iter()
        .filter(|(_, cs)| cs.len() > 1)
        .filter_map(|(path, _)| {
            owners.get(path).map(|rule_id| Ownership {
                path: path.clone(),
                rule_id: rule_id.clone(),
            })
        })
        .collect();
    // (rule_id, agent, target_path) triples planned this run — for orphan detection below.
    let mut seen: HashSet<(String, String, PathBuf)> = HashSet::new();
    // Absolute target paths this plan will write.
    let mut produced: HashSet<PathBuf> = HashSet::new();
    for expanded_rule in expanded_rules {
        let rule = &spec.rules[expanded_rule.rule_index];
        let policy = effective_policy(rule.on_target_modified, default_policy);
        for exp_item in &expanded_rule.items {
            let source_file = match &exp_item.kind {
                ExpandedKind::Skill(s) => &s.file,
                ExpandedKind::Agent(a) => &a.file,
                ExpandedKind::System(s) => &s.file,
            };
            // Skip items whose source file is owned by a different rule.
            if owners.get(source_file).map(String::as_str) != Some(rule.id.as_str()) {
                continue;
            }
            for target_agent in &spec.agents {
                let rendered = render_for_agent(root, exp_item, &rule.schema_agent, target_agent)?;
                let abs_target = root.join(&rendered.target_path);
                // Source and target are the same file: nothing to deploy.
                if source_file == &abs_target {
                    continue;
                }
                // Skip targets this rule lost ownership of.
                if owners.get(&abs_target).map(String::as_str) != Some(rule.id.as_str()) {
                    continue;
                }
                seen.insert((
                    rendered.rule_id.clone(),
                    rendered.agent.0.clone(),
                    rendered.target_path.clone(),
                ));
                produced.insert(abs_target.clone());
                // Read errors (e.g. file absent) are treated as "nothing on disk".
                let disk_content = fs::read_to_string(&abs_target).ok();
                let disk_hash = disk_content.as_deref().map(hash_content);
                let direct = find_lock_entry(
                    lock,
                    &rendered.rule_id,
                    &rendered.agent,
                    &rendered.target_path,
                );
                // No direct lock entry but the on-disk file matches a deployment
                // previously made by some other rule: adopt that entry under the
                // current rule id instead of flagging a conflict.
                let adopted: Option<Deployment> = match (direct, disk_hash.as_deref()) {
                    (None, Some(dh)) => {
                        find_reparent_candidate(lock, &rendered.agent, &rendered.target_path)
                            .filter(|cand| cand.content_hash == dh)
                            .map(|cand| Deployment {
                                rule_id: rendered.rule_id.clone(),
                                agent: cand.agent.clone(),
                                source: cand.source.clone(),
                                source_hash: cand.source_hash.clone(),
                                content: cand.content.clone(),
                                content_hash: cand.content_hash.clone(),
                            })
                    }
                    _ => None,
                };
                let lock_entry = direct.or(adopted.as_ref());
                let status = compute_status(
                    lock_entry,
                    &rendered,
                    disk_hash.as_deref(),
                    disk_content.is_some(),
                    policy,
                    force,
                );
                items.push(PlannedDeployment {
                    rule_id: rendered.rule_id,
                    agent: rendered.agent,
                    source: rendered.source,
                    source_hash: rendered.source_hash,
                    target_path: rendered.target_path,
                    rendered_content: rendered.content,
                    status,
                });
            }
        }
    }
    // Lock deployments not re-planned above are orphans — unless their path is
    // now produced or claimed by a rule, in which case ownership simply moved.
    for dep in &lock.deployments {
        let key = (
            dep.rule_id.clone(),
            dep.agent.0.clone(),
            dep.content.clone(),
        );
        if seen.contains(&key) {
            continue;
        }
        let abs = root.join(&dep.content);
        if produced.contains(&abs) || owners.contains_key(&abs) {
            continue;
        }
        items.push(PlannedDeployment {
            rule_id: dep.rule_id.clone(),
            agent: dep.agent.clone(),
            source: dep.source.clone(),
            source_hash: dep.source_hash.clone(),
            target_path: dep.content.clone(),
            rendered_content: String::new(),
            status: DeploymentStatus::Orphan,
        });
    }
    if inject_builtins {
        // Directories that user rules read from — builtins must not write into them.
        let user_source_dirs: Vec<PathBuf> = spec
            .rules
            .iter()
            .filter_map(|r| match &r.source {
                Source::Local(p) => Some(root.join(p)),
                _ => None,
            })
            .filter(|p| p.is_dir())
            .collect();
        for rendered in builtin::builtin_rendered_targets(root, &spec.agents) {
            let abs_target = root.join(&rendered.target_path);
            // User-produced targets win over builtins.
            if produced.contains(&abs_target) {
                continue;
            }
            if user_source_dirs
                .iter()
                .any(|dir| abs_target.starts_with(dir))
            {
                continue;
            }
            let disk_content = fs::read_to_string(&abs_target).ok();
            // Builtins bypass the lock: status is decided purely from disk content.
            let status = match disk_content {
                Some(dc) if hash_content(&dc) == hash_content(&rendered.content) => {
                    DeploymentStatus::UpToDate
                }
                Some(_) => DeploymentStatus::Update,
                None => DeploymentStatus::Create,
            };
            produced.insert(abs_target.clone());
            items.push(PlannedDeployment {
                rule_id: rendered.rule_id,
                agent: rendered.agent,
                source: rendered.source,
                source_hash: rendered.source_hash,
                target_path: rendered.target_path,
                rendered_content: rendered.content,
                status,
            });
        }
    }
    Ok(Plan {
        items,
        owners: contested_owners,
    })
}
/// Expand every rule in the spec, optionally converting ignorable GitHub
/// fetch failures into skip reports instead of errors.
fn expand_rules(
    root: &Path,
    spec: &Spec,
    ignore_fetch_failures: bool,
) -> anyhow::Result<ExpansionOutcome> {
    let mut expanded_rules = Vec::new();
    let mut skipped_fetches = Vec::new();
    for (rule_index, rule) in spec.rules.iter().enumerate() {
        match expand_rule(root, rule) {
            Ok(items) => expanded_rules.push(ExpandedRule { rule_index, items }),
            Err(err) => match skipped_github_fetch(rule, &err, ignore_fetch_failures) {
                // Tolerated fetch failure: record it and keep going.
                Some(skipped) => skipped_fetches.push(skipped),
                None => return Err(err),
            },
        }
    }
    Ok(ExpansionOutcome {
        expanded_rules,
        skipped_fetches,
    })
}
/// Convert an expansion error into a [`SkippedGithubFetch`] report, but only
/// when failures are being tolerated and the error is an ignorable GitHub
/// fetch failure; otherwise return `None` so the caller propagates it.
fn skipped_github_fetch(
    rule: &Rule,
    err: &anyhow::Error,
    ignore_fetch_failures: bool,
) -> Option<SkippedGithubFetch> {
    if !ignore_fetch_failures {
        return None;
    }
    let fetch_err = err
        .downcast_ref::<GithubFetchError>()
        .filter(|e| e.is_ignorable_fetch_failure())?;
    let source = match &rule.source {
        Source::Github(g) => describe_github_source(g),
        Source::Local(path) => path.display().to_string(),
    };
    Some(SkippedGithubFetch {
        rule_id: rule.id.clone(),
        source,
        message: fetch_err.to_string(),
    })
}
/// A path claimed by multiple rules with no tie-breaker available.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AmbiguousPath {
    // The contested path.
    pub path: PathBuf,
    // Sorted ids of every rule claiming the path.
    pub candidates: Vec<String>,
}
/// Outcome of ownership resolution for a single path.
enum Resolution {
    // Exactly one rule id was determined to own the path.
    Owner(String),
    // Multiple rules claim the path and no tie-breaker applied.
    Ambiguous(AmbiguousPath),
}
/// Decide which rule owns `path` among `claimants`.
///
/// Resolution order:
/// 1. a single claimant owns the path outright;
/// 2. an owner recorded in the lock wins, provided it is still a claimant;
/// 3. if exactly one claimant is a single-file rule (skill/agent/system), it
///    wins over set-style rules; two or more single-file claimants is a hard
///    error, since neither can yield;
/// 4. otherwise the conflict is returned as [`Resolution::Ambiguous`] for the
///    caller to surface or fail on.
///
/// # Errors
/// Fails when two or more single-file rules claim the same path.
fn resolve_one(
    path: &Path,
    claimants: &HashSet<String>,
    rule_kinds: &HashMap<&str, &RuleKind>,
    lock_owners: &HashMap<&Path, &str>,
) -> anyhow::Result<Resolution> {
    if claimants.len() == 1 {
        return Ok(Resolution::Owner(claimants.iter().next().unwrap().clone()));
    }
    if let Some(&locked) = lock_owners.get(path) {
        if claimants.contains(locked) {
            return Ok(Resolution::Owner(locked.to_string()));
        }
    }
    // Claimants whose rule kind targets a single file rather than a set.
    let singles: Vec<&String> = claimants
        .iter()
        .filter(|r| {
            matches!(
                rule_kinds.get(r.as_str()),
                Some(RuleKind::Skill { .. })
                    | Some(RuleKind::Agent { .. })
                    | Some(RuleKind::System)
            )
        })
        .collect();
    match singles.len() {
        0 => {}
        1 => return Ok(Resolution::Owner(singles[0].clone())),
        _ => {
            let mut names: Vec<&str> = singles.iter().map(|s| s.as_str()).collect();
            names.sort();
            // "all" rather than "both": more than two single-file rules can clash here.
            anyhow::bail!(
                "ambiguous ownership for {}: single-file rules {:?} all claim this path",
                path.display(),
                names
            );
        }
    }
    // No single-file rule to prefer: report the full sorted claimant list.
    let mut names: Vec<String> = claimants.iter().cloned().collect();
    names.sort();
    Ok(Resolution::Ambiguous(AmbiguousPath {
        path: path.to_path_buf(),
        candidates: names,
    }))
}
/// List ambiguous ownership conflicts with fetch failures treated as hard
/// errors; wrapper around [`find_ambiguities_with_fetch_failures`].
pub fn find_ambiguities(
    root: &Path,
    spec: &Spec,
    lock: &Lock,
) -> anyhow::Result<Vec<AmbiguousPath>> {
    let report = find_ambiguities_with_fetch_failures(root, spec, lock, false)?;
    Ok(report.ambiguities)
}
/// Find every path whose ownership cannot be resolved, sorted by path,
/// optionally tolerating GitHub fetch failures (which are reported alongside).
pub fn find_ambiguities_with_fetch_failures(
    root: &Path,
    spec: &Spec,
    lock: &Lock,
    ignore_fetch_failures: bool,
) -> anyhow::Result<AmbiguityReport> {
    let expansion = expand_rules(root, spec, ignore_fetch_failures)?;
    let candidates = collect_candidates_from_expanded_rules(root, spec, &expansion.expanded_rules)?;
    let rule_kinds: HashMap<&str, &RuleKind> = spec
        .rules
        .iter()
        .map(|rule| (rule.id.as_str(), &rule.kind))
        .collect();
    let lock_owners: HashMap<&Path, &str> = lock
        .owners
        .iter()
        .map(|owner| (owner.path.as_path(), owner.rule_id.as_str()))
        .collect();
    let mut ambiguities = Vec::new();
    for (path, claimants) in &candidates {
        match resolve_one(path, claimants, &rule_kinds, &lock_owners)? {
            Resolution::Ambiguous(found) => ambiguities.push(found),
            Resolution::Owner(_) => {}
        }
    }
    // Deterministic output regardless of HashMap iteration order.
    ambiguities.sort_by(|left, right| left.path.cmp(&right.path));
    Ok(AmbiguityReport {
        ambiguities,
        skipped_fetches: expansion.skipped_fetches,
    })
}
/// Build the map of every claimed path (source files and per-agent target
/// paths) to the set of rule ids claiming it.
fn collect_candidates_from_expanded_rules(
    root: &Path,
    spec: &Spec,
    expanded_rules: &[ExpandedRule],
) -> anyhow::Result<HashMap<PathBuf, HashSet<String>>> {
    let mut claims: HashMap<PathBuf, HashSet<String>> = HashMap::new();
    for expanded in expanded_rules {
        let rule_id = &spec.rules[expanded.rule_index].id;
        for item in &expanded.items {
            let source_file = match &item.kind {
                ExpandedKind::Skill(s) => &s.file,
                ExpandedKind::Agent(a) => &a.file,
                ExpandedKind::System(s) => &s.file,
            };
            // The source file itself is a claim...
            claims
                .entry(source_file.clone())
                .or_default()
                .insert(rule_id.clone());
            // ...as is the rendered target path for every configured agent.
            for target_agent in &spec.agents {
                let target = root.join(target_path_for(target_agent, &item.kind)?);
                claims.entry(target).or_default().insert(rule_id.clone());
            }
        }
    }
    Ok(claims)
}
/// Resolve a single owning rule for every candidate path.
///
/// Unlike [`find_ambiguities_with_fetch_failures`], any unresolved ambiguity
/// here is a hard error: planning cannot proceed without a unique owner.
fn resolve_owners(
    spec: &Spec,
    candidates: &HashMap<PathBuf, HashSet<String>>,
    lock: &Lock,
) -> anyhow::Result<HashMap<PathBuf, String>> {
    let rule_kinds: HashMap<&str, &RuleKind> = spec
        .rules
        .iter()
        .map(|rule| (rule.id.as_str(), &rule.kind))
        .collect();
    let lock_owners: HashMap<&Path, &str> = lock
        .owners
        .iter()
        .map(|owner| (owner.path.as_path(), owner.rule_id.as_str()))
        .collect();
    let mut resolved: HashMap<PathBuf, String> = HashMap::new();
    for (path, claimants) in candidates {
        let resolution = resolve_one(path, claimants, &rule_kinds, &lock_owners)?;
        match resolution {
            Resolution::Owner(id) => {
                resolved.insert(path.clone(), id);
            }
            Resolution::Ambiguous(amb) => {
                anyhow::bail!(
                    "ambiguous ownership for {}: rules {:?} all claim this path. \
Record a decision in .rtango/lock.yaml under `owners:` or narrow the spec.",
                    amb.path.display(),
                    amb.candidates
                );
            }
        }
    }
    Ok(resolved)
}
/// Classify one rendered target against its lock entry (if any), the file
/// currently on disk, and the effective on-target-modified policy.
fn compute_status(
    lock_entry: Option<&Deployment>,
    rendered: &RenderedTarget,
    disk_hash: Option<&str>,
    disk_exists: bool,
    policy: OnTargetModified,
    force: bool,
) -> DeploymentStatus {
    match lock_entry {
        // Target is not tracked in the lock.
        None => {
            if !disk_exists {
                DeploymentStatus::Create
            } else if force {
                DeploymentStatus::Update
            } else {
                // An untracked file already sits at the target — refuse to clobber it.
                DeploymentStatus::Conflict {
                    reason: "target file exists but is not tracked in lock".into(),
                }
            }
        }
        Some(dep) => {
            if dep.source_hash == rendered.source_hash {
                // Source unchanged since the lock was written.
                match disk_hash {
                    // Disk matches what we deployed: nothing to do.
                    Some(dh) if dh == dep.content_hash => DeploymentStatus::UpToDate,
                    Some(_) => {
                        // Disk drifted from the deployed content: external edit.
                        apply_policy(policy, force, "target was modified externally")
                    }
                    None => {
                        // Target file was deleted; recreate it.
                        DeploymentStatus::Create
                    }
                }
            } else {
                // Source changed since the lock was written.
                let target_modified = match disk_hash {
                    Some(dh) => dh != dep.content_hash,
                    // A missing target counts as unmodified — just redeploy.
                    None => false, };
                if target_modified {
                    if force {
                        DeploymentStatus::Update
                    } else {
                        // Both sides diverged: defer to the configured policy.
                        apply_policy(policy, false, "both source and target were modified")
                    }
                } else {
                    DeploymentStatus::Update
                }
            }
        }
    }
}
/// Translate the configured on-target-modified policy into a deployment
/// status; `force` always wins and yields an update.
fn apply_policy(policy: OnTargetModified, force: bool, reason: &str) -> DeploymentStatus {
    match (force, policy) {
        (true, _) | (false, OnTargetModified::Overwrite) => DeploymentStatus::Update,
        (false, OnTargetModified::Fail) => DeploymentStatus::Conflict {
            reason: reason.to_string(),
        },
        (false, OnTargetModified::Skip) => DeploymentStatus::UpToDate,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use std::path::{Path, PathBuf};
    use tempfile::TempDir;
    // Create a copilot-layout skill on disk: .github/skills/<name>/SKILL.md.
    fn setup_copilot_skill(root: &Path, name: &str, body: &str) {
        let dir = root.join(format!(".github/skills/{name}"));
        fs::create_dir_all(&dir).unwrap();
        fs::write(dir.join("SKILL.md"), body).unwrap();
    }
    // A lock with no tracked agents, owners, or deployments.
    fn empty_lock() -> Lock {
        Lock {
            version: 1,
            tracked_agents: vec![],
            owners: vec![],
            deployments: vec![],
        }
    }
    #[test]
    fn skipped_rules_do_not_contribute_to_plan_items() {
        let tmp = TempDir::new().unwrap();
        let root = tmp.path();
        setup_copilot_skill(root, "local", "Local body");
        let local_rule = Rule {
            id: "local".into(),
            source: Source::Local(PathBuf::from(".github/skills")),
            schema_agent: AgentName::new("copilot"),
            on_target_modified: None,
            kind: RuleKind::skill_set(),
        };
        // A remote rule whose fetch is assumed to have been skipped.
        let skipped_rule = Rule {
            id: "remote".into(),
            source: Source::Github(crate::spec::GithubSource {
                github: "owner/repo".into(),
                r#ref: "main".into(),
                path: "skills".into(),
            }),
            schema_agent: AgentName::new("copilot"),
            on_target_modified: None,
            kind: RuleKind::skill_set(),
        };
        let local_items = expand_rule(root, &local_rule).unwrap();
        let spec = Spec {
            version: 1,
            agents: vec![AgentName::new("claude-code")],
            defaults: crate::spec::Defaults::default(),
            rules: vec![local_rule, skipped_rule],
        };
        // Only the local rule was expanded — the skipped rule contributes nothing.
        let expanded_rules = vec![ExpandedRule {
            rule_index: 0,
            items: local_items,
        }];
        let plan = compute_plan_from_expanded_rules(
            root,
            &spec,
            &empty_lock(),
            false,
            false,
            &expanded_rules,
        )
        .unwrap();
        // The plan must contain only the local rule's single deployment.
        assert_eq!(plan.items.len(), 1);
        assert_eq!(plan.items[0].rule_id, "local");
        assert_eq!(plan.items[0].status, DeploymentStatus::Create);
    }
}