use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::{Path, PathBuf};
use crate::agent::{self, frontmatter::join_frontmatter};
use crate::spec::{AgentName, Deployment, Lock, OnTargetModified, Ownership, RuleKind, Spec};
use super::{
DeploymentStatus, ExpandedItem, ExpandedKind, Plan, PlannedDeployment, RenderedTarget, builtin,
effective_policy, expand_rule, hash_content,
};
/// Compute the repo-relative output path for `kind` when deployed to `agent`.
///
/// System prompts use dedicated per-agent filenames; skills and agents live
/// under the agent's dot-directory (no directory prefix for "plain").
fn target_path_for(agent: &AgentName, kind: &ExpandedKind) -> anyhow::Result<PathBuf> {
    // System prompts follow their own per-agent naming scheme.
    if matches!(kind, ExpandedKind::System(_)) {
        return system_file_path_for(agent);
    }
    let dir = match agent.as_str() {
        "copilot" => ".github",
        "claude-code" => ".claude",
        "codex" => ".codex",
        "pi" => ".pi",
        "opencode" => ".opencode",
        "plain" => "",
        other => anyhow::bail!("unknown target agent: {}", other),
    };
    let prefix = match dir {
        "" => String::new(),
        d => format!("{d}/"),
    };
    let relative = match kind {
        ExpandedKind::Skill(skill) => format!("{prefix}skills/{}/SKILL.md", skill.name),
        ExpandedKind::Agent(agent_item) => format!("{prefix}agents/{}.agent.md", agent_item.name),
        ExpandedKind::System(_) => unreachable!("handled above"),
    };
    Ok(PathBuf::from(relative))
}
/// Path of the agent's system-prompt file, relative to the repo root.
fn system_file_path_for(agent: &AgentName) -> anyhow::Result<PathBuf> {
    let path = match agent.as_str() {
        "copilot" => ".github/copilot-instructions.md",
        "claude-code" => "CLAUDE.md",
        // codex, pi and opencode all read the conventional AGENTS.md.
        "codex" | "pi" | "opencode" => "AGENTS.md",
        "plain" => "system/AGENTS.md",
        other => anyhow::bail!("unknown target agent: {}", other),
    };
    Ok(PathBuf::from(path))
}
/// Render one expanded item into the concrete file `target_agent` expects:
/// the raw body for system prompts, or frontmatter formatted for that agent
/// joined with the body for skills and agents.
///
/// Returns the rendered content together with its target path and content
/// hash. Fails if `target_agent` has no registered frontmatter writer or if
/// no target path is defined for it.
pub fn render_for_agent(
    _root: &Path,
    item: &ExpandedItem,
    _schema_agent: &AgentName,
    target_agent: &AgentName,
) -> anyhow::Result<RenderedTarget> {
    // Shared path for the frontmatter-bearing kinds (skills and agents);
    // keeping it in a closure lets us match on `item.kind` exactly once
    // instead of twice with `unreachable!` arms.
    let render_with_frontmatter = |fm, body: &String| -> anyhow::Result<String> {
        let writer = agent::frontmatter_writer(target_agent)
            .ok_or_else(|| anyhow::anyhow!("unknown target agent: {}", target_agent))?;
        let yaml = writer.format_frontmatter(fm);
        // An empty frontmatter renders as the bare body, with no delimiter block.
        Ok(if yaml.is_empty() {
            body.clone()
        } else {
            join_frontmatter(&yaml, body)
        })
    };
    let content = match &item.kind {
        // System prompts are emitted verbatim.
        ExpandedKind::System(s) => s.body.clone(),
        ExpandedKind::Skill(s) => render_with_frontmatter(&s.front_matter, &s.body)?,
        ExpandedKind::Agent(a) => render_with_frontmatter(&a.front_matter, &a.body)?,
    };
    let target_path = target_path_for(target_agent, &item.kind)?;
    let content_hash = hash_content(&content);
    Ok(RenderedTarget {
        rule_id: item.rule_id.clone(),
        agent: target_agent.clone(),
        source: item.source.clone(),
        source_hash: item.source_hash.clone(),
        target_path,
        content,
        content_hash,
    })
}
/// Find the lock deployment matching this exact (rule, agent, target path)
/// triple, if one was recorded.
fn find_lock_entry<'a>(
    lock: &'a Lock,
    rule_id: &str,
    agent: &AgentName,
    target_path: &Path,
) -> Option<&'a Deployment> {
    lock.deployments.iter().find(|dep| {
        dep.rule_id == rule_id && dep.agent == *agent && dep.content == target_path
    })
}
/// Find any lock deployment for this (agent, target path) pair regardless of
/// rule id — a candidate for adoption when a target moved between rules.
fn find_reparent_candidate<'a>(
    lock: &'a Lock,
    agent: &AgentName,
    target_path: &Path,
) -> Option<&'a Deployment> {
    lock.deployments
        .iter()
        .find(|dep| dep.agent == *agent && dep.content == target_path)
}
/// Build the full deployment plan for `spec` against the current `lock` and
/// on-disk state.
///
/// For every rule expansion and every target agent this decides a
/// `DeploymentStatus` (create/update/up-to-date/conflict), then marks lock
/// entries that are no longer produced as orphans, and finally (when
/// `inject_builtins` is set) appends builtin targets that nothing else claims.
/// `force` relaxes conflict detection toward overwriting.
pub fn compute_plan(
    root: &Path,
    spec: &Spec,
    lock: &Lock,
    force: bool,
    inject_builtins: bool,
) -> anyhow::Result<Plan> {
    let default_policy = spec.defaults.on_target_modified;
    let mut items = Vec::new();
    // Expand every rule up front; any expansion failure aborts planning.
    let expansions: Vec<Vec<ExpandedItem>> = spec
        .rules
        .iter()
        .map(|r| expand_rule(root, r))
        .collect::<anyhow::Result<_>>()?;
    let candidates = collect_candidates(root, spec)?;
    let owners = resolve_owners(spec, &candidates, lock)?;
    // Keep ownership decisions only for paths claimed by more than one rule;
    // these are returned on the Plan so the caller can persist them.
    let contested_owners: Vec<Ownership> = candidates
        .iter()
        .filter(|(_, cs)| cs.len() > 1)
        .filter_map(|(path, _)| {
            owners.get(path).map(|rule_id| Ownership {
                path: path.clone(),
                rule_id: rule_id.clone(),
            })
        })
        .collect();
    // (rule_id, agent, target_path) triples planned this run — used below to
    // tell which lock entries are orphans.
    let mut seen: HashSet<(String, String, PathBuf)> = HashSet::new();
    // Absolute target paths produced this run — guards orphan detection and
    // builtin injection against double-writing a path.
    let mut produced: HashSet<PathBuf> = HashSet::new();
    for (rule, expanded) in spec.rules.iter().zip(expansions.iter()) {
        let policy = effective_policy(rule.on_target_modified, default_policy);
        for exp_item in expanded {
            let source_file = match &exp_item.kind {
                ExpandedKind::Skill(s) => &s.file,
                ExpandedKind::Agent(a) => &a.file,
                ExpandedKind::System(s) => &s.file,
            };
            // Skip items whose source path is owned by a different rule.
            if owners.get(source_file).map(String::as_str) != Some(rule.id.as_str()) {
                continue;
            }
            for target_agent in &spec.agents {
                let rendered = render_for_agent(root, exp_item, &rule.schema_agent, target_agent)?;
                let abs_target = root.join(&rendered.target_path);
                // A rule may render onto its own source file; never deploy
                // a file onto itself.
                if source_file == &abs_target {
                    continue;
                }
                // The target path itself may be contested; only its owner
                // gets to deploy there.
                if owners.get(&abs_target).map(String::as_str) != Some(rule.id.as_str()) {
                    continue;
                }
                seen.insert((
                    rendered.rule_id.clone(),
                    rendered.agent.0.clone(),
                    rendered.target_path.clone(),
                ));
                produced.insert(abs_target.clone());
                // Read current on-disk content; None when missing/unreadable.
                let disk_content = fs::read_to_string(&abs_target).ok();
                let disk_hash = disk_content.as_deref().map(hash_content);
                let direct = find_lock_entry(
                    lock,
                    &rendered.rule_id,
                    &rendered.agent,
                    &rendered.target_path,
                );
                // No exact lock entry: try to "adopt" another rule's entry
                // for the same (agent, path), but only when the on-disk file
                // still matches that entry's recorded content hash — i.e. we
                // deployed it and the path was since reassigned to this rule.
                let adopted: Option<Deployment> = match (direct, disk_hash.as_deref()) {
                    (None, Some(dh)) => {
                        find_reparent_candidate(lock, &rendered.agent, &rendered.target_path)
                            .filter(|cand| cand.content_hash == dh)
                            .map(|cand| Deployment {
                                // Re-key under the current rule, keep the rest.
                                rule_id: rendered.rule_id.clone(),
                                agent: cand.agent.clone(),
                                source: cand.source.clone(),
                                source_hash: cand.source_hash.clone(),
                                content: cand.content.clone(),
                                content_hash: cand.content_hash.clone(),
                            })
                    }
                    _ => None,
                };
                let lock_entry = direct.or(adopted.as_ref());
                let status = compute_status(
                    lock_entry,
                    &rendered,
                    disk_hash.as_deref(),
                    disk_content.is_some(),
                    policy,
                    force,
                );
                items.push(PlannedDeployment {
                    rule_id: rendered.rule_id,
                    agent: rendered.agent,
                    source: rendered.source,
                    source_hash: rendered.source_hash,
                    target_path: rendered.target_path,
                    rendered_content: rendered.content,
                    status,
                });
            }
        }
    }
    // Lock entries not re-planned above are orphans — their rule/agent/path
    // combination no longer exists — unless the path is still produced by or
    // owned by some rule (then it isn't ours to remove).
    for dep in &lock.deployments {
        let key = (
            dep.rule_id.clone(),
            dep.agent.0.clone(),
            dep.content.clone(),
        );
        if seen.contains(&key) {
            continue;
        }
        let abs = root.join(&dep.content);
        if produced.contains(&abs) || owners.contains_key(&abs) {
            continue;
        }
        items.push(PlannedDeployment {
            rule_id: dep.rule_id.clone(),
            agent: dep.agent.clone(),
            source: dep.source.clone(),
            source_hash: dep.source_hash.clone(),
            target_path: dep.content.clone(),
            // Orphans carry no content; they exist to be cleaned up.
            rendered_content: String::new(),
            status: DeploymentStatus::Orphan,
        });
    }
    if inject_builtins {
        // Directories backing local rule sources; builtins must never be
        // written inside a user-managed source tree.
        let user_source_dirs: Vec<PathBuf> = spec
            .rules
            .iter()
            .filter_map(|r| match &r.source {
                crate::spec::Source::Local(p) => Some(root.join(p)),
                _ => None,
            })
            .filter(|p| p.is_dir())
            .collect();
        for rendered in builtin::builtin_rendered_targets(root, &spec.agents) {
            let abs_target = root.join(&rendered.target_path);
            // User-produced targets always win over builtins.
            if produced.contains(&abs_target) {
                continue;
            }
            if user_source_dirs
                .iter()
                .any(|dir| abs_target.starts_with(dir))
            {
                continue;
            }
            let disk_content = fs::read_to_string(&abs_target).ok();
            // Builtins bypass lock and policy: compare disk vs rendered
            // content directly.
            let status = match disk_content {
                Some(dc) if hash_content(&dc) == hash_content(&rendered.content) => {
                    DeploymentStatus::UpToDate
                }
                Some(_) => DeploymentStatus::Update,
                None => DeploymentStatus::Create,
            };
            produced.insert(abs_target.clone());
            items.push(PlannedDeployment {
                rule_id: rendered.rule_id,
                agent: rendered.agent,
                source: rendered.source,
                source_hash: rendered.source_hash,
                target_path: rendered.target_path,
                rendered_content: rendered.content,
                status,
            });
        }
    }
    Ok(Plan {
        items,
        owners: contested_owners,
    })
}
/// A path claimed by multiple rules for which no owner could be determined;
/// surfaced so the user can record a decision.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AmbiguousPath {
    /// The contested path (joined against the repo root).
    pub path: PathBuf,
    /// Ids of all rules claiming the path, sorted for stable output.
    pub candidates: Vec<String>,
}
/// Outcome of ownership resolution for a single path.
enum Resolution {
    /// Exactly one rule wins; carries its rule id.
    Owner(String),
    /// Multiple rules claim the path and no tie-break rule applies.
    Ambiguous(AmbiguousPath),
}
/// Decide which rule owns `path`.
///
/// Resolution order:
/// 1. A single claimant wins outright.
/// 2. An owner recorded in the lock wins, if it is still among the claimants.
/// 3. A unique single-file rule (Skill/Agent/System) wins over other rules;
///    two or more single-file rules on the same path is a hard error.
/// 4. Otherwise the path is reported as ambiguous for the caller to handle.
fn resolve_one(
    path: &Path,
    claimants: &HashSet<String>,
    rule_kinds: &HashMap<&str, &RuleKind>,
    lock_owners: &HashMap<&Path, &str>,
) -> anyhow::Result<Resolution> {
    if claimants.len() == 1 {
        return Ok(Resolution::Owner(claimants.iter().next().unwrap().clone()));
    }
    if let Some(&locked) = lock_owners.get(path) {
        if claimants.contains(locked) {
            return Ok(Resolution::Owner(locked.to_string()));
        }
    }
    // Single-file rule kinds take priority over multi-file rules.
    let singles: Vec<&String> = claimants
        .iter()
        .filter(|r| {
            matches!(
                rule_kinds.get(r.as_str()),
                Some(RuleKind::Skill { .. })
                    | Some(RuleKind::Agent { .. })
                    | Some(RuleKind::System)
            )
        })
        .collect();
    match singles.len() {
        0 => {}
        1 => return Ok(Resolution::Owner(singles[0].clone())),
        _ => {
            let mut names: Vec<&str> = singles.iter().map(|s| s.as_str()).collect();
            names.sort();
            // Fixed wording: three or more single-file rules can collide, so
            // "all claim" (matching resolve_owners' message) instead of "both".
            anyhow::bail!(
                "ambiguous ownership for {}: single-file rules {:?} all claim this path",
                path.display(),
                names
            );
        }
    }
    // No tie-break applied: report every claimant, sorted for stable output.
    let mut names: Vec<String> = claimants.iter().cloned().collect();
    names.sort();
    Ok(Resolution::Ambiguous(AmbiguousPath {
        path: path.to_path_buf(),
        candidates: names,
    }))
}
/// Scan every candidate path in the spec and return those whose ownership
/// cannot be resolved, sorted by path for deterministic output.
pub fn find_ambiguities(
    root: &Path,
    spec: &Spec,
    lock: &Lock,
) -> anyhow::Result<Vec<AmbiguousPath>> {
    let candidates = collect_candidates(root, spec)?;
    let rule_kinds: HashMap<&str, &RuleKind> = spec
        .rules
        .iter()
        .map(|rule| (rule.id.as_str(), &rule.kind))
        .collect();
    let lock_owners: HashMap<&Path, &str> = lock
        .owners
        .iter()
        .map(|owner| (owner.path.as_path(), owner.rule_id.as_str()))
        .collect();
    let mut ambiguous = Vec::new();
    for (path, claimants) in &candidates {
        match resolve_one(path, claimants, &rule_kinds, &lock_owners)? {
            Resolution::Ambiguous(found) => ambiguous.push(found),
            Resolution::Owner(_) => {}
        }
    }
    ambiguous.sort_by(|lhs, rhs| lhs.path.cmp(&rhs.path));
    Ok(ambiguous)
}
/// Map every path involved in the spec — each expanded item's source file
/// plus its per-agent target paths — to the set of rule ids claiming it.
fn collect_candidates(
    root: &Path,
    spec: &Spec,
) -> anyhow::Result<HashMap<PathBuf, HashSet<String>>> {
    let expansions: Vec<Vec<ExpandedItem>> = spec
        .rules
        .iter()
        .map(|rule| expand_rule(root, rule))
        .collect::<anyhow::Result<_>>()?;
    let mut claims: HashMap<PathBuf, HashSet<String>> = HashMap::new();
    // Record that `id` claims `path`.
    let mut claim = |path: PathBuf, id: &String| {
        claims.entry(path).or_default().insert(id.clone());
    };
    for (rule, expanded) in spec.rules.iter().zip(&expansions) {
        for item in expanded {
            let source = match &item.kind {
                ExpandedKind::Skill(s) => s.file.clone(),
                ExpandedKind::Agent(a) => a.file.clone(),
                ExpandedKind::System(s) => s.file.clone(),
            };
            claim(source, &rule.id);
            for agent in &spec.agents {
                claim(root.join(target_path_for(agent, &item.kind)?), &rule.id);
            }
        }
    }
    Ok(claims)
}
/// Resolve an owner for every candidate path, failing with an actionable
/// error as soon as any path remains ambiguous.
fn resolve_owners(
    spec: &Spec,
    candidates: &HashMap<PathBuf, HashSet<String>>,
    lock: &Lock,
) -> anyhow::Result<HashMap<PathBuf, String>> {
    let rule_kinds: HashMap<&str, &RuleKind> = spec
        .rules
        .iter()
        .map(|rule| (rule.id.as_str(), &rule.kind))
        .collect();
    let lock_owners: HashMap<&Path, &str> = lock
        .owners
        .iter()
        .map(|owner| (owner.path.as_path(), owner.rule_id.as_str()))
        .collect();
    let mut owners = HashMap::new();
    for (path, claimants) in candidates {
        match resolve_one(path, claimants, &rule_kinds, &lock_owners)? {
            Resolution::Owner(id) => {
                owners.insert(path.clone(), id);
            }
            Resolution::Ambiguous(amb) => anyhow::bail!(
                "ambiguous ownership for {}: rules {:?} all claim this path. \
                Record a decision in .rtango/lock.yaml under `owners:` or narrow the spec.",
                amb.path.display(),
                amb.candidates
            ),
        }
    }
    Ok(owners)
}
/// Classify one planned deployment by comparing lock state, the rendered
/// source, and what is currently on disk, honoring the on-target-modified
/// policy and the `force` flag.
fn compute_status(
    lock_entry: Option<&Deployment>,
    rendered: &RenderedTarget,
    disk_hash: Option<&str>,
    disk_exists: bool,
    policy: OnTargetModified,
    force: bool,
) -> DeploymentStatus {
    // Untracked target: create when absent, otherwise only touch under force.
    let dep = match lock_entry {
        Some(dep) => dep,
        None => {
            if !disk_exists {
                return DeploymentStatus::Create;
            }
            if force {
                return DeploymentStatus::Update;
            }
            return DeploymentStatus::Conflict {
                reason: "target file exists but is not tracked in lock".into(),
            };
        }
    };
    // Source unchanged since the lock was written: disk state alone decides.
    if dep.source_hash == rendered.source_hash {
        return match disk_hash {
            Some(dh) if dh == dep.content_hash => DeploymentStatus::UpToDate,
            Some(_) => apply_policy(policy, force, "target was modified externally"),
            None => DeploymentStatus::Create,
        };
    }
    // Source changed: check whether the target also diverged from the lock.
    let target_modified = disk_hash.map_or(false, |dh| dh != dep.content_hash);
    if !target_modified {
        return DeploymentStatus::Update;
    }
    if force {
        DeploymentStatus::Update
    } else {
        apply_policy(policy, false, "both source and target were modified")
    }
}
/// Translate the on-target-modified policy into a status for a target whose
/// on-disk content diverged; `force` always wins and forces an overwrite.
fn apply_policy(policy: OnTargetModified, force: bool, reason: &str) -> DeploymentStatus {
    match (force, policy) {
        // Forced, or policy says overwrite: redeploy over the change.
        (true, _) | (false, OnTargetModified::Overwrite) => DeploymentStatus::Update,
        // Skip: leave the file alone and report it as up to date.
        (false, OnTargetModified::Skip) => DeploymentStatus::UpToDate,
        // Fail: surface a conflict for the user to resolve.
        (false, OnTargetModified::Fail) => DeploymentStatus::Conflict {
            reason: reason.to_string(),
        },
    }
}