use std::collections::HashSet;
use std::fs;
use std::io::{self, BufRead, IsTerminal, Write};
use std::path::{Path, PathBuf};
use anyhow::{Context, Result, bail};
use dialoguer::Select;
use rayon::prelude::*;
use super::{
ManagedCollision, ManagedCollisionChoice, ManagedCollisionResolver, ManagedCollisionSource,
PlannedFileWrite, Resolution, ResolvedManagedPathOrigin, SyncExecutionPlan, SyncMode,
SyncSummary, TtyManagedCollisionResolver, UnmanagedCollision,
};
use crate::adapters::ManagedFile;
use crate::execution::{ExecutionMode, PreviewChange};
use crate::lockfile::Lockfile;
use crate::manifest::{LoadedManifest, load_dependency_from_dir};
use crate::paths::{display_path, strip_path_prefix};
use crate::report::Reporter;
use crate::selection::interactive_select_theme;
use crate::store::write_atomic;
#[allow(clippy::too_many_arguments)]
/// Builds the complete plan of filesystem changes for one sync run: an
/// optional manifest rewrite, stale/conflicting path removals, the managed
/// file writes, and an optional lockfile rewrite. Nothing touches disk here;
/// `execute_sync_plan` applies (or previews) the result.
pub(super) fn build_sync_execution_plan(
    original_root: &LoadedManifest,
    working_root: &LoadedManifest,
    lockfile_path: &Path,
    lockfile: &Lockfile,
    runtime_root: &Path,
    owned_paths: &HashSet<PathBuf>,
    desired_paths: &HashSet<PathBuf>,
    planned_files: &[ManagedFile],
    warnings: Vec<String>,
    summary: SyncSummary,
    sync_mode: SyncMode,
) -> Result<SyncExecutionPlan> {
    let manifest_write = planned_manifest_write(original_root, working_root)?;
    // Removals = owned paths that are no longer desired, plus owned paths
    // that must be cleared to make room for the planned writes.
    let mut removals = planned_stale_paths(owned_paths, desired_paths);
    removals.extend(planned_paths_to_replace(
        planned_files,
        owned_paths,
        desired_paths,
        &working_root.root,
    )?);
    removals.sort();
    removals.dedup();
    // Lockfile-check modes only verify, so they never plan a lockfile write.
    let lockfile_write = if sync_mode.checks_lockfile() {
        None
    } else {
        Some(planned_lockfile_write(lockfile_path, lockfile)?)
    };
    Ok(SyncExecutionPlan {
        runtime_root: runtime_root.to_path_buf(),
        manifest_write,
        removals,
        managed_writes: planned_files.to_vec(),
        lockfile_write,
        warnings,
        summary,
    })
}
/// Applies (or, in dry-run mode, previews) a previously built plan.
///
/// Order matters on the apply path: manifest first, then removals (clearing
/// conflicting paths), then the managed writes, then the lockfile. Warnings
/// are reported in both modes.
pub(super) fn execute_sync_plan(
    plan: &SyncExecutionPlan,
    execution_mode: ExecutionMode,
    reporter: &Reporter,
) -> Result<()> {
    if execution_mode.is_dry_run() {
        if let Some(write) = &plan.manifest_write {
            reporter.preview(&planned_write_preview_change(write))?;
        }
        for path in &plan.removals {
            reporter.preview(&PreviewChange::Remove(path.clone()))?;
        }
        if !plan.managed_writes.is_empty() {
            reporter.status("Preview", "managed runtime outputs")?;
            for file in &plan.managed_writes {
                // Distinguish creating a new file from overwriting one.
                let change = if file.path.exists() {
                    PreviewChange::Write(file.path.clone())
                } else {
                    PreviewChange::Create(file.path.clone())
                };
                reporter.preview(&change)?;
            }
        }
        if let Some(write) = &plan.lockfile_write {
            reporter.preview(&planned_write_preview_change(write))?;
        }
    } else {
        if let Some(write) = &plan.manifest_write {
            reporter.status("Writing", write.path.display())?;
            write_atomic(&write.path, &write.contents)?;
        }
        for path in &plan.removals {
            reporter.status("Removing", path.display())?;
            // Also prunes now-empty parent directories up to well-known roots.
            remove_path_and_empty_parents(path, &plan.runtime_root)?;
        }
        reporter.status("Writing", "managed runtime outputs")?;
        write_managed_files(&plan.managed_writes)?;
        if let Some(write) = &plan.lockfile_write {
            reporter.status("Writing", write.path.display())?;
            write_atomic(&write.path, &write.contents)?;
        }
    }
    // Warnings surface regardless of dry-run vs apply.
    for warning in &plan.warnings {
        reporter.warning(warning)?;
    }
    Ok(())
}
fn planned_manifest_write(
original_root: &LoadedManifest,
working_root: &LoadedManifest,
) -> Result<Option<PlannedFileWrite>> {
let Some(path) = &working_root.manifest_path else {
return Ok(None);
};
let contents = working_root
.read_package_file(path)
.with_context(|| format!("failed to read manifest {}", path.display()))?;
let current = if path.exists() {
Some(
std::fs::read(path)
.with_context(|| format!("failed to read manifest {}", path.display()))?,
)
} else {
None
};
if original_root.manifest_path.as_deref() == Some(path)
&& current
.as_ref()
.is_some_and(|existing| *existing == contents)
{
Ok(None)
} else {
Ok(Some(PlannedFileWrite {
path: path.clone(),
contents,
create: !path.exists(),
}))
}
}
/// Serializes the lockfile to pretty TOML and wraps it as a planned write.
fn planned_lockfile_write(path: &Path, lockfile: &Lockfile) -> Result<PlannedFileWrite> {
    let serialized =
        toml::to_string_pretty(lockfile).context("failed to serialize lockfile")?;
    Ok(PlannedFileWrite {
        path: path.to_path_buf(),
        create: !path.exists(),
        contents: serialized.into_bytes(),
    })
}
/// Owned paths that are no longer desired and still exist on disk, sorted.
fn planned_stale_paths(
    owned_paths: &HashSet<PathBuf>,
    desired_paths: &HashSet<PathBuf>,
) -> Vec<PathBuf> {
    let mut stale: Vec<PathBuf> = Vec::new();
    for path in owned_paths.difference(desired_paths) {
        // symlink_metadata succeeds for files, dirs, and dangling symlinks
        // alike, without following the link — i.e. "exists as an entry".
        if fs::symlink_metadata(path).is_ok() {
            stale.push(path.clone());
        }
    }
    stale.sort();
    stale
}
/// Computes owned paths that must be removed before writing `planned_files`:
/// owned directories sitting where a planned FILE must go, owned ancestor
/// files blocking a needed directory, and owned ancestor directories that no
/// other desired path will live under. Result is sorted and deduplicated
/// via the `removed` set.
fn planned_paths_to_replace(
    planned_files: &[ManagedFile],
    owned_paths: &HashSet<PathBuf>,
    desired_paths: &HashSet<PathBuf>,
    project_root: &Path,
) -> Result<Vec<PathBuf>> {
    let mut removed = HashSet::new();
    for file in planned_files {
        // A planned file target that is currently an owned DIRECTORY must be
        // cleared; once newly recorded, skip the ancestor walk for this file.
        if file.path.is_dir()
            && path_is_owned(&file.path, owned_paths)
            && removed.insert(file.path.clone())
        {
            continue;
        }
        // Walk ancestors up to (but not including) the project root.
        let mut current = file.path.parent();
        while let Some(parent) = current {
            if parent == project_root {
                break;
            }
            if path_is_owned(parent, owned_paths) {
                // An owned regular FILE blocks creating this directory level.
                if parent.is_file() && removed.insert(parent.to_path_buf()) {
                    break;
                }
                // An owned directory with no other desired content beneath it
                // can be removed wholesale.
                if parent.is_dir()
                    && !desired_paths
                        .iter()
                        .any(|desired| desired != parent && desired.starts_with(parent))
                    && removed.insert(parent.to_path_buf())
                {
                    break;
                }
            }
            current = parent.parent();
        }
    }
    let mut removals = removed.into_iter().collect::<Vec<_>>();
    removals.sort();
    Ok(removals)
}
/// Maps a planned write onto the matching preview event (create vs overwrite).
fn planned_write_preview_change(write: &PlannedFileWrite) -> PreviewChange {
    let path = write.path.clone();
    match write.create {
        true => PreviewChange::Create(path),
        false => PreviewChange::Write(path),
    }
}
/// Reports every declared capability (and its justification) to the user,
/// then fails the sync if any high-sensitivity capability is present and the
/// `--allow-high-sensitivity` flag was not given.
pub(super) fn enforce_capabilities(
    resolution: &Resolution,
    allow_high_sensitivity: bool,
    reporter: &Reporter,
) -> Result<()> {
    let mut flagged = Vec::new();
    for package in &resolution.packages {
        for capability in &package.manifest.manifest.capabilities {
            reporter.note(format!(
                "capability {} {} ({})",
                package.alias, capability.id, capability.sensitivity
            ))?;
            if let Some(justification) = &capability.justification {
                reporter.note(format!("justification: {justification}"))?;
            }
            // Sensitivity strings are compared case-insensitively.
            if capability.sensitivity.eq_ignore_ascii_case("high") {
                flagged.push(format!("{}:{}", package.alias, capability.id));
            }
        }
    }
    if flagged.is_empty() || allow_high_sensitivity {
        return Ok(());
    }
    flagged.sort();
    bail!(
        "high-sensitivity capabilities require --allow-high-sensitivity: {}",
        flagged.join(", ")
    );
}
/// Returns the first planned target that would clobber something Nodus does
/// not own: either the target itself already exists unowned (and is not a
/// merge-friendly file), or an unowned regular FILE sits on an ancestor path
/// where a directory is needed.
pub(super) fn find_unmanaged_collision(
    planned_files: &[ManagedFile],
    owned_paths: &HashSet<PathBuf>,
    project_root: &Path,
) -> Option<UnmanagedCollision> {
    for file in planned_files {
        // Direct collision at the target path itself.
        if file.path.exists()
            && !path_is_owned(&file.path, owned_paths)
            && !allows_managed_merge(project_root, &file.path)
        {
            return Some(UnmanagedCollision {
                path: file.path.clone(),
            });
        }
        // Ancestor collision, walking up to (but not including) the root.
        let mut current = file.path.parent();
        while let Some(parent) = current {
            if parent == project_root {
                break;
            }
            // `is_file()` is false for missing paths, so the previous
            // `parent.exists() &&` guard was redundant and has been dropped.
            if parent.is_file() && !path_is_owned(parent, owned_paths) {
                return Some(UnmanagedCollision {
                    path: parent.to_path_buf(),
                });
            }
            current = parent.parent();
        }
    }
    None
}
/// Whether `path` is one of the well-known files Nodus merges into rather
/// than treating an existing copy as an unmanaged collision.
fn allows_managed_merge(project_root: &Path, path: &Path) -> bool {
    managed_merge_paths(project_root).contains(path)
}
/// The fixed set of project-relative files Nodus is allowed to merge into
/// when an unmanaged copy already exists, resolved against `project_root`.
pub(super) fn managed_merge_paths(project_root: &Path) -> HashSet<PathBuf> {
    const MERGEABLE: [&str; 11] = [
        ".agents/.gitignore",
        ".claude/.gitignore",
        ".claude/settings.json",
        ".claude/settings.local.json",
        ".codex/.gitignore",
        ".codex/hooks.json",
        ".mcp.json",
        ".cursor/.gitignore",
        "opencode.json",
        ".opencode/.gitignore",
        ".codex/config.toml",
    ];
    MERGEABLE
        .iter()
        .map(|relative| project_root.join(relative))
        .collect()
}
/// Finds the managed path (from any resolved package) whose territory
/// overlaps the collision path, if one exists. Overlap means the collision
/// touches the ownership root (in either direction) or any individual
/// managed target file.
pub(super) fn find_managed_collision(
    project_root: &Path,
    resolution: &Resolution,
    collision: &UnmanagedCollision,
) -> Option<ManagedCollision> {
    for package in &resolution.packages {
        for managed_path in package.managed_paths() {
            let ownership_root = project_root.join(&managed_path.ownership_root);
            let overlaps_root = collision.path == ownership_root
                || collision.path.starts_with(&ownership_root)
                || ownership_root.starts_with(&collision.path);
            // Deferred via closure so the per-file scan only runs when the
            // root check did not already match.
            let overlaps_file = || {
                managed_path.files.iter().any(|file| {
                    let target = project_root.join(&file.target_relative);
                    collision.path == target || target.starts_with(&collision.path)
                })
            };
            if overlaps_root || overlaps_file() {
                return Some(ManagedCollision {
                    alias: package.alias.clone(),
                    ownership_root: managed_path.ownership_root.clone(),
                    collision_path: collision.path.clone(),
                    source: managed_collision_source(managed_path.origin),
                });
            }
        }
    }
    None
}
/// If any planned runtime output overlaps the collision path, reports it as
/// a `RuntimeOutput` collision. Runtime outputs have no owning dependency,
/// so `alias` is empty and the collision path doubles as the ownership root.
pub(super) fn find_runtime_output_collision(
    planned_files: &[ManagedFile],
    collision: &UnmanagedCollision,
) -> Option<ManagedCollision> {
    let overlaps = |file: &ManagedFile| {
        collision.path == file.path
            || collision.path.starts_with(&file.path)
            || file.path.starts_with(&collision.path)
    };
    if planned_files.iter().any(|file| overlaps(file)) {
        Some(ManagedCollision {
            alias: String::new(),
            ownership_root: collision.path.clone(),
            collision_path: collision.path.clone(),
            source: ManagedCollisionSource::RuntimeOutput,
        })
    } else {
        None
    }
}
/// Renders the non-interactive refusal message for an unmanaged-path
/// collision, tailored to where the colliding managed target came from.
/// `sync_mode.flag()` names the CLI flag that prevented prompting.
pub(super) fn unmanaged_collision_guidance(
    project_root: &Path,
    collision: &ManagedCollision,
    sync_mode: SyncMode,
) -> String {
    match collision.source {
        ManagedCollisionSource::LegacyDependencyMapping => format!(
            "refusing to overwrite unmanaged file {}. Managed target {} from dependency `{}` collides with an existing path. Rerun plain `nodus sync` on a TTY to choose whether to adopt that target, remove the managed mapping from `nodus.toml`, or cancel; {} cannot prompt interactively",
            display_path(&collision.collision_path),
            display_path(&project_root.join(&collision.ownership_root)),
            collision.alias,
            sync_mode.flag(),
        ),
        ManagedCollisionSource::PackageManagedExport => format!(
            "refusing to overwrite unmanaged file {}. Package-owned managed export {} from dependency `{}` collides with an existing path. Rerun plain `nodus sync` on a TTY to choose whether to adopt that target or cancel; {} cannot prompt interactively",
            display_path(&collision.collision_path),
            display_path(&project_root.join(&collision.ownership_root)),
            collision.alias,
            sync_mode.flag(),
        ),
        // Runtime outputs have no owning dependency; the collision path is
        // both the unmanaged file and the managed output, hence the repeat.
        ManagedCollisionSource::RuntimeOutput => format!(
            "refusing to overwrite unmanaged file {}. Managed runtime output {} collides with an existing path. Rerun plain `nodus sync` on a TTY to choose whether to adopt that output or cancel; {} cannot prompt interactively",
            display_path(&collision.collision_path),
            display_path(&collision.collision_path),
            sync_mode.flag(),
        ),
    }
}
impl ManagedCollisionResolver for TtyManagedCollisionResolver {
    /// Resolves a collision by prompting on the process's own stdin/stderr.
    fn resolve(
        &mut self,
        project_root: &Path,
        collision: &ManagedCollision,
    ) -> Result<ManagedCollisionChoice> {
        // `Stdin::lock`/`Stderr::lock` return 'static guards, so no separate
        // handle bindings are needed.
        let mut input = io::stdin().lock();
        let mut output = io::stderr().lock();
        prompt_for_managed_collision(project_root, collision, &mut input, &mut output)
    }
}
/// Prompts the user to resolve a managed-path collision.
///
/// On a real TTY (outside test builds) this defers to the arrow-key menu;
/// otherwise it falls back to a numbered, line-based prompt read from
/// `input` so the flow stays scriptable and unit-testable.
fn prompt_for_managed_collision(
    project_root: &Path,
    collision: &ManagedCollision,
    input: &mut impl BufRead,
    output: &mut impl Write,
) -> Result<ManagedCollisionChoice> {
    render_managed_collision_notice(project_root, collision, output)?;
    // `cfg!(test)` forces the line-based path during unit tests even when
    // the test runner happens to be attached to a terminal.
    if !cfg!(test) && io::stdin().is_terminal() && io::stderr().is_terminal() {
        writeln!(
            output,
            "Use arrow keys to choose how Nodus should continue, then press Enter."
        )?;
        output.flush()?;
        return prompt_for_managed_collision_interactive(collision);
    }
    writeln!(output, "Choose how to continue:")?;
    writeln!(
        output,
        " 1. adopt (let Nodus take ownership and overwrite managed files under that target)"
    )?;
    // Legacy dependency mappings offer a third option: dropping the mapping.
    if collision.source == ManagedCollisionSource::LegacyDependencyMapping {
        writeln!(
            output,
            " 2. remove (delete the corresponding managed mapping from nodus.toml and continue)"
        )?;
        writeln!(output, " 3. cancel")?;
    } else {
        writeln!(output, " 2. cancel")?;
    }
    write!(output, "> ")?;
    output.flush()?;
    let mut line = String::new();
    input.read_line(&mut line)?;
    parse_managed_collision_choice(&line, collision.source)
}
/// Writes the one-line description of the collision to `output`.
///
/// Runtime-output collisions have no owning dependency, so they get a
/// dedicated message; the other two sources share a template and differ
/// only in their leading label.
fn render_managed_collision_notice(
    project_root: &Path,
    collision: &ManagedCollision,
    output: &mut impl Write,
) -> Result<()> {
    if collision.source == ManagedCollisionSource::RuntimeOutput {
        writeln!(
            output,
            "Managed runtime output {} collides with existing unmanaged path {}.",
            display_path(&collision.collision_path),
            display_path(&collision.collision_path)
        )?;
        return Ok(());
    }
    let label = if collision.source == ManagedCollisionSource::LegacyDependencyMapping {
        "Managed target"
    } else {
        "Package-owned managed export"
    };
    writeln!(
        output,
        "{} {} from dependency `{}` collides with existing unmanaged path {}.",
        label,
        display_path(&project_root.join(&collision.ownership_root)),
        collision.alias,
        display_path(&collision.collision_path)
    )?;
    Ok(())
}
/// Shows the arrow-key selection menu on stderr and maps the selection to a
/// resolution choice.
fn prompt_for_managed_collision_interactive(
    collision: &ManagedCollision,
) -> Result<ManagedCollisionChoice> {
    let items = managed_collision_prompt_items(collision.source);
    let picked = Select::with_theme(&interactive_select_theme())
        .with_prompt("Choose how Nodus should continue")
        .items(&items)
        .default(0)
        .interact_on_opt(&dialoguer::console::Term::stderr())?;
    // Dismissing the menu (Esc/`q`) yields no selection → treat as cancel.
    match picked {
        Some(index) => managed_collision_choice_for_index(collision.source, index),
        None => Ok(ManagedCollisionChoice::Cancel),
    }
}
/// Menu entries for the interactive prompt. The first entry is always
/// "adopt"; legacy mappings additionally offer removing the mapping; the
/// final entry is always cancel. Indexes must stay in step with
/// `managed_collision_choice_for_index`.
fn managed_collision_prompt_items(source: ManagedCollisionSource) -> Vec<&'static str> {
    let items: &[&'static str] = match source {
        ManagedCollisionSource::LegacyDependencyMapping => &[
            "Adopt and overwrite the managed target",
            "Remove the legacy managed mapping from nodus.toml",
            "Cancel sync",
        ],
        ManagedCollisionSource::PackageManagedExport => &[
            "Adopt and overwrite the package-managed export",
            "Cancel sync",
        ],
        ManagedCollisionSource::RuntimeOutput => {
            &["Adopt and overwrite this runtime output", "Cancel sync"]
        }
    };
    items.to_vec()
}
/// Maps a menu index back to a choice, mirroring the item order produced by
/// `managed_collision_prompt_items` for each collision source.
fn managed_collision_choice_for_index(
    source: ManagedCollisionSource,
    index: usize,
) -> Result<ManagedCollisionChoice> {
    let legacy = source == ManagedCollisionSource::LegacyDependencyMapping;
    match index {
        // Index 0 is always "adopt" regardless of source.
        0 => Ok(ManagedCollisionChoice::Adopt),
        // Legacy menus have "remove" at 1 and "cancel" at 2; the shorter
        // menus put "cancel" at 1.
        1 if legacy => Ok(ManagedCollisionChoice::RemoveMapping),
        1 => Ok(ManagedCollisionChoice::Cancel),
        2 if legacy => Ok(ManagedCollisionChoice::Cancel),
        other => bail!("invalid collision selection index `{other}`"),
    }
}
/// Parses a line-based answer (number or keyword, case-insensitive) into a
/// resolution choice. "remove"/"3" are only valid for legacy mappings.
fn parse_managed_collision_choice(
    answer: &str,
    source: ManagedCollisionSource,
) -> Result<ManagedCollisionChoice> {
    let normalized = answer.trim().to_ascii_lowercase();
    let legacy = source == ManagedCollisionSource::LegacyDependencyMapping;
    match normalized.as_str() {
        "1" | "adopt" => Ok(ManagedCollisionChoice::Adopt),
        "2" | "remove" if legacy => Ok(ManagedCollisionChoice::RemoveMapping),
        "3" | "cancel" if legacy => Ok(ManagedCollisionChoice::Cancel),
        // Non-legacy menus only offer adopt (1) and cancel (2).
        "2" | "cancel" => Ok(ManagedCollisionChoice::Cancel),
        other => bail!("invalid collision resolution `{other}`"),
    }
}
/// Maps a resolved managed path's origin onto the collision-source enum used
/// by prompts and error messages.
fn managed_collision_source(origin: ResolvedManagedPathOrigin) -> ManagedCollisionSource {
    match origin {
        ResolvedManagedPathOrigin::LegacyDependencyMapping => {
            ManagedCollisionSource::LegacyDependencyMapping
        }
        // The export's payload fields are irrelevant for messaging.
        ResolvedManagedPathOrigin::PackageManagedExport { .. } => {
            ManagedCollisionSource::PackageManagedExport
        }
    }
}
/// Writes every planned managed file atomically, in parallel.
///
/// Uses rayon's `try_for_each` so the pool stops scheduling new writes once
/// a failure is observed, instead of materialising every `Result` into an
/// intermediate `Vec` and continuing to write after the first error.
pub(super) fn write_managed_files(planned_files: &[ManagedFile]) -> Result<()> {
    planned_files.par_iter().try_for_each(|file| {
        write_atomic(&file.path, &file.contents)
            .with_context(|| format!("failed to write managed file {}", file.path.display()))
    })
}
/// Removes `path` (file, symlink, or entire directory tree), then prunes any
/// parent directories left empty, stopping at well-known runtime roots.
/// A path that no longer exists is treated as success.
pub(super) fn remove_path_and_empty_parents(path: &Path, project_root: &Path) -> Result<()> {
    // symlink_metadata inspects the entry itself without following symlinks.
    match fs::symlink_metadata(path) {
        Ok(metadata) => {
            if metadata.file_type().is_dir() {
                fs::remove_dir_all(path).with_context(|| {
                    format!(
                        "failed to remove conflicting managed directory {}",
                        path.display()
                    )
                })?;
            } else {
                // Covers regular files and symlinks (remove_file unlinks the
                // link itself, not its target).
                fs::remove_file(path).with_context(|| {
                    format!(
                        "failed to remove conflicting managed file {}",
                        path.display()
                    )
                })?;
            }
            prune_empty_parent_dirs(path, project_root)?;
            Ok(())
        }
        // Already gone: nothing to do.
        Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(()),
        Err(error) => Err(error).with_context(|| {
            format!(
                "failed to inspect conflicting managed path {}",
                path.display()
            )
        }),
    }
}
/// Sanity-checks managed state before sync: every owned path must still be
/// desired, and every owned/planned path must actually exist on disk.
pub(super) fn validate_state_consistency(
    owned_paths: &HashSet<PathBuf>,
    desired_paths: &HashSet<PathBuf>,
    planned_files: &[ManagedFile],
) -> Result<()> {
    // Any owned path that is no longer desired is stale state.
    if let Some(path) = owned_paths.difference(desired_paths).next() {
        bail!("stale managed state entry for {}", path.display());
    }
    // Paths that are both desired and owned must be present on disk.
    if let Some(missing) = desired_paths
        .intersection(owned_paths)
        .find(|path| !path.exists())
    {
        bail!("managed file is missing from disk: {}", missing.display());
    }
    // Planned files inside owned territory must be present as well.
    if let Some(file) = planned_files
        .iter()
        .find(|file| path_is_owned(&file.path, owned_paths) && !file.path.exists())
    {
        bail!("managed file is missing from disk: {}", file.path.display());
    }
    Ok(())
}
/// True when `path` equals an owned path or lives underneath one.
///
/// `Path::starts_with` is component-wise and returns true for the path
/// itself, so the previous separate equality check was redundant.
fn path_is_owned(path: &Path, owned_paths: &HashSet<PathBuf>) -> bool {
    owned_paths.iter().any(|owned| path.starts_with(owned))
}
/// Loads the set of paths the lockfile claims Nodus owns. With no lockfile
/// at all, nothing is owned. Pre-current-schema lockfiles go through the
/// sync-specific compatibility loader.
pub(super) fn load_owned_paths(
    project_root: &Path,
    lockfile: Option<&Lockfile>,
) -> Result<HashSet<PathBuf>> {
    match lockfile {
        Some(lockfile) if lockfile.uses_current_schema() => {
            lockfile.managed_paths(project_root)
        }
        Some(lockfile) => lockfile.managed_paths_for_sync(project_root),
        None => Ok(HashSet::new()),
    }
}
/// Module-public alias of the private `path_is_owned` helper, for callers
/// elsewhere in the sync module.
pub(super) fn managed_path_is_owned(path: &Path, owned_paths: &HashSet<PathBuf>) -> bool {
    path_is_owned(path, owned_paths)
}
/// Generates the workspace marketplace JSON outputs: the Claude marketplace
/// (always, when any member is enabled) and the Codex marketplace (only when
/// at least one member declares a codex section). Non-workspace manifests
/// produce no files.
pub(super) fn planned_workspace_marketplace_files(
    root: &LoadedManifest,
    runtime_root: &Path,
) -> Result<Vec<ManagedFile>> {
    if root.manifest.workspace.is_none() {
        return Ok(Vec::new());
    }
    // Only enabled members participate in the marketplace listings.
    let members = root
        .workspace_member_statuses()?
        .into_iter()
        .filter(|member| member.enabled)
        .collect::<Vec<_>>();
    if members.is_empty() {
        return Ok(Vec::new());
    }
    let mut files = Vec::new();
    let claude_marketplace_name = workspace_marketplace_name(root);
    let claude_marketplace_owner_name = workspace_marketplace_owner_name(root);
    // One Claude plugin entry per member: an explicit member name wins over
    // the member manifest's effective name; version is included when known.
    let claude_plugins = members
        .iter()
        .map(|member| {
            let member_root = root.resolve_path(&member.path)?;
            let manifest = load_dependency_from_dir(&member_root)?;
            let mut value = serde_json::Map::from_iter([
                (
                    "name".to_string(),
                    serde_json::Value::String(
                        member
                            .name
                            .clone()
                            .unwrap_or_else(|| manifest.effective_name()),
                    ),
                ),
                (
                    "source".to_string(),
                    serde_json::Value::String(display_path(&member.path)),
                ),
            ]);
            if let Some(version) = manifest
                .effective_version()
                .map(|version| version.to_string())
            {
                value.insert("version".to_string(), serde_json::Value::String(version));
            }
            Ok(serde_json::Value::Object(value))
        })
        .collect::<Result<Vec<_>>>()?;
    files.push(ManagedFile {
        path: runtime_root.join(".claude-plugin/marketplace.json"),
        contents: serde_json::to_vec_pretty(&serde_json::json!({
            "name": claude_marketplace_name,
            "owner": {
                "name": claude_marketplace_owner_name,
            },
            "plugins": claude_plugins,
        }))?,
    });
    // Codex entries exist only for members that declare a codex section;
    // the member id is the name fallback here (not the manifest name).
    let codex_plugins = members
        .iter()
        .filter_map(|member| {
            member.codex.as_ref().map(|codex| {
                serde_json::json!({
                    "name": member.name.clone().unwrap_or_else(|| member.id.clone()),
                    "source": {
                        "source": "local",
                        "path": codex_workspace_plugin_path(&member.path),
                    },
                    "policy": {
                        "installation": codex.installation,
                        "authentication": codex.authentication,
                    },
                    "category": codex.category,
                })
            })
        })
        .collect::<Vec<_>>();
    if !codex_plugins.is_empty() {
        files.push(ManagedFile {
            path: runtime_root.join(".agents/plugins/marketplace.json"),
            contents: serde_json::to_vec_pretty(&serde_json::json!({
                "name": claude_marketplace_name,
                "plugins": codex_plugins,
            }))?,
        });
    }
    Ok(files)
}
/// Of the desired paths, keeps those living in locations Nodus itself
/// manages — used to rebuild ownership state without a trustworthy lockfile.
pub(super) fn recover_runtime_owned_paths(
    project_root: &Path,
    desired_paths: &HashSet<PathBuf>,
) -> HashSet<PathBuf> {
    let mut recovered = HashSet::new();
    for path in desired_paths {
        if is_runtime_managed_path(project_root, path) {
            recovered.insert(path.clone());
        }
    }
    recovered
}
/// Stricter variant of `recover_runtime_owned_paths`: additionally requires
/// that the on-disk state byte-for-byte matches the planned files and that
/// no symlinked ancestor sits between the path and the project root.
pub(super) fn recover_runtime_owned_paths_from_disk(
    project_root: &Path,
    desired_paths: &HashSet<PathBuf>,
    planned_files: &[ManagedFile],
) -> HashSet<PathBuf> {
    desired_paths
        .iter()
        .filter(|path| {
            is_runtime_managed_path(project_root, path)
                && !path_has_symlinked_ancestor_within(project_root, path)
                && path_exactly_matches_planned_files(project_root, path, planned_files)
        })
        .cloned()
        .collect()
}
/// Whether the on-disk entry at `path` is exactly what the plan would
/// produce: a regular file matching its planned contents, or a directory
/// containing exactly the planned files. Symlinks never match.
fn path_exactly_matches_planned_files(
    project_root: &Path,
    path: &Path,
    planned_files: &[ManagedFile],
) -> bool {
    let metadata = match fs::symlink_metadata(path) {
        Ok(metadata) => metadata,
        Err(_) => return false,
    };
    if metadata.file_type().is_symlink() {
        false
    } else if metadata.is_file() {
        // The first planned file at this exact path must match on disk.
        match planned_files.iter().find(|file| file.path == path) {
            Some(file) => file_exactly_matches_planned_contents(project_root, file),
            None => false,
        }
    } else if metadata.is_dir() {
        directory_exactly_matches_planned_files(project_root, path, planned_files)
    } else {
        // Sockets, FIFOs, devices, etc. never count as a match.
        false
    }
}
/// Whether the planned file exists on disk as a plain (non-symlink) regular
/// file with byte-identical contents, reached through no symlinked ancestor.
fn file_exactly_matches_planned_contents(project_root: &Path, file: &ManagedFile) -> bool {
    if path_has_symlinked_ancestor_within(project_root, &file.path) {
        return false;
    }
    let is_plain_file = matches!(
        fs::symlink_metadata(&file.path),
        Ok(metadata) if metadata.is_file() && !metadata.file_type().is_symlink()
    );
    if !is_plain_file {
        return false;
    }
    fs::read(&file.path).is_ok_and(|contents| contents == file.contents)
}
/// True when the directory at `path` contains EXACTLY the planned files
/// (with matching contents) plus the intermediate directories needed to
/// hold them — nothing extra, nothing missing, no symlinks anywhere.
fn directory_exactly_matches_planned_files(
    project_root: &Path,
    path: &Path,
    planned_files: &[ManagedFile],
) -> bool {
    let planned_in_dir = planned_files
        .iter()
        .filter(|file| file.path.starts_with(path))
        .collect::<Vec<_>>();
    // An empty plan can never "exactly match" an existing directory.
    if planned_in_dir.is_empty() {
        return false;
    }
    // Every planned file must exist with byte-identical contents.
    if !planned_in_dir
        .iter()
        .copied()
        .all(|file| file_exactly_matches_planned_contents(project_root, file))
    {
        return false;
    }
    // Expected entries = the planned file paths themselves...
    let expected_files = planned_in_dir
        .iter()
        .map(|file| file.path.clone())
        .collect::<HashSet<_>>();
    // ...plus every intermediate directory strictly between each file and
    // `path` (exclusive on both ends).
    let mut expected_dirs = HashSet::new();
    for file in &planned_in_dir {
        let mut current = file.path.parent();
        while let Some(parent) = current {
            if parent == path {
                break;
            }
            expected_dirs.insert(parent.to_path_buf());
            current = parent.parent();
        }
    }
    let expected_entries = expected_files
        .into_iter()
        .chain(expected_dirs)
        .collect::<HashSet<_>>();
    // Disk must contain exactly the expected entries — no strays, no gaps.
    // (collect_entries_under_dir returns an empty set on anything
    // suspicious, which then fails this equality.)
    let Ok(existing_entries) = collect_entries_under_dir(project_root, path) else {
        return false;
    };
    existing_entries == expected_entries
}
/// Collects every file and directory path under `path`, recursively.
///
/// Deliberately returns an EMPTY set (not an error) whenever anything
/// suspicious is found — a symlinked ancestor, `path` itself being a
/// symlink or non-directory, or any symlink/special file inside — so the
/// caller's set-equality comparison fails and the directory is not adopted.
fn collect_entries_under_dir(project_root: &Path, path: &Path) -> Result<HashSet<PathBuf>> {
    if path_has_symlinked_ancestor_within(project_root, path) {
        return Ok(HashSet::new());
    }
    let metadata = fs::symlink_metadata(path)
        .with_context(|| format!("failed to read metadata for {}", path.display()))?;
    if metadata.file_type().is_symlink() || !metadata.is_dir() {
        return Ok(HashSet::new());
    }
    // Iterative walk with an explicit stack; no recursion.
    let mut entries = HashSet::new();
    let mut pending = vec![path.to_path_buf()];
    while let Some(current) = pending.pop() {
        for entry in fs::read_dir(&current)
            .with_context(|| format!("failed to read managed directory {}", current.display()))?
        {
            let entry = entry.with_context(|| {
                format!("failed to inspect managed directory {}", current.display())
            })?;
            let entry_path = entry.path();
            let file_type = entry
                .file_type()
                .with_context(|| format!("failed to read metadata for {}", entry_path.display()))?;
            if file_type.is_symlink() {
                // Symlink anywhere inside: bail with the empty "mismatch" set.
                return Ok(HashSet::new());
            } else if file_type.is_dir() {
                entries.insert(entry_path.clone());
                pending.push(entry_path);
            } else if file_type.is_file() {
                entries.insert(entry_path);
            } else {
                // Sockets, FIFOs, devices: likewise treated as a mismatch.
                return Ok(HashSet::new());
            }
        }
    }
    Ok(entries)
}
/// Whether any strict ancestor of `path`, strictly inside `project_root`,
/// is itself a symlink. Missing ancestors are treated as non-symlinks.
fn path_has_symlinked_ancestor_within(project_root: &Path, path: &Path) -> bool {
    // skip(1): start at the parent, never at `path` itself.
    for ancestor in path.ancestors().skip(1) {
        if !ancestor.starts_with(project_root) || ancestor == project_root {
            break;
        }
        let is_symlink = fs::symlink_metadata(ancestor)
            .map(|metadata| metadata.file_type().is_symlink())
            .unwrap_or(false);
        if is_symlink {
            return true;
        }
    }
    false
}
/// Heuristic used during state recovery: does `path` (inside `project_root`)
/// fall under one of the locations Nodus itself writes runtime outputs to?
fn is_runtime_managed_path(project_root: &Path, path: &Path) -> bool {
    // Paths outside the project root are never runtime-managed.
    let Some(relative) = strip_path_prefix(path, project_root) else {
        return false;
    };
    // Top-level single-file outputs.
    if relative == Path::new(".mcp.json") || relative == Path::new("opencode.json") {
        return true;
    }
    let mut components = relative.components();
    let Some(first) = components.next() else {
        return false;
    };
    match first.as_os_str().to_string_lossy().as_ref() {
        // `.agents` itself, or exactly `.agents/plugins/marketplace.json`.
        ".agents" => {
            let second = components
                .next()
                .map(|component| component.as_os_str().to_string_lossy());
            let third = components
                .next()
                .map(|component| component.as_os_str().to_string_lossy());
            second.is_none()
                || matches!(
                    (second.as_deref(), third.as_deref()),
                    (Some("plugins"), Some("marketplace.json"))
                )
        }
        // Entire tool-config trees are managed.
        ".claude" | ".codex" | ".cursor" | ".opencode" => true,
        // Only the marketplace manifest under `.claude-plugin`.
        ".claude-plugin" => matches!(
            components.next().map(|component| component.as_os_str().to_string_lossy()),
            Some(second) if second == "marketplace.json"
        ),
        // Only the `skills`/`agents` subtrees of `.github`.
        ".github" => matches!(
            components.next().map(|component| component.as_os_str().to_string_lossy()),
            Some(second) if second == "skills" || second == "agents"
        ),
        // `.nodus/packages/<something>` — at least one component below `packages`.
        ".nodus" => matches!(
            (
                components.next().map(|component| component.as_os_str().to_string_lossy()),
                components.next().map(|component| component.as_os_str().to_string_lossy())
            ),
            (Some(second), Some(_third)) if second == "packages"
        ),
        _ => false,
    }
}
/// Renders a workspace member path for the Codex marketplace, ensuring the
/// explicit `./` relative prefix that Codex sources expect.
fn codex_workspace_plugin_path(member_path: &Path) -> String {
    let rendered = display_path(member_path);
    match rendered.strip_prefix("./") {
        Some(_) => rendered,
        None => format!("./{rendered}"),
    }
}
fn workspace_marketplace_name(root: &LoadedManifest) -> String {
let source_name = root
.manifest
.name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToOwned::to_owned)
.unwrap_or_else(|| workspace_marketplace_root_basename(&root.root));
normalize_workspace_marketplace_name(&source_name)
}
/// Marketplace `owner.name` field: the trimmed manifest name, or the project
/// directory basename when the manifest has no non-empty name.
fn workspace_marketplace_owner_name(root: &LoadedManifest) -> String {
    match root.manifest.name.as_deref().map(str::trim) {
        Some(name) if !name.is_empty() => name.to_owned(),
        _ => workspace_marketplace_root_basename(&root.root),
    }
}
/// Last path component of the project root as UTF-8, or the literal
/// `"agentpack"` when the root has no usable basename (e.g. `/`).
fn workspace_marketplace_root_basename(root: &Path) -> String {
    match root.file_name().and_then(|value| value.to_str()) {
        Some(name) if !name.is_empty() => name.to_owned(),
        _ => String::from("agentpack"),
    }
}
/// Slugifies a marketplace name: ASCII alphanumerics are lowercased, every
/// run of other characters collapses to a single `-`, leading/trailing
/// dashes are trimmed, and an empty result falls back to `"agentpack"`.
fn normalize_workspace_marketplace_name(value: &str) -> String {
    let mut slug = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            c if c.is_ascii_alphanumeric() => slug.push(c.to_ascii_lowercase()),
            // Collapse consecutive separators into one dash.
            _ if slug.ends_with('-') => {}
            _ => slug.push('-'),
        }
    }
    let slug = slug.trim_matches('-');
    if slug.is_empty() {
        String::from("agentpack")
    } else {
        slug.to_string()
    }
}
/// Removes now-empty parent directories of `path`, walking upward until it
/// reaches the project root, one of the well-known runtime roots, or the
/// first non-empty directory.
pub(super) fn prune_empty_parent_dirs(path: &Path, project_root: &Path) -> Result<()> {
    // Never prune the project root itself or the tool-config roots.
    let stop_roots = [
        project_root.to_path_buf(),
        project_root.join(".agents"),
        project_root.join(".claude"),
        project_root.join(".codex"),
        project_root.join(".cursor"),
        project_root.join(".github"),
        project_root.join(".opencode"),
    ];
    // ancestors().skip(1) yields parent, grandparent, … — the same chain the
    // old manual `parent()` loop walked.
    for dir in path.ancestors().skip(1) {
        if stop_roots.iter().any(|root| dir == root) {
            break;
        }
        match fs::remove_dir(dir) {
            // Removed, or already gone: keep walking upward.
            Ok(()) => {}
            Err(error) if error.kind() == std::io::ErrorKind::NotFound => {}
            // First non-empty ancestor ends the pruning.
            Err(error) if error.kind() == std::io::ErrorKind::DirectoryNotEmpty => break,
            Err(error) => {
                return Err(error)
                    .with_context(|| format!("failed to prune empty directory {}", dir.display()));
            }
        }
    }
    Ok(())
}