use crate::utils::progress::{InstallationPhase, MultiPhaseProgress};
use anyhow::{Context, Result};
use std::path::PathBuf;
type InstallResult = Result<(String, bool, String), (String, anyhow::Error)>;
use futures::{
future,
stream::{self, StreamExt},
};
use std::path::Path;
use std::sync::Arc;
use tokio::sync::{Mutex, mpsc};
use crate::cache::Cache;
use crate::core::{ResourceIterator, ResourceTypeExt};
use crate::lockfile::{LockFile, LockedResource};
use crate::manifest::Manifest;
use crate::markdown::MarkdownFile;
use crate::utils::fs::{atomic_write, ensure_dir};
use crate::utils::progress::ProgressBar;
use hex;
use std::collections::HashSet;
use std::fs;
/// Install a single locked resource into the project tree.
///
/// Resolves the destination path (the lockfile's `installed_at` when set,
/// otherwise `<resource_dir>/<name>.md`), obtains the resource content from a
/// git worktree (remote sources) or the local filesystem, validates it as
/// markdown, and writes it atomically only when the content differs from what
/// is already on disk.
///
/// Returns `(actually_installed, checksum)`: `actually_installed` is `false`
/// when the destination already held identical content, and `checksum` is the
/// `sha256:`-prefixed digest of the new content.
///
/// # Errors
/// Fails when the entry has a source but no URL, has an invalid/missing
/// commit SHA, the source file cannot be read or parsed, or the write fails.
pub async fn install_resource(
    entry: &LockedResource,
    project_dir: &Path,
    resource_dir: &str,
    cache: &Cache,
    force_refresh: bool,
) -> Result<(bool, String)> {
    // Prefer the lockfile's explicit install location; fall back to
    // `<resource_dir>/<name>.md` for entries that lack one.
    let dest_path = if entry.installed_at.is_empty() {
        project_dir.join(resource_dir).join(format!("{}.md", entry.name))
    } else {
        project_dir.join(&entry.installed_at)
    };
    // Checksum any existing file off the async runtime so a large file does
    // not block the executor; `.into()` wraps the String in `Some`.
    let existing_checksum = if dest_path.exists() {
        let path = dest_path.clone();
        tokio::task::spawn_blocking(move || LockFile::compute_checksum(&path)).await??.into()
    } else {
        None
    };
    let new_content = if let Some(source_name) = &entry.source {
        // Sourced entry: content comes from the cache.
        let url = entry
            .url
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("Resource {} has no URL", entry.name))?;
        // A missing or empty commit SHA marks a local directory source whose
        // `url` is actually a filesystem path, not a git remote.
        let is_local_source = entry.resolved_commit.as_deref().is_none_or(str::is_empty);
        let cache_dir = if is_local_source {
            PathBuf::from(url)
        } else {
            let sha = entry.resolved_commit.as_deref().ok_or_else(|| {
                anyhow::anyhow!("Resource {} missing resolved commit SHA. Run 'agpm update' to regenerate lockfile.", entry.name)
            })?;
            // Guard against corrupted lockfiles: only full 40-char hex SHAs
            // are accepted as worktree keys.
            if sha.len() != 40 || !sha.chars().all(|c| c.is_ascii_hexdigit()) {
                return Err(anyhow::anyhow!(
                    "Invalid SHA '{}' for resource {}. Expected 40 hex characters.",
                    sha,
                    entry.name
                ));
            }
            let mut cache_dir = cache
                .get_or_create_worktree_for_sha(source_name, url, sha, Some(&entry.name))
                .await?;
            // On force refresh, discard and re-materialize the worktree so a
            // stale checkout cannot be served. Cleanup errors are ignored
            // (best effort); the recreation below surfaces real failures.
            if force_refresh {
                let _ = cache.cleanup_worktree(&cache_dir).await;
                cache_dir = cache
                    .get_or_create_worktree_for_sha(source_name, url, sha, Some(&entry.name))
                    .await?;
            }
            cache_dir
        };
        let source_path = cache_dir.join(&entry.path);
        let content = tokio::fs::read_to_string(&source_path)
            .await
            .with_context(|| format!("Failed to read resource file: {}", source_path.display()))?;
        // Validate markdown structure up front; a parse failure aborts the
        // install before anything is written.
        MarkdownFile::parse_with_context(&content, Some(&source_path.display().to_string()))?;
        content
    } else {
        // Sourceless entry: `path` points at a file on disk, either absolute
        // or relative to the project root.
        let source_path = {
            let candidate = Path::new(&entry.path);
            if candidate.is_absolute() {
                candidate.to_path_buf()
            } else {
                project_dir.join(candidate)
            }
        };
        if !source_path.exists() {
            return Err(anyhow::anyhow!(
                "Local file '{}' not found. Expected at: {}",
                entry.path,
                source_path.display()
            ));
        }
        let content = tokio::fs::read_to_string(&source_path)
            .await
            .with_context(|| format!("Failed to read resource file: {}", source_path.display()))?;
        MarkdownFile::parse_with_context(&content, Some(&source_path.display().to_string()))?;
        content
    };
    // Digest of the freshly fetched content, formatted to match the
    // "sha256:<hex>" convention used by LockFile::compute_checksum above.
    let new_checksum = {
        use sha2::{Digest, Sha256};
        let mut hasher = Sha256::new();
        hasher.update(new_content.as_bytes());
        let hash = hasher.finalize();
        format!("sha256:{}", hex::encode(hash))
    };
    // Skip the write entirely when the destination already matches.
    let actually_installed = existing_checksum.as_ref() != Some(&new_checksum);
    if actually_installed {
        if let Some(parent) = dest_path.parent() {
            ensure_dir(parent)?;
        }
        atomic_write(&dest_path, new_content.as_bytes())
            .with_context(|| format!("Failed to install resource to {}", dest_path.display()))?;
    }
    Ok((actually_installed, new_checksum))
}
/// Like [`install_resource`], but announces the resource name on `pb` before
/// delegating. See [`install_resource`] for the return value and errors.
pub async fn install_resource_with_progress(
    entry: &LockedResource,
    project_dir: &Path,
    resource_dir: &str,
    cache: &Cache,
    force_refresh: bool,
    pb: &ProgressBar,
) -> Result<(bool, String)> {
    pb.set_message(format!("Installing {}", entry.name));
    install_resource(entry, project_dir, resource_dir, cache, force_refresh).await
}
/// Install every lockfile resource concurrently, updating a single
/// [`ProgressBar`] as work completes.
///
/// Git worktrees are pre-warmed once per unique `(source, url, sha)` triple
/// so the per-resource installs don't race to create the same checkout.
/// Returns the number of resources actually (re)written on disk.
///
/// # Errors
/// Aggregates all per-resource failures into one error listing each failed
/// resource by name.
#[deprecated(note = "Use install_resources with MultiPhaseProgress instead")]
pub async fn install_resources_parallel(
    lockfile: &LockFile,
    manifest: &Manifest,
    project_dir: &Path,
    pb: &ProgressBar,
    cache: &Cache,
    force_refresh: bool,
    max_concurrency: Option<usize>,
) -> Result<usize> {
    let all_entries = ResourceIterator::collect_all_entries(lockfile, manifest);
    if all_entries.is_empty() {
        return Ok(0);
    }
    let total = all_entries.len();
    pb.set_message("Preparing resources");
    // Deduplicate worktree creation: only entries with a fully resolved
    // 40-hex commit SHA are backed by a git worktree.
    let mut unique_worktrees = HashSet::new();
    for (entry, _) in &all_entries {
        if let Some(source_name) = &entry.source
            && let Some(url) = &entry.url
        {
            if let Some(sha) = entry.resolved_commit.as_ref().filter(|commit| {
                commit.len() == 40 && commit.chars().all(|c| c.is_ascii_hexdigit())
            }) {
                unique_worktrees.insert((source_name.clone(), url.clone(), sha.clone()));
            }
        }
    }
    if !unique_worktrees.is_empty() {
        // Best-effort pre-warm: errors are swallowed here because the
        // per-resource install below will retry and report real failures.
        let worktree_futures: Vec<_> = unique_worktrees
            .into_iter()
            .map(|(source, url, sha)| {
                let cache = cache.clone();
                async move {
                    cache
                        .get_or_create_worktree_for_sha(&source, &url, &sha, Some("pre-warm"))
                        .await
                        .ok();
                }
            })
            .collect();
        future::join_all(worktree_futures).await;
    }
    let installed_count = Arc::new(Mutex::new(0));
    let pb = Arc::new(pb.clone());
    pb.set_message(format!("Installing 0/{total} resources"));
    let shared_cache = Arc::new(cache.clone());
    // `None` means effectively unbounded; clamp to at least one task.
    let concurrency = max_concurrency.unwrap_or(usize::MAX).max(1);
    let results: Vec<InstallResult> = stream::iter(all_entries)
        .map(|(entry, resource_dir)| {
            let entry = entry.clone();
            let project_dir = project_dir.to_path_buf();
            let resource_dir = resource_dir.to_string();
            let installed_count = Arc::clone(&installed_count);
            let pb = Arc::clone(&pb);
            let cache = Arc::clone(&shared_cache);
            async move {
                let res = install_resource_for_parallel(
                    &entry,
                    &project_dir,
                    &resource_dir,
                    cache.as_ref(),
                    force_refresh,
                )
                .await;
                match res {
                    Ok((actually_installed, checksum)) => {
                        // Only installs that actually changed a file count.
                        if actually_installed {
                            let mut count = installed_count.lock().await;
                            *count += 1;
                        }
                        let count = *installed_count.lock().await;
                        pb.set_message(format!("Installing {count}/{total} resources"));
                        pb.inc(1);
                        Ok((entry.name.clone(), actually_installed, checksum))
                    }
                    Err(err) => Err((entry.name.clone(), err)),
                }
            }
        })
        .buffer_unordered(concurrency)
        .collect()
        .await;
    // Collect failures; successes need no further handling here.
    let mut errors = Vec::new();
    for result in results {
        match result {
            Ok((_name, _installed, _checksum)) => {}
            Err((name, error)) => {
                errors.push((name, error));
            }
        }
    }
    if !errors.is_empty() {
        let error_msgs: Vec<String> =
            errors.into_iter().map(|(name, error)| format!(" {name}: {error}")).collect();
        return Err(anyhow::anyhow!(
            "Failed to install {} resources:\n{}",
            error_msgs.len(),
            error_msgs.join("\n")
        ));
    }
    let final_count = *installed_count.lock().await;
    Ok(final_count)
}
/// Per-task entry point used by the parallel installers; delegates directly
/// to [`install_resource`].
async fn install_resource_for_parallel(
    entry: &LockedResource,
    project_dir: &Path,
    resource_dir: &str,
    cache: &Cache,
    force_refresh: bool,
) -> Result<(bool, String)> {
    install_resource(entry, project_dir, resource_dir, cache, force_refresh).await
}
/// Snapshot of parallel-install progress, sent over a channel to an
/// observer (e.g. a UI task).
#[derive(Debug, Clone)]
pub struct InstallProgress {
    /// Names of resources whose install is currently in flight.
    pub active_deps: Vec<String>,
    /// Number of installs completed so far (only installs that actually
    /// rewrote a file are counted).
    pub completed_count: usize,
    /// Total number of resources scheduled for install.
    pub total_count: usize,
}
/// Install every lockfile resource concurrently, streaming
/// [`InstallProgress`] snapshots through an optional unbounded channel
/// instead of drawing a progress bar directly.
///
/// One snapshot is sent when each resource starts and another when it
/// finishes. Returns the number of resources actually (re)written on disk.
///
/// # Errors
/// Aggregates all per-resource failures into one error listing each failed
/// resource by name.
#[deprecated(note = "Use install_resources with MultiPhaseProgress instead")]
pub async fn install_resources_parallel_with_progress(
    lockfile: &LockFile,
    manifest: &Manifest,
    project_dir: &Path,
    cache: &Cache,
    force_refresh: bool,
    max_concurrency: Option<usize>,
    progress_sender: Option<mpsc::UnboundedSender<InstallProgress>>,
) -> Result<usize> {
    let all_entries = ResourceIterator::collect_all_entries(lockfile, manifest);
    if all_entries.is_empty() {
        return Ok(0);
    }
    let total = all_entries.len();
    // Deduplicate worktree creation (only fully resolved 40-hex SHAs).
    let mut unique_worktrees = HashSet::new();
    for (entry, _) in &all_entries {
        if let Some(source_name) = &entry.source
            && let Some(url) = &entry.url
        {
            if let Some(sha) = entry.resolved_commit.as_ref().filter(|commit| {
                commit.len() == 40 && commit.chars().all(|c| c.is_ascii_hexdigit())
            }) {
                unique_worktrees.insert((source_name.clone(), url.clone(), sha.clone()));
            }
        }
    }
    if !unique_worktrees.is_empty() {
        // Best-effort pre-warm; real failures resurface during install.
        let worktree_futures: Vec<_> = unique_worktrees
            .into_iter()
            .map(|(source, url, sha)| {
                async move {
                    cache
                        .get_or_create_worktree_for_sha(&source, &url, &sha, Some("pre-warm"))
                        .await
                        .ok();
                }
            })
            .collect();
        future::join_all(worktree_futures).await;
    }
    let installed_count = Arc::new(Mutex::new(0));
    let active_deps = Arc::new(Mutex::new(Vec::<String>::new()));
    let sender = progress_sender.map(Arc::new);
    let shared_cache = Arc::new(cache.clone());
    // `None` means effectively unbounded; clamp to at least one task.
    let concurrency = max_concurrency.unwrap_or(usize::MAX).max(1);
    let results: Vec<InstallResult> = stream::iter(all_entries)
        .map(|(entry, resource_dir)| {
            let entry = entry.clone();
            let project_dir = project_dir.to_path_buf();
            let resource_dir = resource_dir.to_string();
            let installed_count = Arc::clone(&installed_count);
            let active_deps = Arc::clone(&active_deps);
            let sender = sender.clone();
            let cache = Arc::clone(&shared_cache);
            async move {
                // Announce the start of this resource's install. Send errors
                // are ignored: a dropped receiver must not abort installs.
                {
                    let mut active = active_deps.lock().await;
                    active.push(entry.name.clone());
                    let count = *installed_count.lock().await;
                    if let Some(ref tx) = sender {
                        let _ = tx.send(InstallProgress {
                            active_deps: active.clone(),
                            completed_count: count,
                            total_count: total,
                        });
                    }
                }
                let res = install_resource_for_parallel(
                    &entry,
                    &project_dir,
                    &resource_dir,
                    cache.as_ref(),
                    force_refresh,
                )
                .await;
                // Announce completion: drop this entry from the active set
                // and, on success, bump the counter (only installs that
                // actually rewrote the file are counted).
                {
                    let mut active = active_deps.lock().await;
                    active.retain(|x| x != &entry.name);
                    if let Ok((actually_installed, _checksum)) = &res {
                        if *actually_installed {
                            let mut count = installed_count.lock().await;
                            *count += 1;
                        }
                        let count = *installed_count.lock().await;
                        if let Some(ref tx) = sender {
                            let _ = tx.send(InstallProgress {
                                active_deps: active.clone(),
                                completed_count: count,
                                total_count: total,
                            });
                        }
                    }
                }
                match res {
                    Ok((installed, checksum)) => Ok((entry.name.clone(), installed, checksum)),
                    Err(err) => Err((entry.name.clone(), err)),
                }
            }
        })
        .buffer_unordered(concurrency)
        .collect()
        .await;
    // Collect failures; successes need no further handling here.
    let mut errors = Vec::new();
    for result in results {
        match result {
            Ok((_name, _installed, _checksum)) => {}
            Err((name, error)) => {
                errors.push((name, error));
            }
        }
    }
    if !errors.is_empty() {
        let error_msgs: Vec<String> =
            errors.into_iter().map(|(name, error)| format!(" {name}: {error}")).collect();
        return Err(anyhow::anyhow!(
            "Failed to install {} resources:\n{}",
            error_msgs.len(),
            error_msgs.join("\n")
        ));
    }
    let final_count = *installed_count.lock().await;
    Ok(final_count)
}
/// Selects which lockfile entries [`install_resources`] should install.
pub enum ResourceFilter {
    /// Install every resource in the lockfile.
    All,
    /// Install only the listed updates, given as
    /// `(name, source, old_version, new_version)` tuples.
    Updated(Vec<(String, Option<String>, String, String)>),
}
/// Install resources selected by `filter`, optionally reporting progress
/// through a [`MultiPhaseProgress`].
///
/// This is the preferred entry point; the `*_parallel*` variants in this
/// module are deprecated. Git worktrees are pre-warmed once per unique
/// `(source, url, sha)` triple, then installs run with bounded concurrency.
///
/// Returns `(installed_count, checksums)` where `installed_count` counts
/// only resources whose content actually changed on disk, and `checksums`
/// pairs each processed resource name with its new `sha256:` checksum.
///
/// # Errors
/// Aggregates all per-resource failures into one error listing each failed
/// resource by name.
#[allow(clippy::too_many_arguments)]
pub async fn install_resources(
    filter: ResourceFilter,
    lockfile: &LockFile,
    manifest: &Manifest,
    project_dir: &Path,
    cache: Cache,
    force_refresh: bool,
    max_concurrency: Option<usize>,
    progress: Option<Arc<MultiPhaseProgress>>,
) -> Result<(usize, Vec<(String, String)>)> {
    // Resolve the set of (entry, target_dir) pairs to install.
    let all_entries: Vec<(LockedResource, String)> = match filter {
        ResourceFilter::All => {
            ResourceIterator::collect_all_entries(lockfile, manifest)
                .into_iter()
                .map(|(entry, dir)| (entry.clone(), dir.into_owned()))
                .collect()
        }
        ResourceFilter::Updated(ref updates) => {
            // Look each update up by (name, source); entries no longer
            // present in the lockfile are silently skipped.
            let mut entries = Vec::new();
            for (name, source, _, _) in updates {
                if let Some((resource_type, entry)) =
                    ResourceIterator::find_resource_by_name_and_source(
                        lockfile,
                        name,
                        source.as_deref(),
                    )
                {
                    // Tool-specific artifact paths take precedence over the
                    // deprecated per-type target directory.
                    let target_dir = if let Some(artifact_path) =
                        manifest.get_artifact_resource_path(&entry.tool, resource_type)
                    {
                        artifact_path.display().to_string()
                    } else {
                        #[allow(deprecated)]
                        resource_type.get_target_dir(&manifest.target).to_string()
                    };
                    entries.push((entry.clone(), target_dir));
                }
            }
            entries
        }
    };
    if all_entries.is_empty() {
        return Ok((0, Vec::new()));
    }
    let total = all_entries.len();
    if let Some(ref pm) = progress {
        pm.start_phase_with_progress(InstallationPhase::InstallingResources, total);
    }
    // Deduplicate worktree creation (only fully resolved 40-hex SHAs).
    let mut unique_worktrees = HashSet::new();
    for (entry, _) in &all_entries {
        if let Some(source_name) = &entry.source
            && let Some(url) = &entry.url
        {
            if let Some(sha) = entry.resolved_commit.as_ref().filter(|commit| {
                commit.len() == 40 && commit.chars().all(|c| c.is_ascii_hexdigit())
            }) {
                unique_worktrees.insert((source_name.clone(), url.clone(), sha.clone()));
            }
        }
    }
    if !unique_worktrees.is_empty() {
        // The context string only labels the worktree operation for
        // diagnostics.
        let context = match filter {
            ResourceFilter::All => "pre-warm",
            ResourceFilter::Updated(_) => "update-pre-warm",
        };
        // Best-effort pre-warm; real failures resurface during install.
        let worktree_futures: Vec<_> = unique_worktrees
            .into_iter()
            .map(|(source, url, sha)| {
                let cache = cache.clone();
                async move {
                    cache
                        .get_or_create_worktree_for_sha(&source, &url, &sha, Some(context))
                        .await
                        .ok();
                }
            })
            .collect();
        future::join_all(worktree_futures).await;
    }
    let installed_count = Arc::new(Mutex::new(0));
    // `None` means effectively unbounded; clamp to at least one task.
    let concurrency = max_concurrency.unwrap_or(usize::MAX).max(1);
    if let Some(ref pm) = progress {
        pm.update_current_message(&format!("Installing 0/{total} resources"));
    }
    let results: Vec<InstallResult> = stream::iter(all_entries)
        .map(|(entry, resource_dir)| {
            let project_dir = project_dir.to_path_buf();
            let installed_count = Arc::clone(&installed_count);
            let cache = cache.clone();
            let progress = progress.clone();
            async move {
                if let Some(ref pm) = progress {
                    pm.update_current_message(&format!("Installing {}", entry.name));
                }
                let res = install_resource_for_parallel(
                    &entry,
                    &project_dir,
                    &resource_dir,
                    &cache,
                    force_refresh,
                )
                .await;
                if let Ok((actually_installed, _checksum)) = &res {
                    // Only installs that actually rewrote a file count.
                    if *actually_installed {
                        let mut count = installed_count.lock().await;
                        *count += 1;
                    }
                    if let Some(ref pm) = progress {
                        let count = *installed_count.lock().await;
                        pm.update_current_message(&format!("Installing {count}/{total} resources"));
                        pm.increment_progress(1);
                    }
                }
                match res {
                    Ok((installed, checksum)) => Ok((entry.name.clone(), installed, checksum)),
                    Err(err) => Err((entry.name.clone(), err)),
                }
            }
        })
        .buffer_unordered(concurrency)
        .collect()
        .await;
    // Split successes (checksums for the caller) from failures.
    let mut errors = Vec::new();
    let mut checksums = Vec::new();
    for result in results {
        match result {
            Ok((name, _installed, checksum)) => {
                checksums.push((name, checksum));
            }
            Err((name, error)) => {
                errors.push((name, error));
            }
        }
    }
    if !errors.is_empty() {
        if let Some(ref pm) = progress {
            pm.complete_phase(Some(&format!("Failed to install {} resources", errors.len())));
        }
        let error_msgs: Vec<String> =
            errors.into_iter().map(|(name, error)| format!(" {name}: {error}")).collect();
        return Err(anyhow::anyhow!(
            "Failed to install {} resources:\n{}",
            error_msgs.len(),
            error_msgs.join("\n")
        ));
    }
    let final_count = *installed_count.lock().await;
    // Only mark the phase complete when something was installed; a no-op
    // run leaves the phase untouched.
    if let Some(ref pm) = progress
        && final_count > 0
    {
        pm.complete_phase(Some(&format!("Installed {final_count} resources")));
    }
    Ok((final_count, checksums))
}
/// Install every lockfile resource concurrently, ticking an optional shared
/// [`ProgressBar`] once per resource.
///
/// Returns the number of resources actually (re)written on disk; failures
/// are aggregated into a single error listing each failed resource.
#[deprecated(note = "Use install_resources with MultiPhaseProgress instead")]
pub async fn install_resources_with_dynamic_progress(
    lockfile: &LockFile,
    manifest: &Manifest,
    project_dir: &Path,
    cache: &Cache,
    force_refresh: bool,
    max_concurrency: Option<usize>,
    progress_bar: Option<Arc<crate::utils::progress::ProgressBar>>,
) -> Result<usize> {
    let all_entries = ResourceIterator::collect_all_entries(lockfile, manifest);
    if all_entries.is_empty() {
        return Ok(0);
    }
    let _total = all_entries.len();
    if let Some(ref progress) = progress_bar {
        progress.set_message("Installing resources");
    }
    // Deduplicate worktree creation (only fully resolved 40-hex SHAs).
    let mut unique_worktrees = HashSet::new();
    for (entry, _) in &all_entries {
        if let Some(source_name) = &entry.source
            && let Some(url) = &entry.url
        {
            if let Some(sha) = entry.resolved_commit.as_ref().filter(|commit| {
                commit.len() == 40 && commit.chars().all(|c| c.is_ascii_hexdigit())
            }) {
                unique_worktrees.insert((source_name.clone(), url.clone(), sha.clone()));
            }
        }
    }
    if !unique_worktrees.is_empty() {
        // Best-effort pre-warm; real failures resurface during install.
        let worktree_futures: Vec<_> = unique_worktrees
            .into_iter()
            .map(|(source, url, sha)| {
                async move {
                    cache
                        .get_or_create_worktree_for_sha(&source, &url, &sha, Some("pre-warm"))
                        .await
                        .ok();
                }
            })
            .collect();
        future::join_all(worktree_futures).await;
    }
    let installed_count = Arc::new(Mutex::new(0));
    let shared_cache = Arc::new(cache.clone());
    // `None` means effectively unbounded; clamp to at least one task.
    let concurrency = max_concurrency.unwrap_or(usize::MAX).max(1);
    let results: Vec<InstallResult> = stream::iter(all_entries)
        .map(|(entry, resource_dir)| {
            let entry = entry.clone();
            let project_dir = project_dir.to_path_buf();
            let resource_dir = resource_dir.to_string();
            let installed_count = Arc::clone(&installed_count);
            let cache = Arc::clone(&shared_cache);
            let progress_bar_ref = progress_bar.clone();
            async move {
                if let Some(ref progress) = progress_bar_ref {
                    progress.set_message(format!("Installing {}", entry.name));
                }
                let res = install_resource_for_parallel(
                    &entry,
                    &project_dir,
                    &resource_dir,
                    cache.as_ref(),
                    force_refresh,
                )
                .await;
                if let Ok((actually_installed, _checksum)) = &res {
                    // Only installs that actually rewrote a file count.
                    if *actually_installed {
                        let mut count = installed_count.lock().await;
                        *count += 1;
                    }
                    if let Some(ref progress) = progress_bar_ref {
                        progress.inc(1);
                    }
                }
                match res {
                    Ok((installed, checksum)) => Ok((entry.name.clone(), installed, checksum)),
                    Err(err) => Err((entry.name.clone(), err)),
                }
            }
        })
        .buffer_unordered(concurrency)
        .collect()
        .await;
    // Collect failures; successes need no further handling here.
    let mut errors = Vec::new();
    for result in results {
        match result {
            Ok((_name, _installed, _checksum)) => {}
            Err((name, error)) => {
                errors.push((name, error));
            }
        }
    }
    if !errors.is_empty() {
        if let Some(ref progress) = progress_bar {
            progress.finish_and_clear();
        }
        let error_msgs: Vec<String> =
            errors.into_iter().map(|(name, error)| format!(" {name}: {error}")).collect();
        return Err(anyhow::anyhow!(
            "Failed to install {} resources:\n{}",
            error_msgs.len(),
            error_msgs.join("\n")
        ));
    }
    let final_count = *installed_count.lock().await;
    if let Some(ref progress) = progress_bar {
        progress.finish_and_clear();
    }
    Ok(final_count)
}
/// Install only the resources named in `updates`, resolving each against the
/// lockfile by `(name, source)`.
///
/// `updates` tuples are `(name, source, old_version, new_version)`; only the
/// first two fields are used for lookup here. Entries missing from the
/// lockfile are silently skipped. Returns the number of resources processed
/// (unlike the bulk installers, every successful install is counted whether
/// or not its content changed).
pub async fn install_updated_resources(
    updates: &[(String, Option<String>, String, String)],
    lockfile: &LockFile,
    manifest: &Manifest,
    project_dir: &Path,
    cache: &Cache,
    pb: Option<&ProgressBar>,
    _quiet: bool,
) -> Result<usize> {
    if updates.is_empty() {
        return Ok(0);
    }
    let total = updates.len();
    // Resolve each named update to a (lockfile entry, target dir) pair.
    let mut entries_to_install = Vec::new();
    for (name, source, _, _) in updates {
        if let Some((resource_type, entry)) =
            ResourceIterator::find_resource_by_name_and_source(lockfile, name, source.as_deref())
        {
            // Tool-specific artifact paths take precedence over the
            // deprecated per-type target directory.
            let target_dir = if let Some(artifact_path) =
                manifest.get_artifact_resource_path(&entry.tool, resource_type)
            {
                artifact_path.display().to_string()
            } else {
                #[allow(deprecated)]
                resource_type.get_target_dir(&manifest.target).to_string()
            };
            entries_to_install.push((entry.clone(), target_dir));
        }
    }
    if entries_to_install.is_empty() {
        return Ok(0);
    }
    if let Some(pb) = pb {
        pb.set_message("Preparing resources...");
    }
    // Deduplicate worktree creation (only fully resolved 40-hex SHAs).
    let mut unique_worktrees = HashSet::new();
    for (entry, _) in &entries_to_install {
        if let Some(source_name) = &entry.source
            && let Some(url) = &entry.url
        {
            if let Some(sha) = entry.resolved_commit.as_ref().filter(|commit| {
                commit.len() == 40 && commit.chars().all(|c| c.is_ascii_hexdigit())
            }) {
                unique_worktrees.insert((source_name.clone(), url.clone(), sha.clone()));
            }
        }
    }
    if !unique_worktrees.is_empty() {
        // Best-effort pre-warm; real failures resurface during install.
        let worktree_futures: Vec<_> = unique_worktrees
            .into_iter()
            .map(|(source, url, sha)| {
                async move {
                    cache
                        .get_or_create_worktree_for_sha(
                            &source,
                            &url,
                            &sha,
                            Some("update-pre-warm"),
                        )
                        .await
                        .ok();
                }
            })
            .collect();
        future::join_all(worktree_futures).await;
    }
    let installed_count = Arc::new(Mutex::new(0));
    let pb = pb.map(Arc::new);
    let cache = Arc::new(cache);
    if let Some(ref pb) = pb {
        pb.set_message(format!("Installing 0/{total} resources"));
    }
    // NOTE(review): concurrency is unbounded here, unlike the other
    // installers which accept a max_concurrency cap — confirm intended.
    let results: Vec<Result<(), anyhow::Error>> = stream::iter(entries_to_install)
        .map(|(entry, resource_dir)| {
            let project_dir = project_dir.to_path_buf();
            let installed_count = Arc::clone(&installed_count);
            let pb = pb.clone();
            let cache = Arc::clone(&cache);
            async move {
                install_resource_for_parallel(
                    &entry,
                    &project_dir,
                    &resource_dir,
                    cache.as_ref(),
                    false,
                )
                .await?;
                let mut count = installed_count.lock().await;
                *count += 1;
                if let Some(pb) = pb {
                    pb.set_message(format!("Installing {}/{} resources", *count, total));
                    pb.inc(1);
                }
                Ok::<(), anyhow::Error>(())
            }
        })
        .buffer_unordered(usize::MAX)
        .collect()
        .await;
    // Surface the first failure, if any.
    for result in results {
        result?;
    }
    let final_count = *installed_count.lock().await;
    Ok(final_count)
}
/// Normalize a lockfile install path into a `.gitignore` entry: strip a
/// leading "./" and use forward slashes on every platform.
fn normalize_gitignore_entry(path: &str) -> String {
    path.strip_prefix("./").unwrap_or(path).replace('\\', "/")
}

/// Append the AGPM managed block (header, entries, footer) to `out`.
fn push_managed_section(out: &mut String, sorted_paths: &[String]) {
    out.push_str("# AGPM managed entries - do not edit below this line\n");
    for path in sorted_paths {
        out.push_str(&normalize_gitignore_entry(path));
        out.push('\n');
    }
    out.push_str("# End of AGPM managed entries\n");
}

/// Rewrite the project's `.gitignore` so its AGPM-managed section lists the
/// install path of every managed resource in `lockfile`.
///
/// User content before and after the managed section (including a legacy
/// CCPM-labelled section) is preserved; the managed block itself is fully
/// regenerated from the lockfile. When `enabled` is `false` this is a no-op.
///
/// # Errors
/// Fails if an existing `.gitignore` cannot be read or the new one cannot be
/// written.
pub fn update_gitignore(lockfile: &LockFile, project_dir: &Path, enabled: bool) -> Result<()> {
    if !enabled {
        return Ok(());
    }
    let gitignore_path = project_dir.join(".gitignore");

    // Collect the install paths of every managed resource type.
    let mut paths_to_ignore = HashSet::new();
    for resources in [&lockfile.agents, &lockfile.snippets, &lockfile.commands, &lockfile.scripts] {
        for resource in resources.iter() {
            if !resource.installed_at.is_empty() {
                paths_to_ignore.insert(resource.installed_at.clone());
            }
        }
    }

    // Split any existing .gitignore around the previous managed section,
    // dropping the managed entries themselves (they are regenerated below).
    let mut before_agpm_section = Vec::new();
    let mut after_agpm_section = Vec::new();
    if gitignore_path.exists() {
        let content = fs::read_to_string(&gitignore_path)
            .with_context(|| format!("Failed to read {}", gitignore_path.display()))?;
        let mut in_agpm_section = false;
        let mut past_agpm_section = false;
        for line in content.lines() {
            if line == "# AGPM managed entries - do not edit below this line"
                || line == "# CCPM managed entries - do not edit below this line"
            {
                in_agpm_section = true;
                continue;
            } else if line == "# End of AGPM managed entries"
                || line == "# End of CCPM managed entries"
            {
                in_agpm_section = false;
                past_agpm_section = true;
                continue;
            }
            if in_agpm_section {
                continue;
            } else if past_agpm_section {
                after_agpm_section.push(line.to_string());
            } else {
                before_agpm_section.push(line.to_string());
            }
        }
    }

    // Deterministic ordering keeps diffs stable across runs.
    let mut sorted_paths: Vec<_> = paths_to_ignore.into_iter().collect();
    sorted_paths.sort();

    let mut new_content = String::new();
    if before_agpm_section.is_empty() && after_agpm_section.is_empty() {
        // Fresh file, or one that contained only the managed section.
        new_content.push_str("# .gitignore - AGPM managed entries\n");
        new_content.push_str("# AGPM entries are automatically generated\n");
        new_content.push('\n');
        push_managed_section(&mut new_content, &sorted_paths);
    } else {
        for line in &before_agpm_section {
            new_content.push_str(line);
            new_content.push('\n');
        }
        // Separate user content from the managed block with a blank line,
        // unless the user content already ends with one.
        if let Some(last) = before_agpm_section.last() {
            if !last.trim().is_empty() {
                new_content.push('\n');
            }
        }
        push_managed_section(&mut new_content, &sorted_paths);
        if !after_agpm_section.is_empty() {
            new_content.push('\n');
            for line in &after_agpm_section {
                new_content.push_str(line);
                new_content.push('\n');
            }
        }
    }

    atomic_write(&gitignore_path, new_content.as_bytes())
        .with_context(|| format!("Failed to update {}", gitignore_path.display()))?;
    Ok(())
}
/// Delete files that were installed by `old_lockfile` but are absent from
/// `new_lockfile`, pruning any directories the removals leave empty.
///
/// Returns the lockfile-relative paths of every file that was deleted.
pub async fn cleanup_removed_artifacts(
    old_lockfile: &LockFile,
    new_lockfile: &LockFile,
    project_dir: &std::path::Path,
) -> Result<Vec<String>> {
    use std::collections::HashSet;
    // Install paths still claimed by the new lockfile must be kept.
    let retained: HashSet<String> =
        new_lockfile.all_resources().into_iter().map(|r| r.installed_at.clone()).collect();
    let mut removed_paths = Vec::new();
    for resource in old_lockfile.all_resources() {
        let install_path = &resource.installed_at;
        if retained.contains(install_path) {
            continue;
        }
        let absolute = project_dir.join(install_path);
        if !absolute.exists() {
            // Already gone; nothing to delete or report.
            continue;
        }
        tokio::fs::remove_file(&absolute)
            .await
            .with_context(|| format!("Failed to remove old artifact: {}", absolute.display()))?;
        removed_paths.push(install_path.clone());
        // Walk up and drop any now-empty parent directories.
        cleanup_empty_dirs(&absolute).await?;
    }
    Ok(removed_paths)
}
/// Walk upward from a removed file's parent, deleting each directory that is
/// empty, and stop at the `.claude` root, the filesystem root, or the first
/// directory that cannot be removed (i.e. is non-empty or inaccessible).
async fn cleanup_empty_dirs(file_path: &std::path::Path) -> Result<()> {
    let mut next = file_path.parent();
    while let Some(dir) = next {
        // Never delete the `.claude` tree root or a filesystem root.
        if dir.ends_with(".claude") || dir.parent().is_none() {
            break;
        }
        // Treat "already gone" the same as a successful removal so the walk
        // continues upward; any other error means the directory is non-empty
        // (or locked), so stop.
        let keep_climbing = match tokio::fs::remove_dir(dir).await {
            Ok(()) => true,
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => true,
            Err(_) => false,
        };
        if !keep_climbing {
            break;
        }
        next = dir.parent();
    }
    Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
/// Build a minimal [`LockedResource`] fixture. `is_local = true` yields a
/// sourceless entry read from the local filesystem; `false` yields a
/// git-backed entry (whose "abc123" commit is deliberately not a full
/// 40-hex SHA).
fn create_test_locked_resource(name: &str, is_local: bool) -> LockedResource {
    if is_local {
        LockedResource {
            name: name.to_string(),
            source: None,
            url: None,
            path: "test.md".to_string(),
            version: None,
            resolved_commit: None,
            checksum: String::new(),
            installed_at: String::new(),
            dependencies: vec![],
            resource_type: crate::core::ResourceType::Agent,
            tool: "claude-code".to_string(),
        }
    } else {
        LockedResource {
            name: name.to_string(),
            source: Some("test_source".to_string()),
            url: Some("https://github.com/test/repo.git".to_string()),
            path: "resources/test.md".to_string(),
            version: Some("v1.0.0".to_string()),
            resolved_commit: Some("abc123".to_string()),
            checksum: "sha256:test".to_string(),
            installed_at: String::new(),
            dependencies: vec![],
            resource_type: crate::core::ResourceType::Agent,
            tool: "claude-code".to_string(),
        }
    }
}
// A sourceless entry installs from a local file into
// `<project>/<resource_dir>/<name>.md` and reports `installed = true`.
#[tokio::test]
async fn test_install_resource_local() {
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();
    let local_file = temp_dir.path().join("test.md");
    std::fs::write(&local_file, "# Test Resource\nThis is a test").unwrap();
    let mut entry = create_test_locked_resource("local-test", true);
    entry.path = local_file.to_string_lossy().to_string();
    let result = install_resource(&entry, project_dir, "agents", &cache, false).await;
    assert!(result.is_ok(), "Failed to install local resource: {:?}", result);
    let (installed, _checksum) = result.unwrap();
    assert!(installed, "Should have installed new resource");
    let expected_path = project_dir.join("agents").join("local-test.md");
    assert!(expected_path.exists(), "Installed file not found");
    // Content is copied verbatim.
    let content = std::fs::read_to_string(expected_path).unwrap();
    assert_eq!(content, "# Test Resource\nThis is a test");
}
// A non-empty `installed_at` overrides the default `<resource_dir>/<name>.md`
// destination.
#[tokio::test]
async fn test_install_resource_with_custom_path() {
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();
    let local_file = temp_dir.path().join("test.md");
    std::fs::write(&local_file, "# Custom Path Test").unwrap();
    let mut entry = create_test_locked_resource("custom-test", true);
    entry.path = local_file.to_string_lossy().to_string();
    entry.installed_at = "custom/location/resource.md".to_string();
    let result = install_resource(&entry, project_dir, "agents", &cache, false).await;
    assert!(result.is_ok());
    let (installed, _checksum) = result.unwrap();
    assert!(installed, "Should have installed new resource");
    let expected_path = project_dir.join("custom/location/resource.md");
    assert!(expected_path.exists(), "File not installed at custom path");
}
// A sourceless entry whose path does not exist fails with a "Local file ...
// not found" error.
#[tokio::test]
async fn test_install_resource_local_missing_file() {
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();
    let mut entry = create_test_locked_resource("missing-test", true);
    entry.path = "/non/existent/file.md".to_string();
    let result = install_resource(&entry, project_dir, "agents", &cache, false).await;
    assert!(result.is_err());
    let error_msg = result.unwrap_err().to_string();
    assert!(error_msg.contains("Local file") && error_msg.contains("not found"));
}
// Malformed YAML frontmatter does not abort the install in this scenario:
// the file is copied through verbatim, frontmatter included.
#[tokio::test]
async fn test_install_resource_invalid_markdown_frontmatter() {
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();
    let local_file = temp_dir.path().join("invalid.md");
    std::fs::write(&local_file, "---\ninvalid: yaml: [\n---\nContent").unwrap();
    let mut entry = create_test_locked_resource("invalid-test", true);
    entry.path = local_file.to_string_lossy().to_string();
    let result = install_resource(&entry, project_dir, "agents", &cache, false).await;
    assert!(result.is_ok());
    let (installed, _checksum) = result.unwrap();
    assert!(installed);
    let dest_path = project_dir.join("agents/invalid-test.md");
    assert!(dest_path.exists());
    let installed_content = std::fs::read_to_string(&dest_path).unwrap();
    assert!(installed_content.contains("---"));
    assert!(installed_content.contains("invalid: yaml:"));
    assert!(installed_content.contains("Content"));
}
// The progress-bar wrapper installs to the same location as the plain
// `install_resource`.
#[tokio::test]
async fn test_install_resource_with_progress() {
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();
    let pb = ProgressBar::new(1);
    let local_file = temp_dir.path().join("test.md");
    std::fs::write(&local_file, "# Progress Test").unwrap();
    let mut entry = create_test_locked_resource("progress-test", true);
    entry.path = local_file.to_string_lossy().to_string();
    let result =
        install_resource_with_progress(&entry, project_dir, "agents", &cache, false, &pb).await;
    assert!(result.is_ok());
    let expected_path = project_dir.join("agents").join("progress-test.md");
    assert!(expected_path.exists());
}
// An empty lockfile installs nothing and returns a zero count.
#[tokio::test]
async fn test_install_resources_empty() {
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();
    let lockfile = LockFile::new();
    let manifest = Manifest::new();
    let (count, _) = install_resources(
        ResourceFilter::All,
        &lockfile,
        &manifest,
        project_dir,
        cache,
        false,
        None,
        None,
    )
    .await
    .unwrap();
    assert_eq!(count, 0, "Should install 0 resources from empty lockfile");
}
// Installing agents, snippets, and commands together routes each resource
// type to its own default target directory.
#[tokio::test]
async fn test_install_resources_multiple() {
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();
    let file1 = temp_dir.path().join("agent.md");
    let file2 = temp_dir.path().join("snippet.md");
    let file3 = temp_dir.path().join("command.md");
    std::fs::write(&file1, "# Agent").unwrap();
    std::fs::write(&file2, "# Snippet").unwrap();
    std::fs::write(&file3, "# Command").unwrap();
    let mut lockfile = LockFile::new();
    let mut agent = create_test_locked_resource("test-agent", true);
    agent.path = file1.to_string_lossy().to_string();
    lockfile.agents.push(agent);
    let mut snippet = create_test_locked_resource("test-snippet", true);
    snippet.path = file2.to_string_lossy().to_string();
    lockfile.snippets.push(snippet);
    let mut command = create_test_locked_resource("test-command", true);
    command.path = file3.to_string_lossy().to_string();
    lockfile.commands.push(command);
    let manifest = Manifest::new();
    let (count, _) = install_resources(
        ResourceFilter::All,
        &lockfile,
        &manifest,
        project_dir,
        cache,
        false,
        None,
        None,
    )
    .await
    .unwrap();
    assert_eq!(count, 3, "Should install 3 resources");
    // Each type lands under its default tool directory.
    assert!(project_dir.join(".claude/agents/test-agent.md").exists());
    assert!(project_dir.join(".claude/agpm/snippets/test-snippet.md").exists());
    assert!(project_dir.join(".claude/commands/test-command.md").exists());
}
// Only resources named in `updates` are installed; other lockfile entries
// are left untouched.
#[tokio::test]
async fn test_install_updated_resources() {
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();
    let file1 = temp_dir.path().join("agent.md");
    let file2 = temp_dir.path().join("snippet.md");
    std::fs::write(&file1, "# Updated Agent").unwrap();
    std::fs::write(&file2, "# Updated Snippet").unwrap();
    let mut lockfile = LockFile::new();
    let mut agent = create_test_locked_resource("test-agent", true);
    agent.path = file1.to_string_lossy().to_string();
    lockfile.agents.push(agent);
    let mut snippet = create_test_locked_resource("test-snippet", true);
    snippet.path = file2.to_string_lossy().to_string();
    lockfile.snippets.push(snippet);
    let manifest = Manifest::new();
    // (name, source, old_version, new_version)
    let updates = vec![(
        "test-agent".to_string(),
        None,
        "v1.0.0".to_string(),
        "v1.1.0".to_string(),
    )];
    let count = install_updated_resources(
        &updates,
        &lockfile,
        &manifest,
        project_dir,
        &cache,
        None,
        false,
    )
    .await
    .unwrap();
    assert_eq!(count, 1, "Should install 1 updated resource");
    assert!(project_dir.join(".claude/agents/test-agent.md").exists());
    // The snippet was not listed in `updates`, so it must not be installed.
    assert!(!project_dir.join(".claude/snippets/test-snippet.md").exists());
}
#[tokio::test]
async fn test_install_updated_resources_quiet_mode() {
    // Quiet mode (final argument `true`) must still install the resource;
    // it only suppresses progress output.
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();

    let command_file = temp_dir.path().join("command.md");
    std::fs::write(&command_file, "# Command").unwrap();

    let mut lockfile = LockFile::new();
    let mut command = create_test_locked_resource("test-command", true);
    command.path = command_file.to_string_lossy().to_string();
    lockfile.commands.push(command);

    let manifest = Manifest::new();
    let updates =
        vec![("test-command".to_string(), None, "v1.0.0".to_string(), "v2.0.0".to_string())];

    let count =
        install_updated_resources(&updates, &lockfile, &manifest, project_dir, &cache, None, true)
            .await
            .unwrap();

    assert_eq!(count, 1);
    assert!(project_dir.join(".claude/commands/test-command.md").exists());
}
#[tokio::test]
async fn test_install_resource_for_parallel() {
    // A local (non-git) resource should be copied into the given resource
    // directory by the parallel-install entry point.
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();

    let source_file = temp_dir.path().join("parallel.md");
    std::fs::write(&source_file, "# Parallel Test").unwrap();

    let mut entry = create_test_locked_resource("parallel-test", true);
    entry.path = source_file.to_string_lossy().to_string();

    let outcome =
        install_resource_for_parallel(&entry, project_dir, "agents", &cache, false).await;
    assert!(outcome.is_ok());

    let installed_path = project_dir.join("agents").join("parallel-test.md");
    assert!(installed_path.exists());
}
#[tokio::test]
async fn test_install_resource_creates_nested_directories() {
    // When `installed_at` points into a directory chain that does not exist
    // yet, install_resource must create every intermediate directory.
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();

    let source_file = temp_dir.path().join("nested.md");
    std::fs::write(&source_file, "# Nested Test").unwrap();

    let mut entry = create_test_locked_resource("nested-test", true);
    entry.path = source_file.to_string_lossy().to_string();
    entry.installed_at = "very/deeply/nested/path/resource.md".to_string();

    let outcome = install_resource(&entry, project_dir, "agents", &cache, false).await;
    assert!(outcome.is_ok());

    let (was_installed, _checksum) = outcome.unwrap();
    assert!(was_installed, "Should have installed new resource");
    assert!(project_dir.join("very/deeply/nested/path/resource.md").exists());
}
#[tokio::test]
async fn test_update_gitignore_creates_new_file() {
    // With no pre-existing .gitignore, update_gitignore should create one
    // containing the AGPM-managed section and every installed path.
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();

    let mut lockfile = LockFile::new();
    let mut agent = create_test_locked_resource("test-agent", true);
    agent.installed_at = ".claude/agents/test-agent.md".to_string();
    lockfile.agents.push(agent);
    let mut snippet = create_test_locked_resource("test-snippet", true);
    snippet.installed_at = ".claude/agpm/snippets/test-snippet.md".to_string();
    lockfile.snippets.push(snippet);

    assert!(update_gitignore(&lockfile, project_dir, true).is_ok());

    let gitignore_path = project_dir.join(".gitignore");
    assert!(gitignore_path.exists(), "Gitignore file should be created");

    let content = std::fs::read_to_string(&gitignore_path).unwrap();
    assert!(content.contains("AGPM managed entries"));
    assert!(content.contains(".claude/agents/test-agent.md"));
    assert!(content.contains(".claude/agpm/snippets/test-snippet.md"));
}
#[tokio::test]
async fn test_update_gitignore_disabled() {
    // Passing `false` disables gitignore management entirely — no file
    // should be written.
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();

    let lockfile = LockFile::new();
    assert!(update_gitignore(&lockfile, project_dir, false).is_ok());

    let gitignore_path = project_dir.join(".gitignore");
    assert!(!gitignore_path.exists(), "Gitignore should not be created when disabled");
}
#[tokio::test]
async fn test_update_gitignore_preserves_user_entries() {
    // User-authored lines outside the managed section must survive an
    // update; entries inside the managed section are replaced wholesale.
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    ensure_dir(&project_dir.join(".claude")).unwrap();

    let gitignore_path = project_dir.join(".gitignore");
    let existing_content = concat!(
        "# User comment\n",
        "user-file.txt\n",
        "*.backup\n",
        "# AGPM managed entries - do not edit below this line\n",
        ".claude/agents/old-entry.md\n",
        "# End of AGPM managed entries\n",
    );
    std::fs::write(&gitignore_path, existing_content).unwrap();

    let mut lockfile = LockFile::new();
    let mut agent = create_test_locked_resource("new-agent", true);
    agent.installed_at = ".claude/agents/new-agent.md".to_string();
    lockfile.agents.push(agent);

    assert!(update_gitignore(&lockfile, project_dir, true).is_ok());

    let updated_content = std::fs::read_to_string(&gitignore_path).unwrap();
    // User section preserved verbatim.
    assert!(updated_content.contains("user-file.txt"));
    assert!(updated_content.contains("*.backup"));
    assert!(updated_content.contains("# User comment"));
    // Managed section rewritten: new entry present, stale entry gone.
    assert!(updated_content.contains(".claude/agents/new-agent.md"));
    assert!(!updated_content.contains(".claude/agents/old-entry.md"));
}
#[tokio::test]
async fn test_update_gitignore_handles_external_paths() {
    // Resources installed outside .claude/ (e.g. scripts/) must also be
    // listed in the managed gitignore section.
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();

    let mut lockfile = LockFile::new();
    let mut script = create_test_locked_resource("test-script", true);
    script.installed_at = "scripts/test.sh".to_string();
    lockfile.scripts.push(script);
    let mut agent = create_test_locked_resource("test-agent", true);
    agent.installed_at = ".claude/agents/test.md".to_string();
    lockfile.agents.push(agent);

    assert!(update_gitignore(&lockfile, project_dir, true).is_ok());

    let content = std::fs::read_to_string(project_dir.join(".gitignore")).unwrap();
    assert!(content.contains("scripts/test.sh"));
    assert!(content.contains(".claude/agents/test.md"));
}
#[tokio::test]
async fn test_update_gitignore_migrates_ccpm_entries() {
    // A legacy CCPM-managed section must be migrated: its markers and
    // entries are removed, an AGPM section is written, and user content on
    // either side of the old section is preserved.
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    tokio::fs::create_dir_all(project_dir.join(".claude/agents")).await.unwrap();

    let gitignore_path = project_dir.join(".gitignore");
    let legacy_content = r#"# User's custom entries
*.backup
temp/
# CCPM managed entries - do not edit below this line
.claude/agents/old-ccpm-agent.md
.claude/commands/old-ccpm-command.md
# End of CCPM managed entries
# More user entries
local-config.json
"#;
    tokio::fs::write(&gitignore_path, legacy_content).await.unwrap();

    let mut lockfile = LockFile::new();
    let mut agent = create_test_locked_resource("new-agent", true);
    agent.installed_at = ".claude/agents/new-agent.md".to_string();
    lockfile.agents.push(agent);

    assert!(update_gitignore(&lockfile, project_dir, true).is_ok());

    let updated_content = tokio::fs::read_to_string(&gitignore_path).await.unwrap();
    // User entries before and after the legacy section survive.
    assert!(updated_content.contains("*.backup"));
    assert!(updated_content.contains("temp/"));
    assert!(updated_content.contains("local-config.json"));
    // New AGPM markers present; legacy CCPM markers and entries removed.
    assert!(updated_content.contains("# AGPM managed entries - do not edit below this line"));
    assert!(updated_content.contains("# End of AGPM managed entries"));
    assert!(!updated_content.contains("# CCPM managed entries"));
    assert!(!updated_content.contains("# End of CCPM managed entries"));
    assert!(!updated_content.contains("old-ccpm-agent.md"));
    assert!(!updated_content.contains("old-ccpm-command.md"));
    assert!(updated_content.contains(".claude/agents/new-agent.md"));
}
#[tokio::test]
async fn test_install_updated_resources_not_found() {
    // An update naming a resource that is absent from the lockfile should
    // succeed but install nothing.
    let temp_dir = TempDir::new().unwrap();
    let project_dir = temp_dir.path();
    let cache = Cache::with_dir(temp_dir.path().join("cache")).unwrap();

    let lockfile = LockFile::new();
    let manifest = Manifest::new();
    let updates =
        vec![("non-existent".to_string(), None, "v1.0.0".to_string(), "v2.0.0".to_string())];

    let count =
        install_updated_resources(&updates, &lockfile, &manifest, project_dir, &cache, None, false)
            .await
            .unwrap();

    assert_eq!(count, 0, "Should install 0 resources when not found");
}
}