use crate::utils::progress::{InstallationPhase, MultiPhaseProgress};
use anyhow::{Context, Result};
use std::path::PathBuf;
mod cleanup;
mod context;
mod gitignore;
mod selective;
#[cfg(test)]
mod tests;
pub use cleanup::cleanup_removed_artifacts;
pub use context::InstallContext;
pub use gitignore::{add_path_to_gitignore, update_gitignore};
pub use selective::*;
use context::read_with_cache_retry;
/// Per-resource outcome used by the parallel install pipeline.
///
/// `Ok` carries `(resource id, actually_installed, file checksum,
/// optional template-context checksum, applied patches)`;
/// `Err` pairs the resource id with its error so failures can be
/// attributed to a specific resource when aggregating.
type InstallResult = Result<
    (
        crate::lockfile::ResourceId,
        bool,
        String,
        Option<String>,
        crate::manifest::patches::AppliedPatches,
    ),
    (crate::lockfile::ResourceId, anyhow::Error),
>;
use futures::{
future,
stream::{self, StreamExt},
};
use std::path::Path;
use std::sync::Arc;
use tokio::sync::Mutex;
use crate::cache::Cache;
use crate::core::ResourceIterator;
use crate::lockfile::{LockFile, LockedResource};
use crate::manifest::Manifest;
use crate::markdown::MarkdownFile;
use crate::utils::fs::{atomic_write, ensure_dir};
use crate::utils::progress::ProgressBar;
use hex;
use std::collections::HashSet;
/// Install a single locked resource into the project directory.
///
/// Pipeline:
/// 1. Resolve the destination path from the lockfile entry.
/// 2. Fast path: skip unchanged Git resources whose inputs and on-disk
///    checksum match the previous lockfile.
/// 3. Fetch content from a Git worktree (remote sources) or a local file.
/// 4. Apply project/private patches, if any are configured.
/// 5. For `.md` files, optionally render the frontmatter/body as templates
///    (opt-in via an `agpm.templating` frontmatter flag).
/// 6. Write the result atomically and register it in `.gitignore`.
///
/// Returns `(actually_installed, file_checksum, context_checksum,
/// applied_patches)`; `actually_installed` is `true` only when bytes were
/// written to disk.
///
/// # Errors
/// Fails on missing URL/SHA metadata, unreadable sources, invalid markdown,
/// patch or template failures, and filesystem errors.
pub async fn install_resource(
    entry: &LockedResource,
    resource_dir: &str,
    context: &InstallContext<'_>,
) -> Result<(bool, String, Option<String>, crate::manifest::patches::AppliedPatches)> {
    // Destination: honor the lockfile's `installed_at` when present; otherwise
    // fall back to `<project>/<resource_dir>/<name>.md`.
    let dest_path = if entry.installed_at.is_empty() {
        context.project_dir.join(resource_dir).join(format!("{}.md", entry.name))
    } else {
        context.project_dir.join(&entry.installed_at)
    };
    // Checksum any existing file off the async runtime (blocking I/O + hash).
    // `.into()` lifts the checksum String into Option<String>.
    let existing_checksum = if dest_path.exists() {
        let path = dest_path.clone();
        tokio::task::spawn_blocking(move || LockFile::compute_checksum(&path)).await??.into()
    } else {
        None
    };
    // A missing or empty resolved commit marks a local (non-Git) dependency.
    let is_local_dependency = entry.resolved_commit.as_deref().is_none_or(str::is_empty);
    // Early-exit optimization: for Git resources, if the resolved commit,
    // variant inputs, and patches are unchanged AND the installed file's
    // checksum matches the old lockfile, reuse the previous result without
    // touching the cache or re-rendering.
    if !context.force_refresh && !is_local_dependency {
        if let Some(old_lockfile) = context.old_lockfile {
            if let Some(old_entry) = old_lockfile.find_resource(&entry.name, &entry.resource_type) {
                let resolved_commit_unchanged = old_entry.resolved_commit == entry.resolved_commit;
                let variant_inputs_unchanged = old_entry.variant_inputs == entry.variant_inputs;
                let patches_unchanged = old_entry.applied_patches == entry.applied_patches;
                let all_inputs_unchanged =
                    resolved_commit_unchanged && variant_inputs_unchanged && patches_unchanged;
                if all_inputs_unchanged && dest_path.exists() {
                    if existing_checksum.as_ref() == Some(&old_entry.checksum) {
                        tracing::debug!(
                            "⏭️ Skipping unchanged Git resource: {} (checksum matches)",
                            entry.name
                        );
                        // Report "not installed" (no write) with the previous
                        // checksums and patch record carried forward.
                        return Ok((
                            false, old_entry.checksum.clone(),
                            old_entry.context_checksum.clone(),
                            crate::manifest::patches::AppliedPatches::from_lockfile_patches(
                                &old_entry.applied_patches,
                            ),
                        ));
                    } else {
                        tracing::debug!(
                            "Checksum mismatch for {}: existing={:?}, expected={}",
                            entry.name,
                            existing_checksum,
                            old_entry.checksum
                        );
                    }
                }
            }
        }
    }
    if is_local_dependency {
        tracing::debug!(
            "Processing local dependency: {} (early-exit optimization skipped)",
            entry.name
        );
    }
    // Fetch the raw content: sourced entries read from a Git worktree (or a
    // local path masquerading as a source), unsourced entries read a project-
    // relative or absolute file path directly.
    let new_content = if let Some(source_name) = &entry.source {
        let url = entry
            .url
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("Resource {} has no URL", entry.name))?;
        // Same emptiness test as `is_local_dependency`: a sourced entry with no
        // commit is treated as a local directory source whose `url` is a path.
        let is_local_source = entry.resolved_commit.as_deref().is_none_or(str::is_empty);
        let cache_dir = if is_local_source {
            PathBuf::from(url)
        } else {
            let sha = entry.resolved_commit.as_deref().ok_or_else(|| {
                anyhow::anyhow!("Resource {} missing resolved commit SHA. Run 'agpm update' to regenerate lockfile.", entry.name)
            })?;
            // Guard against corrupt lockfiles: require a full 40-hex SHA-1.
            if sha.len() != 40 || !sha.chars().all(|c| c.is_ascii_hexdigit()) {
                return Err(anyhow::anyhow!(
                    "Invalid SHA '{}' for resource {}. Expected 40 hex characters.",
                    sha,
                    entry.name
                ));
            }
            let mut cache_dir = context
                .cache
                .get_or_create_worktree_for_sha(source_name, url, sha, Some(&entry.name))
                .await?;
            // On --force-refresh, discard and recreate the worktree; cleanup
            // failure is deliberately best-effort (`let _ =`).
            if context.force_refresh {
                let _ = context.cache.cleanup_worktree(&cache_dir).await;
                cache_dir = context
                    .cache
                    .get_or_create_worktree_for_sha(source_name, url, sha, Some(&entry.name))
                    .await?;
            }
            cache_dir
        };
        let source_path = cache_dir.join(&entry.path);
        let file_content = read_with_cache_retry(&source_path).await?;
        // Validate markdown/frontmatter early; the parsed form is discarded.
        MarkdownFile::parse(&file_content)?;
        file_content
    } else {
        // No source: `entry.path` is a local file, absolute or project-relative.
        let source_path = {
            let candidate = Path::new(&entry.path);
            if candidate.is_absolute() {
                candidate.to_path_buf()
            } else {
                context.project_dir.join(candidate)
            }
        };
        if !source_path.exists() {
            return Err(anyhow::anyhow!(
                "Local file '{}' not found. Expected at: {}",
                entry.path,
                source_path.display()
            ));
        }
        let local_content = tokio::fs::read_to_string(&source_path)
            .await
            .with_context(|| format!("Failed to read resource file: {}", source_path.display()))?;
        MarkdownFile::parse(&local_content)?;
        local_content
    };
    // Apply project and private patches when either set is configured.
    // `empty_patches` is a shared fallback so missing lookups apply nothing.
    let empty_patches = std::collections::BTreeMap::new();
    let (patched_content, applied_patches) = if context.project_patches.is_some()
        || context.private_patches.is_some()
    {
        use crate::manifest::patches::apply_patches_to_content_with_origin;
        let resource_type = entry.resource_type.to_plural();
        let lookup_name = entry.lookup_name();
        tracing::debug!(
            "Installer patch lookup: resource_type={}, lookup_name={}, name={}, manifest_alias={:?}",
            resource_type,
            lookup_name,
            entry.name,
            entry.manifest_alias
        );
        let project_patch_data = context
            .project_patches
            .and_then(|patches| patches.get(resource_type, lookup_name))
            .unwrap_or(&empty_patches);
        tracing::debug!("Found {} project patches for {}", project_patch_data.len(), lookup_name);
        let private_patch_data = context
            .private_patches
            .and_then(|patches| patches.get(resource_type, lookup_name))
            .unwrap_or(&empty_patches);
        let file_path = entry.installed_at.as_str();
        apply_patches_to_content_with_origin(
            &new_content,
            file_path,
            project_patch_data,
            private_patch_data,
        )
        .with_context(|| format!("Failed to apply patches to resource {}", entry.name))?
    } else {
        (new_content.clone(), crate::manifest::patches::AppliedPatches::default())
    };
    // Templating stage (markdown only). Produces the final content, whether
    // templating was actually applied, and (if so) the captured context
    // checksum. Most failures here fall back to the patched content rather
    // than aborting the install (via `break 'templating None`).
    let (final_content, templating_was_applied, captured_checksum) = if entry.path.ends_with(".md")
    {
        let template_context_builder = &context.template_context_builder;
        use crate::templating::TemplateRenderer;
        use crate::markdown::frontmatter::FrontmatterParser;
        let parser = FrontmatterParser::new();
        // Split raw frontmatter from body; no frontmatter means no templating.
        let (raw_frontmatter_text, body_content) =
            if let Some(raw_fm) = parser.extract_raw_frontmatter(&patched_content) {
                let body = parser.strip_frontmatter(&patched_content);
                (raw_fm, body)
            } else {
                (String::new(), patched_content.clone())
            };
        if raw_frontmatter_text.is_empty() {
            (patched_content, false, None)
        } else {
            let resource_type = entry.resource_type;
            // Labeled block: `break 'templating None` means "fall back to the
            // patched content unchanged".
            let templating_result: Option<(String, bool, Option<String>)> = 'templating: {
                let context_digest = match template_context_builder.compute_context_digest() {
                    Ok(digest) => digest,
                    Err(e) => {
                        tracing::debug!(
                            "Failed to compute context digest for {}: {}. Using original content.",
                            entry.name,
                            e
                        );
                        break 'templating None;
                    }
                };
                // Identity used to look up this resource's slice of the
                // template context (variant-aware via the inputs hash).
                let resource_id = crate::lockfile::ResourceId::new(
                    entry.name.clone(),
                    entry.source.clone(),
                    entry.tool.clone(),
                    resource_type,
                    entry.variant_inputs.hash().to_string(),
                );
                let (template_context, captured_context_checksum) = match template_context_builder
                    .build_context(&resource_id, entry.variant_inputs.json())
                    .await
                {
                    Ok(ctx) => ctx,
                    Err(e) => {
                        tracing::debug!(
                            "Failed to build template context for {}: {}. Using original content.",
                            entry.name,
                            e
                        );
                        break 'templating None;
                    }
                };
                // Verbose diagnostics: report context size before rendering.
                if context.verbose {
                    let num_resources = template_context
                        .get("resources")
                        .and_then(|v| v.as_object())
                        .map(|o| o.len())
                        .unwrap_or(0);
                    let num_dependencies = template_context
                        .get("dependencies")
                        .and_then(|v| v.as_object())
                        .map(|o| o.len())
                        .unwrap_or(0);
                    tracing::info!("📝 Rendering template: {}", entry.path);
                    tracing::info!(
                        " Context: {} resources, {} dependencies",
                        num_resources,
                        num_dependencies
                    );
                    tracing::debug!(" Context digest: {}", context_digest);
                }
                // The frontmatter is ALWAYS rendered (even before we know the
                // templating flag) — the flag itself may be templated.
                let frontmatter_template = format!("---\n{}\n---\n", raw_frontmatter_text);
                let mut renderer = TemplateRenderer::new(
                    true,
                    context.project_dir.to_path_buf(),
                    context.max_content_file_size,
                )
                .with_context(|| "Failed to create template renderer")?;
                let rendered_frontmatter = renderer
                    .render_template(&frontmatter_template, &template_context)
                    .map_err(|e| {
                        tracing::error!(
                            "Frontmatter rendering failed for resource '{}': {}",
                            entry.name,
                            e
                        );
                        e
                    })
                    .with_context(|| {
                        // Build a rich identity string (alias, source, tool,
                        // short commit, reverse-dependency hint) for the error.
                        let manifest_alias_str = entry
                            .manifest_alias
                            .as_ref()
                            .map(|a| format!(", manifest_alias=\"{}\"", a))
                            .unwrap_or_default();
                        let source_str = entry
                            .source
                            .as_ref()
                            .map(|s| format!(", source=\"{}\"", s))
                            .unwrap_or_default();
                        let tool_str = entry
                            .tool
                            .as_ref()
                            .map(|t| format!(", tool=\"{}\"", t))
                            .unwrap_or_default();
                        let commit_str = entry
                            .resolved_commit
                            .as_ref()
                            .map(|c| format!(", resolved_commit=\"{}\"", &c[..8.min(c.len())]))
                            .unwrap_or_default();
                        let parent_str = if let Some(lf) = context.lockfile {
                            let parents = find_parent_resources(lf, &entry.name);
                            if !parents.is_empty() {
                                format!(", required_by=\"{}\"", parents.join(", "))
                            } else {
                                String::new()
                            }
                        } else {
                            String::new()
                        };
                        format!(
                            "Failed to render frontmatter for canonical_name=\"{}\"{}{}{}{}{}",
                            entry.name,
                            manifest_alias_str,
                            source_str,
                            tool_str,
                            commit_str,
                            parent_str
                        )
                    })?;
                // Re-parse the rendered frontmatter to read the opt-in
                // `agpm.templating` flag; a YAML parse failure downgrades to
                // installing the patched content as-is.
                let (templating_enabled, yaml_parse_failed) = match MarkdownFile::parse(
                    &rendered_frontmatter,
                ) {
                    Ok(parsed_rendered) => (
                        parsed_rendered
                            .metadata
                            .as_ref()
                            .and_then(|m| m.extra.get("agpm"))
                            .and_then(|agpm| agpm.get("templating"))
                            .and_then(|v| v.as_bool())
                            .unwrap_or(false),
                        false,
                    ),
                    Err(e) => {
                        eprintln!(
                            "Warning: Unable to parse YAML frontmatter in '{}' after template rendering.\n\
                            The file will be installed as-is without processing.\n\
                            Parse error: {}\n",
                            entry.name, e
                        );
                        tracing::debug!(
                            "Failed to parse rendered frontmatter for {}, using original content",
                            entry.name
                        );
                        (false, true)
                    }
                };
                tracing::debug!(
                    "Resource '{}': templating_enabled={}",
                    entry.name,
                    templating_enabled
                );
                if yaml_parse_failed {
                    break 'templating Some((patched_content.clone(), false, None));
                }
                // Render the body only when templating is enabled; otherwise
                // keep the original body under the rendered frontmatter.
                let final_body = if templating_enabled {
                    let mut renderer = TemplateRenderer::new(
                        true,
                        context.project_dir.to_path_buf(),
                        context.max_content_file_size,
                    )
                    .with_context(|| "Failed to create template renderer")?;
                    renderer
                        .render_template(&body_content, &template_context)
                        .map_err(|e| {
                            tracing::error!(
                                "Body rendering failed for resource '{}': {}",
                                entry.name,
                                e
                            );
                            // Log the full cause chain for diagnostics.
                            for (i, cause) in e.chain().skip(1).enumerate() {
                                tracing::error!(" Caused by [{}]: {}", i + 1, cause);
                            }
                            e
                        })
                        .with_context(|| {
                            // Same identity-rich context as the frontmatter path.
                            let manifest_alias_str = entry
                                .manifest_alias
                                .as_ref()
                                .map(|a| format!(", manifest_alias=\"{}\"", a))
                                .unwrap_or_default();
                            let source_str = entry
                                .source
                                .as_ref()
                                .map(|s| format!(", source=\"{}\"", s))
                                .unwrap_or_default();
                            let tool_str = entry
                                .tool
                                .as_ref()
                                .map(|t| format!(", tool=\"{}\"", t))
                                .unwrap_or_default();
                            let commit_str = entry
                                .resolved_commit
                                .as_ref()
                                .map(|c| format!(", resolved_commit=\"{}\"", &c[..8.min(c.len())]))
                                .unwrap_or_default();
                            let parent_str = if let Some(lf) = context.lockfile {
                                let parents = find_parent_resources(lf, &entry.name);
                                if !parents.is_empty() {
                                    format!(", required_by=\"{}\"", parents.join(", "))
                                } else {
                                    String::new()
                                }
                            } else {
                                String::new()
                            };
                            format!(
                                "Failed to render body for canonical_name=\"{}\"{}{}{}{}{}",
                                entry.name,
                                manifest_alias_str,
                                source_str,
                                tool_str,
                                commit_str,
                                parent_str
                            )
                        })?
                } else {
                    tracing::debug!(
                        "agpm.templating not enabled for {}, using original body content",
                        entry.name
                    );
                    body_content.clone()
                };
                // Reassemble rendered frontmatter + body, preserving the
                // source file's trailing-newline convention.
                let mut final_content = rendered_frontmatter;
                final_content.push_str(&final_body);
                if patched_content.ends_with('\n') && !final_content.ends_with('\n') {
                    final_content.push('\n');
                }
                if templating_enabled && context.verbose {
                    let size_bytes = final_content.len();
                    let size_str = if size_bytes < 1024 {
                        format!("{} B", size_bytes)
                    } else if size_bytes < 1024 * 1024 {
                        format!("{:.1} KB", size_bytes as f64 / 1024.0)
                    } else {
                        format!("{:.1} MB", size_bytes as f64 / (1024.0 * 1024.0))
                    };
                    tracing::info!(" Output: {} ({})", dest_path.display(), size_str);
                    tracing::info!("✅ Template rendered successfully");
                }
                // Context checksum is only meaningful when templating ran.
                Some((
                    final_content,
                    templating_enabled,
                    if templating_enabled {
                        captured_context_checksum
                    } else {
                        None
                    },
                ))
            };
            templating_result.unwrap_or((patched_content, false, None))
        }
    } else {
        tracing::debug!("Not a markdown file: {}", entry.path);
        (patched_content, false, None)
    };
    // Checksum of the final bytes, in the lockfile's "sha256:<hex>" format.
    let file_checksum = {
        use sha2::{Digest, Sha256};
        let mut hasher = Sha256::new();
        hasher.update(final_content.as_bytes());
        let hash = hasher.finalize();
        format!("sha256:{}", hex::encode(hash))
    };
    let context_checksum = if templating_was_applied {
        captured_checksum
    } else {
        None
    };
    // Write only when content changed AND the entry wants a file on disk
    // (`install: false` marks content-only dependencies).
    let content_changed = existing_checksum.as_ref() != Some(&file_checksum);
    let should_install = entry.install.unwrap_or(true);
    let actually_installed = if should_install && content_changed {
        if let Some(parent) = dest_path.parent() {
            ensure_dir(parent)?;
        }
        // .gitignore updates are serialized through the shared lock to avoid
        // concurrent writers clobbering each other.
        if let Some(lock) = context.gitignore_lock {
            let relative_path = dest_path
                .strip_prefix(context.project_dir)
                .unwrap_or(&dest_path)
                .to_string_lossy()
                .to_string();
            add_path_to_gitignore(context.project_dir, &relative_path, lock)
                .await
                .with_context(|| format!("Failed to add {} to .gitignore", relative_path))?;
        }
        atomic_write(&dest_path, final_content.as_bytes())
            .with_context(|| format!("Failed to install resource to {}", dest_path.display()))?;
        true
    } else if !should_install {
        tracing::debug!(
            "Skipping file write for content-only dependency: {} (install=false)",
            entry.name
        );
        false
    } else {
        false
    };
    Ok((actually_installed, file_checksum, context_checksum, applied_patches))
}
/// Install a single resource while reporting status on a progress bar.
///
/// Thin wrapper over [`install_resource`]: it only publishes an
/// "Installing <name>" message on `pb` before delegating, so the return
/// contract and error behavior are exactly those of [`install_resource`].
pub async fn install_resource_with_progress(
    entry: &LockedResource,
    resource_dir: &str,
    context: &InstallContext<'_>,
    pb: &ProgressBar,
) -> Result<(bool, String, Option<String>, crate::manifest::patches::AppliedPatches)> {
    let status = format!("Installing {}", entry.name);
    pb.set_message(status);
    install_resource(entry, resource_dir, context).await
}
/// Crate-internal entry point used by the parallel install pipeline.
///
/// Pure delegation to [`install_resource`]; exists as a dedicated seam for
/// the concurrent driver in [`install_resources`]. Same return contract.
pub(crate) async fn install_resource_for_parallel(
    entry: &LockedResource,
    resource_dir: &str,
    context: &InstallContext<'_>,
) -> Result<(bool, String, Option<String>, crate::manifest::patches::AppliedPatches)> {
    install_resource(entry, resource_dir, context).await
}
/// Selects which lockfile entries [`install_resources`] should process.
///
/// Public API types should be debuggable and cheaply duplicable, so `Debug`
/// and `Clone` are derived (all payload types support both).
#[derive(Debug, Clone)]
pub enum ResourceFilter {
    /// Install every resource present in the lockfile.
    All,
    /// Install only the listed updates. Tuples are
    /// `(name, source, _, _)`; only the first two fields are read here —
    /// the trailing two are presumably old/new versions (confirm at call
    /// sites).
    Updated(Vec<(String, Option<String>, String, String)>),
}
#[allow(clippy::too_many_arguments)]
/// Install a set of lockfile resources concurrently.
///
/// Steps: collect the entries selected by `filter`, sort them for a
/// deterministic processing order, pre-create ("pre-warm") every Git
/// worktree that will be needed, then run each install through
/// [`install_resource_for_parallel`] with bounded concurrency while
/// updating `progress`.
///
/// Returns `(installed_count, file_checksums, context_checksums,
/// applied_patches)`, each list keyed by resource id. `installed_count`
/// counts only entries whose bytes actually changed on disk.
///
/// # Errors
/// Individual failures are collected and aggregated into one
/// user-friendly error after the whole stream completes.
pub async fn install_resources(
    filter: ResourceFilter,
    lockfile: &Arc<LockFile>,
    manifest: &Manifest,
    project_dir: &Path,
    cache: Cache,
    force_refresh: bool,
    max_concurrency: Option<usize>,
    progress: Option<Arc<MultiPhaseProgress>>,
    verbose: bool,
    old_lockfile: Option<&LockFile>,
) -> Result<(
    usize,
    Vec<(crate::lockfile::ResourceId, String)>,
    Vec<(crate::lockfile::ResourceId, Option<String>)>,
    Vec<(crate::lockfile::ResourceId, crate::manifest::patches::AppliedPatches)>,
)> {
    // Materialize (entry, target dir) pairs for the selected filter.
    let all_entries: Vec<(LockedResource, String)> = match filter {
        ResourceFilter::All => {
            ResourceIterator::collect_all_entries(lockfile, manifest)
                .into_iter()
                .map(|(entry, dir)| (entry.clone(), dir.into_owned()))
                .collect()
        }
        ResourceFilter::Updated(ref updates) => {
            // Resolve each named update back to its lockfile entry and the
            // tool-specific artifact directory it installs into.
            let mut entries = Vec::new();
            for (name, source, _, _) in updates {
                if let Some((resource_type, entry)) =
                    ResourceIterator::find_resource_by_name_and_source(
                        lockfile,
                        name,
                        source.as_deref(),
                    )
                {
                    // Default tool when the entry does not specify one.
                    let tool = entry.tool.as_deref().unwrap_or("claude-code");
                    let artifact_path = manifest
                        .get_artifact_resource_path(tool, resource_type)
                        .expect("Resource type should be supported by configured tools");
                    let target_dir = artifact_path.display().to_string();
                    entries.push((entry.clone(), target_dir));
                }
            }
            entries
        }
    };
    if all_entries.is_empty() {
        return Ok((0, Vec::new(), Vec::new(), Vec::new()));
    }
    // Deterministic order: by resource type, then by name.
    let mut all_entries = all_entries;
    all_entries.sort_by(|(a, _), (b, _)| {
        a.resource_type.cmp(&b.resource_type).then_with(|| a.name.cmp(&b.name))
    });
    let total = all_entries.len();
    if let Some(ref pm) = progress {
        pm.start_phase_with_progress(InstallationPhase::InstallingResources, total);
    }
    // Pre-warm: dedupe the (source, url, sha) triples that will need Git
    // worktrees, keeping only entries with a valid full 40-hex SHA.
    let mut unique_worktrees = HashSet::new();
    for (entry, _) in &all_entries {
        if let Some(source_name) = &entry.source
            && let Some(url) = &entry.url
        {
            if let Some(sha) = entry.resolved_commit.as_ref().filter(|commit| {
                commit.len() == 40 && commit.chars().all(|c| c.is_ascii_hexdigit())
            }) {
                unique_worktrees.insert((source_name.clone(), url.clone(), sha.clone()));
            }
        }
    }
    if !unique_worktrees.is_empty() {
        // Label passed to the cache for diagnostics, distinguishing the
        // full-install pre-warm from the update-path pre-warm.
        let context = match filter {
            ResourceFilter::All => "pre-warm",
            ResourceFilter::Updated(_) => "update-pre-warm",
        };
        // Create all worktrees concurrently; failures are ignored here
        // (`.ok()`) because the per-resource install will retry and report.
        let worktree_futures: Vec<_> = unique_worktrees
            .into_iter()
            .map(|(source, url, sha)| {
                let cache = cache.clone();
                async move {
                    cache
                        .get_or_create_worktree_for_sha(&source, &url, &sha, Some(context))
                        .await
                        .ok(); }
            })
            .collect();
        future::join_all(worktree_futures).await;
    }
    // Shared counter of files actually written, and a lock serializing
    // .gitignore updates across concurrent installs.
    let installed_count = Arc::new(Mutex::new(0));
    let concurrency = max_concurrency.unwrap_or(usize::MAX).max(1);
    let gitignore_lock = Arc::new(Mutex::new(()));
    if let Some(ref pm) = progress {
        pm.update_current_message(&format!("Installing 0/{total} resources"));
    }
    // Drive installs with bounded concurrency; each item resolves to an
    // `InstallResult` tagging success/failure with the resource id.
    let results: Vec<InstallResult> = stream::iter(all_entries)
        .map(|(entry, resource_dir)| {
            let project_dir = project_dir.to_path_buf();
            let installed_count = Arc::clone(&installed_count);
            let cache = cache.clone();
            let progress = progress.clone();
            let gitignore_lock = Arc::clone(&gitignore_lock);
            async move {
                if let Some(ref pm) = progress {
                    pm.update_current_message(&format!("Installing {}", entry.name));
                }
                let install_context = InstallContext::new(
                    &project_dir,
                    &cache,
                    force_refresh,
                    verbose,
                    Some(manifest),
                    Some(lockfile),
                    old_lockfile, Some(&manifest.project_patches),
                    Some(&manifest.private_patches),
                    Some(&gitignore_lock),
                    None, );
                let res =
                    install_resource_for_parallel(&entry, &resource_dir, &install_context).await;
                // On success, bump the shared counter (only for real writes)
                // and refresh the progress display.
                if let Ok((
                    actually_installed,
                    _file_checksum,
                    _context_checksum,
                    _applied_patches,
                )) = &res
                {
                    if *actually_installed {
                        let mut count = installed_count.lock().await;
                        *count += 1;
                    }
                    if let Some(ref pm) = progress {
                        let count = *installed_count.lock().await;
                        pm.update_current_message(&format!("Installing {count}/{total} resources"));
                        pm.increment_progress(1);
                    }
                }
                // Tag the outcome with the resource id for later attribution.
                match res {
                    Ok((installed, file_checksum, context_checksum, applied_patches)) => Ok((
                        entry.id(),
                        installed,
                        file_checksum,
                        context_checksum,
                        applied_patches,
                    )),
                    Err(err) => Err((entry.id(), err)),
                }
            }
        })
        .buffered(concurrency) .collect()
        .await;
    // Partition outcomes into checksum/patch lists and errors.
    let mut errors = Vec::new();
    let mut checksums = Vec::new();
    let mut context_checksums = Vec::new();
    let mut applied_patches_list = Vec::new();
    for result in results {
        match result {
            Ok((id, _installed, file_checksum, context_checksum, applied_patches)) => {
                checksums.push((id.clone(), file_checksum));
                context_checksums.push((id.clone(), context_checksum));
                applied_patches_list.push((id, applied_patches));
            }
            Err((id, error)) => {
                errors.push((id, error));
            }
        }
    }
    if !errors.is_empty() {
        if let Some(ref pm) = progress {
            pm.complete_phase(Some(&format!("Failed to install {} resources", errors.len())));
        }
        use crate::core::error::user_friendly_error;
        // Aggregate every failure into one indented, per-resource report.
        let error_msgs: Vec<String> = errors
            .into_iter()
            .map(|(id, error)| {
                let error_ctx = user_friendly_error(error);
                format!(" {}:\n {}", id.name(), error_ctx.to_string().replace('\n', "\n "))
            })
            .collect();
        return Err(anyhow::anyhow!(
            "Failed to install {} resources:\n{}",
            error_msgs.len(),
            error_msgs.join("\n\n")
        ));
    }
    let final_count = *installed_count.lock().await;
    // Only announce completion when something was actually written.
    if let Some(ref pm) = progress
        && final_count > 0
    {
        pm.complete_phase(Some(&format!("Installed {final_count} resources")));
    }
    Ok((final_count, checksums, context_checksums, applied_patches_list))
}
pub fn find_parent_resources(lockfile: &LockFile, resource_name: &str) -> Vec<String> {
use crate::core::ResourceIterator;
let mut parents = Vec::new();
for (entry, _dir) in
ResourceIterator::collect_all_entries(lockfile, &crate::manifest::Manifest::default())
{
if entry.dependencies.iter().any(|dep| dep == resource_name) {
let parent_name = entry.manifest_alias.as_ref().unwrap_or(&entry.name).clone();
parents.push(parent_name);
}
}
parents
}