agpm_cli/resolver/mod.rs
1//! Dependency resolution and conflict detection for AGPM.
2//!
3//! This module implements the core dependency resolution algorithm that transforms
4//! manifest dependencies into locked versions. It handles version constraint solving,
5//! conflict detection, transitive dependency resolution,
6//! parallel source synchronization, and relative path preservation during installation.
7//!
8//! # Architecture Overview
9//!
10//! The resolver operates using a **two-phase architecture** optimized for SHA-based worktree caching:
11//!
12//! ## Phase 1: Source Synchronization (`pre_sync_sources`)
13//! - **Purpose**: Perform all Git network operations upfront during "Syncing sources" phase
14//! - **Operations**: Clone/fetch repositories, update refs, populate cache
15//! - **Benefits**: Clear progress reporting, batch network operations, error isolation
16//! - **Result**: All required repositories cached locally for phase 2
17//!
18//! ## Phase 2: Version Resolution (`resolve` or `update`)
19//! - **Purpose**: Resolve versions to commit SHAs using cached repositories
20//! - **Operations**: Parse dependencies, resolve constraints, detect conflicts, create worktrees, preserve paths
21//! - **Benefits**: Fast local operations, no network I/O, deterministic behavior
22//! - **Result**: Locked dependencies ready for installation with preserved directory structure
23//!
24//! This two-phase approach replaces the previous three-phase model and provides better
25//! separation of concerns between network operations and dependency resolution logic.
26//!
27//! ## Algorithm Complexity
28//!
29//! - **Time**: O(n + s·log(t)) where:
30//! - n = number of dependencies
31//! - s = number of unique sources
32//! - t = average number of tags/branches per source
33//! - **Space**: O(n + s) for dependency graph and source cache
34//!
35//! ## Parallel Processing
36//!
37//! The resolver leverages async/await for concurrent operations:
38//! - Sources are synchronized in parallel using [`tokio::spawn`]
39//! - Git operations are batched to minimize network roundtrips
40//! - Progress reporting provides real-time feedback on long-running operations
41//!
42//! # Resolution Process
43//!
44//! The two-phase dependency resolution follows these steps:
45//!
46//! ## Phase 1: Source Synchronization
47//! 1. **Dependency Collection**: Extract all dependencies from manifest
48//! 2. **Source Validation**: Verify all referenced sources exist and are accessible
49//! 3. **Repository Preparation**: Use [`version_resolver::VersionResolver`] to collect unique sources
50//! 4. **Source Synchronization**: Clone/update source repositories with single fetch per repository
51//! 5. **Cache Population**: Store bare repository paths for phase 2 operations
52//!
53//! ## Phase 2: Version Resolution & Installation
54//! 1. **Batch SHA Resolution**: Resolve all collected versions to commit SHAs using cached repositories
55//! 2. **SHA-based Worktree Creation**: Create worktrees keyed by commit SHA for maximum deduplication
56//! 3. **Conflict Detection**: Check for path conflicts and incompatible versions
57//! 4. **Redundancy Analysis**: Identify duplicate resources across sources
58//! 5. **Path Processing**: Preserve directory structure by using paths directly from dependencies
59//! 6. **Resource Installation**: Copy resources to target locations with checksums
60//! 7. **Lockfile Generation**: Create deterministic lockfile entries with resolved SHAs and preserved paths
61//!
62//! This separation ensures all network operations complete in phase 1, while phase 2
63//! operates entirely on cached data for fast, deterministic resolution.
64//!
65//! ## Version Resolution Strategy
66//!
67//! Version constraints are resolved using the following precedence:
68//! 1. **Exact commits**: SHA hashes are used directly
69//! 2. **Tags**: Semantic version tags (e.g., `v1.2.3`) are preferred
70//! 3. **Branches**: Branch heads are resolved to current commits
71//! 4. **Latest**: Defaults to the default branch (usually `main` or `master`)
72//!
73//! # Conflict Detection
74//!
75//! The resolver detects several types of conflicts:
76//!
77//! ## Version Conflicts
78//! ```toml
79//! # Incompatible version constraints for the same resource
80//! [agents]
81//! app = { source = "community", path = "agents/helper.md", version = "v1.0.0" }
82//! tool = { source = "community", path = "agents/helper.md", version = "v2.0.0" }
83//! ```
84//!
85//! ## Path Conflicts
86//! ```toml
87//! # Different resources installing to the same location
88//! [agents]
89//! helper-v1 = { source = "old", path = "agents/helper.md" }
90//! helper-v2 = { source = "new", path = "agents/helper.md" }
91//! ```
92//!
93//! ## Source Conflicts
94//! When the same resource path exists in multiple sources with different content,
95//! the resolver uses source precedence (global config sources override local manifest sources).
96//!
97//! # Security Considerations
98//!
99//! The resolver implements several security measures:
100//!
101//! - **Input Validation**: All Git references are validated before checkout
102//! - **Path Sanitization**: Installation paths are validated to prevent directory traversal
103//! - **Credential Isolation**: Authentication tokens are never stored in manifest files
104//! - **Checksum Verification**: Resources are checksummed for integrity validation
105//!
106//! # Performance Optimizations
107//!
108//! - **SHA-based Worktree Caching**: Worktrees keyed by commit SHA maximize reuse across versions
109//! - **Batch Version Resolution**: All versions resolved to SHAs upfront via [`version_resolver::VersionResolver`]
110//! - **Single Fetch Per Repository**: Command-instance fetch caching eliminates redundant network operations
111//! - **Source Caching**: Git repositories are cached globally in `~/.agpm/cache/`
112//! - **Incremental Updates**: Only modified sources are re-synchronized
113//! - **Parallel Operations**: Source syncing and version resolution run concurrently
114//! - **Progress Batching**: UI updates are throttled to prevent performance impact
115//!
116//! # Error Handling
117//!
118//! The resolver provides detailed error context for common failure scenarios:
119//!
120//! - **Network Issues**: Graceful handling of Git clone/fetch failures
121//! - **Authentication**: Clear error messages for credential problems
122//! - **Version Mismatches**: Specific guidance for constraint resolution failures
123//! - **Path Issues**: Detailed information about file system conflicts
124//!
125//! # Example Usage
126//!
127//! ## Two-Phase Resolution Pattern
128//! ```rust,no_run
129//! use agpm_cli::resolver::DependencyResolver;
130//! use agpm_cli::manifest::Manifest;
131//! use agpm_cli::cache::Cache;
132//! use std::path::Path;
133//!
134//! # async fn example() -> anyhow::Result<()> {
135//! let manifest = Manifest::load(Path::new("agpm.toml"))?;
136//! let cache = Cache::new()?;
137//! let mut resolver = DependencyResolver::new_with_global(manifest.clone(), cache).await?;
138//!
139//! // Get all dependencies from manifest
140//! let deps: Vec<(String, agpm_cli::manifest::ResourceDependency)> = manifest
141//! .all_dependencies()
142//! .into_iter()
143//! .map(|(name, dep)| (name.to_string(), dep.clone()))
144//! .collect();
145//!
146//! // Phase 1: Sync all required sources (network operations)
147//! resolver.pre_sync_sources(&deps).await?;
148//!
149//! // Phase 2: Resolve dependencies using cached repositories (local operations)
150//! let lockfile = resolver.resolve().await?;
151//!
152//! println!("Resolved {} agents and {} snippets",
153//! lockfile.agents.len(), lockfile.snippets.len());
154//! # Ok(())
155//! # }
156//! ```
157//!
158//! ## Update Pattern
159//! ```rust,no_run
160//! # use agpm_cli::resolver::DependencyResolver;
161//! # use agpm_cli::manifest::Manifest;
162//! # use agpm_cli::cache::Cache;
163//! # use agpm_cli::lockfile::LockFile;
164//! # use std::path::Path;
165//! # async fn update_example() -> anyhow::Result<()> {
166//! let manifest = Manifest::load(Path::new("agpm.toml"))?;
167//! let mut lockfile = LockFile::load(Path::new("agpm.lock"))?;
168//! let cache = Cache::new()?;
169//! let mut resolver = DependencyResolver::with_cache(manifest.clone(), cache);
170//!
171//! // Get dependencies to update
172//! let deps: Vec<(String, agpm_cli::manifest::ResourceDependency)> = manifest
173//! .all_dependencies()
174//! .into_iter()
175//! .map(|(name, dep)| (name.to_string(), dep.clone()))
176//! .collect();
177//!
178//! // Phase 1: Sync sources for update
179//! resolver.pre_sync_sources(&deps).await?;
180//!
181//! // Phase 2: Update specific dependencies
182//! resolver.update(&mut lockfile, None).await?;
183//!
184//! lockfile.save(Path::new("agpm.lock"))?;
185//! # Ok(())
186//! # }
187//! ```
188//!
189//! ## Incremental Updates
190//! ```rust,no_run
191//! use agpm_cli::resolver::DependencyResolver;
192//! use agpm_cli::lockfile::LockFile;
193//! use agpm_cli::cache::Cache;
194//! use std::path::Path;
195//!
196//! # async fn update_example() -> anyhow::Result<()> {
197//! let existing = LockFile::load("agpm.lock".as_ref())?;
198//! let manifest = agpm_cli::manifest::Manifest::load("agpm.toml".as_ref())?;
199//! let cache = Cache::new()?;
200//! let mut resolver = DependencyResolver::new(manifest, cache)?;
201//!
202//! // Update specific dependencies only
203//! let deps_to_update = vec!["agent1".to_string(), "snippet2".to_string()];
204//! let deps_count = deps_to_update.len();
205//! let updated = resolver.update(&existing, Some(deps_to_update)).await?;
206//!
207//! println!("Updated {} dependencies", deps_count);
208//! # Ok(())
209//! # }
210//! ```
211
212pub mod dependency_graph;
213pub mod version_resolution;
214pub mod version_resolver;
215
216use crate::cache::Cache;
217use crate::core::{AgpmError, ResourceType};
218use crate::git::GitRepo;
219use crate::lockfile::{LockFile, LockedResource};
220use crate::manifest::{Manifest, ResourceDependency};
221use crate::metadata::MetadataExtractor;
222use crate::source::SourceManager;
223use crate::utils::{compute_relative_install_path, normalize_path, normalize_path_for_storage};
224use crate::version::conflict::ConflictDetector;
225use anyhow::{Context, Result};
226use std::collections::{HashMap, HashSet};
227use std::path::{Path, PathBuf};
228use std::time::Duration;
229
230use self::dependency_graph::{DependencyGraph, DependencyNode};
231use self::version_resolver::VersionResolver;
232
/// Key for identifying unique dependencies in maps and sets.
///
/// Combines resource type, name, source, and tool to uniquely identify a dependency.
/// This prevents collisions between same-named resources from different sources or
/// using different tools.
///
/// Layout: `(resource_type, name, source, tool)`. NOTE(review): `source`/`tool`
/// are presumably `None` for local / tool-agnostic dependencies — confirm at the
/// call sites that build these keys.
type DependencyKey = (ResourceType, String, Option<String>, Option<String>);
239
/// Derive the portion of a dependency path that should be preserved on install.
///
/// Three input shapes are handled:
/// 1. Relative paths containing `..` — the *leading* parent/current components are
///    dropped and the remainder is kept verbatim.
/// 2. Absolute paths — `..` segments are resolved against preceding segments and
///    the root/prefix is stripped.
/// 3. Clean relative paths — returned as-is with separators normalized to `/`.
///
/// # Examples
///
/// ```
/// use std::path::Path;
/// # use agpm_cli::resolver::extract_meaningful_path;
///
/// // Leading parent navigation is stripped
/// assert_eq!(extract_meaningful_path(Path::new("../../snippets/dir/file.md")), "snippets/dir/file.md");
///
/// // Absolute paths: root removed, `..` resolved (Unix-style)
/// #[cfg(unix)]
/// assert_eq!(extract_meaningful_path(Path::new("/tmp/foo/../bar/agent.md")), "tmp/bar/agent.md");
///
/// // Absolute paths: prefix removed, `..` resolved (Windows-style)
/// #[cfg(windows)]
/// assert_eq!(extract_meaningful_path(Path::new("C:\\tmp\\foo\\..\\bar\\agent.md")), "tmp/bar/agent.md");
///
/// // Clean relative paths pass through unchanged
/// assert_eq!(extract_meaningful_path(Path::new("agents/test.md")), "agents/test.md");
/// ```
pub fn extract_meaningful_path(path: &Path) -> String {
    use std::path::Component;

    let parts: Vec<Component<'_>> = path.components().collect();

    if path.is_absolute() {
        // Absolute: resolve `..` against already-kept segments; root, prefix and
        // `.` markers are simply dropped.
        let mut kept: Vec<&str> = Vec::new();
        for part in &parts {
            match part {
                Component::Normal(segment) => kept.push(segment.to_str().unwrap_or("")),
                Component::ParentDir => {
                    kept.pop();
                }
                _ => {}
            }
        }
        kept.join("/")
    } else if parts.iter().any(|part| matches!(part, Component::ParentDir)) {
        // Relative with `..`: keep everything from the first normal segment
        // onward, verbatim (any later `..` components are preserved as-is).
        let first_normal = parts
            .iter()
            .position(|part| matches!(part, Component::Normal(_)))
            .unwrap_or(0);
        parts[first_normal..]
            .iter()
            .filter_map(|part| part.as_os_str().to_str())
            .collect::<Vec<_>>()
            .join("/")
    } else {
        // Already a clean relative path; just normalize Windows separators.
        path.to_str().unwrap_or("").replace('\\', "/")
    }
}
306
/// Type alias for resource lookup key: (`ResourceType`, name, source).
///
/// Used internally by the resolver to uniquely identify resources across different sources.
/// This enables precise lookups when multiple resources share the same name but come from
/// different sources (common with transitive dependencies).
///
/// # Components
///
/// * `ResourceType` - The type of resource (Agent, Snippet, Command, Script, Hook, `McpServer`)
/// * `String` - The resource name as defined in the manifest
/// * `Option<String>` - The source name (None for local resources without a source)
///
/// Note: unlike [`DependencyKey`], this key carries no tool component.
///
/// # Usage
///
/// This key is used in `HashMap<ResourceKey, ResourceInfo>` to build a complete map
/// of all resources in the lockfile for efficient cross-source dependency resolution.
type ResourceKey = (crate::core::ResourceType, String, Option<String>);
324
/// Type alias for resource information: (source, version).
///
/// Stores the source and version information for a resolved resource. Used in conjunction
/// with [`ResourceKey`] to enable efficient lookups during transitive dependency resolution.
///
/// # Components
///
/// * First `Option<String>` - The source name (None for local resources)
/// * Second `Option<String>` - The version constraint (None for unpinned resources)
///
/// # Usage
///
/// Paired with [`ResourceKey`] as the value in a `HashMap<ResourceKey, ResourceInfo>`
/// holding resource metadata for cross-source dependency resolution. This allows the
/// resolver to construct fully-qualified dependency references like
/// `source:type/name:version`.
type ResourceInfo = (Option<String>, Option<String>);
341
342/// Read a file with retry logic to handle cross-process filesystem cache coherency issues.
343///
344/// This function wraps `std::fs::read_to_string` with retry logic to handle cases where
345/// files created by Git subprocesses are not immediately visible to the parent Rust process
346/// due to filesystem cache propagation delays. This is particularly important in CI
347/// environments with network-attached storage where cache coherency delays can be significant.
348///
349/// # Arguments
350///
351/// * `path` - The file path to read
352///
353/// # Returns
354///
355/// Returns the file content as a `String`, or an error if the file cannot be read after retries.
356///
357/// # Retry Strategy
358///
359/// - Initial delay: 10ms
360/// - Max delay: 500ms (capped exponential backoff)
361/// - Max attempts: 10
362/// - Total max time: ~5 seconds
363///
364/// Only `NotFound` errors are retried, as these indicate cache coherency issues.
365/// Other errors (permissions, I/O errors) fail immediately.
366fn read_with_cache_retry_sync(path: &Path) -> Result<String> {
367 use std::io;
368 use std::thread;
369
370 let mut attempts = 0;
371 const MAX_ATTEMPTS: u32 = 10;
372
373 loop {
374 match std::fs::read_to_string(path) {
375 Ok(content) => return Ok(content),
376 Err(e) if e.kind() == io::ErrorKind::NotFound && attempts < MAX_ATTEMPTS => {
377 attempts += 1;
378 // Exponential backoff: 10ms, 20ms, 40ms, 80ms, 160ms, 320ms, 500ms (capped)
379 let delay_ms = std::cmp::min(10 * (1 << attempts), 500);
380 let delay = Duration::from_millis(delay_ms);
381
382 tracing::debug!(
383 "File not yet visible (attempt {}/{}): {}",
384 attempts,
385 MAX_ATTEMPTS,
386 path.display()
387 );
388
389 thread::sleep(delay);
390 }
391 Err(e) => {
392 return Err(anyhow::anyhow!("Failed to read file: {}", path.display()).context(e));
393 }
394 }
395 }
396}
397
/// Core dependency resolver that transforms manifest dependencies into lockfile entries.
///
/// The [`DependencyResolver`] is the main entry point for dependency resolution.
/// It manages source repositories, resolves version constraints, detects conflicts,
/// and generates deterministic lockfile entries using a centralized SHA-based
/// resolution strategy for optimal performance.
///
/// # SHA-Based Resolution Workflow
///
/// Starting in v0.3.2, the resolver uses [`VersionResolver`] for centralized version
/// resolution that minimizes Git operations and maximizes worktree reuse:
/// 1. **Collection**: Gather all (source, version) pairs from dependencies
/// 2. **Batch Resolution**: Resolve all versions to commit SHAs in parallel
/// 3. **SHA-Based Worktrees**: Create worktrees keyed by commit SHA
/// 4. **Deduplication**: Multiple refs to same SHA share one worktree
///
/// # Configuration
///
/// The resolver can be configured in several ways:
/// - **Standard**: Uses manifest sources only via [`new()`]
/// - **Global**: Includes global config sources via [`new_with_global()`]
/// - **Custom Cache**: Uses custom cache directory via [`with_cache()`]
///
/// # Thread Safety
///
/// The resolver is not thread-safe due to its mutable state during resolution.
/// Create separate instances for concurrent operations.
///
/// [`new()`]: DependencyResolver::new
/// [`new_with_global()`]: DependencyResolver::new_with_global
/// [`with_cache()`]: DependencyResolver::with_cache
pub struct DependencyResolver {
    /// The project manifest whose dependencies are being resolved.
    manifest: Manifest,
    /// Manages Git repository operations, source URL resolution, and authentication.
    ///
    /// The source manager handles:
    /// - Mapping source names to Git repository URLs
    /// - Git operations (clone, fetch, checkout) for dependency resolution
    /// - Authentication token management for private repositories
    /// - Source validation and configuration management
    pub source_manager: SourceManager,
    /// Handle to the local Git repository cache (per module docs, `~/.agpm/cache/`).
    cache: Cache,
    /// Cached per-(source, version) preparation results built during the
    /// analysis stage so individual dependency resolution can reuse worktrees
    /// without triggering additional sync operations.
    ///
    /// NOTE(review): keys appear to be `group_key(source, version)` strings
    /// (`"source::version"`) — confirm at the call sites that populate this map.
    prepared_versions: HashMap<String, PreparedSourceVersion>,
    /// Centralized version resolver for efficient SHA-based dependency resolution.
    ///
    /// The `VersionResolver` handles the crucial first phase of dependency resolution
    /// by batch-resolving all version specifications to commit SHAs before any worktree
    /// operations. This strategy enables maximum worktree reuse and minimal Git operations.
    ///
    /// Used by [`prepare_remote_groups`] to resolve all dependencies upfront.
    version_resolver: VersionResolver,
    /// Dependency graph tracking which resources depend on which others.
    ///
    /// Maps from (`resource_type`, name, source, tool) to a list of dependencies in the format
    /// "`resource_type/name`". This is populated during transitive dependency
    /// resolution and used to fill the dependencies field in `LockedResource` entries.
    /// The source and tool are included to prevent cross-source and cross-tool dependency contamination.
    dependency_map: HashMap<DependencyKey, Vec<String>>,
    /// Conflict detector for identifying version conflicts.
    ///
    /// Tracks version requirements across all dependencies (direct and transitive)
    /// and detects incompatible version constraints before lockfile creation.
    conflict_detector: ConflictDetector,
    /// Maps resource names to their original pattern alias for pattern-expanded dependencies.
    ///
    /// When a pattern dependency (e.g., `all-helpers = { path = "agents/helpers/*.md" }`)
    /// expands to multiple concrete dependencies (helper-alpha, helper-beta, etc.), this
    /// map tracks which pattern alias each expanded resource came from. This enables
    /// patches defined under the pattern alias to be correctly applied to all matched resources.
    ///
    /// Example: If "all-helpers" expands to "helper-alpha" and "helper-beta", this map contains:
    /// - (ResourceType::Agent, "helper-alpha") -> "all-helpers"
    /// - (ResourceType::Agent, "helper-beta") -> "all-helpers"
    ///
    /// The key includes ResourceType to prevent collisions when different resource types
    /// have the same concrete name (e.g., agents/deploy.md and commands/deploy.md).
    pattern_alias_map: HashMap<(ResourceType, String), String>,
}
479
/// Cached resolution result for a single (source, version) pair.
///
/// Built during the analysis stage (see `DependencyResolver::prepared_versions`)
/// so later per-dependency resolution can reuse the same worktree without
/// re-syncing.
#[derive(Clone, Debug, Default)]
struct PreparedSourceVersion {
    /// Filesystem path of the worktree prepared for this version.
    worktree_path: PathBuf,
    /// The concrete version the request resolved to (e.g. a tag name), if any.
    resolved_version: Option<String>,
    /// The commit the version resolved to (SHA, per the module's SHA-based workflow).
    resolved_commit: String,
}
486
/// Descriptor for a prepared (source, version) group.
///
/// NOTE(review): marked `#[allow(dead_code)]` with no explanation — presumably
/// retained for planned resolution-pipeline work; confirm before removing.
#[derive(Clone, Debug)]
#[allow(dead_code)]
struct PreparedGroupDescriptor {
    /// Combined group key — presumably produced by `DependencyResolver::group_key`
    /// (`"source::version"`); confirm at construction sites.
    key: String,
    /// Name of the source repository this group belongs to.
    source: String,
    /// Version requested in the manifest, if one was specified.
    requested_version: Option<String>,
    /// Version component used when building `key`.
    version_key: String,
}
495
496impl DependencyResolver {
497 fn group_key(source: &str, version: &str) -> String {
498 format!("{source}::{version}")
499 }
500
501 /// Adds or updates a resource entry in the lockfile based on resource type.
502 ///
503 /// This helper method eliminates code duplication between the `resolve()` and `update()`
504 /// methods by centralizing lockfile entry management logic. It automatically determines
505 /// the resource type from the entry name and adds or updates the entry in the appropriate
506 /// collection within the lockfile.
507 ///
508 /// The method performs upsert behavior - if an entry with matching name and source
509 /// already exists in the appropriate collection, it will be updated (including version);
510 /// otherwise, a new entry is added. This allows version updates (e.g., v1.0 → v2.0)
511 /// to replace the existing entry rather than creating duplicates.
512 ///
513 /// # Arguments
514 ///
515 /// * `lockfile` - Mutable reference to the lockfile to modify
516 /// * `name` - The name of the resource entry (used to determine resource type)
517 /// * `entry` - The [`LockedResource`] entry to add or update
518 ///
519 /// # Resource Type Detection
520 ///
521 /// Resource types are threaded from the manifest throughout the resolution pipeline,
522 /// which maps to the following lockfile collections:
523 /// - `"agent"` → `lockfile.agents`
524 /// - `"snippet"` → `lockfile.snippets`
525 /// - `"command"` → `lockfile.commands`
526 /// - `"script"` → `lockfile.scripts`
527 /// - `"hook"` → `lockfile.hooks`
528 /// - `"mcp-server"` → `lockfile.mcp_servers`
529 ///
530 /// # Example
531 ///
532 /// ```ignore
533 /// # use agpm_cli::lockfile::{LockFile, LockedResource};
534 /// # use agpm_cli::core::ResourceType;
535 /// # use agpm_cli::resolver::DependencyResolver;
536 /// # let resolver = DependencyResolver::new();
537 /// let mut lockfile = LockFile::new();
538 /// let entry = LockedResource {
539 /// name: "my-agent".to_string(),
540 /// source: Some("github".to_string()),
541 /// url: Some("https://github.com/org/repo.git".to_string()),
542 /// path: "agents/my-agent.md".to_string(),
543 /// version: Some("v1.0.0".to_string()),
544 /// resolved_commit: Some("abc123def456...".to_string()),
545 /// checksum: "sha256:a1b2c3d4...".to_string(),
546 /// installed_at: ".claude/agents/my-agent.md".to_string(),
547 /// dependencies: vec![],
548 /// resource_type: ResourceType::Agent,
549 /// };
550 ///
551 /// // Automatically adds to agents collection based on resource type detection
552 /// resolver.add_or_update_lockfile_entry(&mut lockfile, "my-agent", entry);
553 /// assert_eq!(lockfile.agents.len(), 1);
554 ///
555 /// // Subsequent calls update the existing entry
556 /// let updated_entry = LockedResource {
557 /// name: "my-agent".to_string(),
558 /// version: Some("v1.1.0".to_string()),
559 /// // ... other fields
560 /// # source: Some("github".to_string()),
561 /// # url: Some("https://github.com/org/repo.git".to_string()),
562 /// # path: "agents/my-agent.md".to_string(),
563 /// # resolved_commit: Some("def456789abc...".to_string()),
564 /// # checksum: "sha256:b2c3d4e5...".to_string()),
565 /// # installed_at: ".claude/agents/my-agent.md".to_string(),
566 /// dependencies: vec![],
567 /// resource_type: ResourceType::Agent,
568 /// };
569 /// resolver.add_or_update_lockfile_entry(&mut lockfile, "my-agent", updated_entry);
570 /// assert_eq!(lockfile.agents.len(), 1); // Still one entry, but updated
571 /// ```
572 fn add_or_update_lockfile_entry(
573 &self,
574 lockfile: &mut LockFile,
575 _name: &str,
576 entry: LockedResource,
577 ) {
578 // Get the appropriate resource collection based on the entry's type
579 let resources = lockfile.get_resources_mut(entry.resource_type);
580
581 // Use (name, source, tool) matching for deduplication
582 // This allows multiple entries with the same name from different sources or tools,
583 // which will be caught by conflict detection if they map to the same path
584 if let Some(existing) = resources
585 .iter_mut()
586 .find(|e| e.name == entry.name && e.source == entry.source && e.tool == entry.tool)
587 {
588 *existing = entry;
589 } else {
590 resources.push(entry);
591 }
592 }
593
594 /// Removes stale lockfile entries that are no longer in the manifest.
595 ///
596 /// This method removes lockfile entries for direct manifest dependencies that have been
597 /// commented out or removed from the manifest. This must be called BEFORE
598 /// `remove_manifest_entries_for_update()` to ensure stale entries don't cause conflicts
599 /// during resolution.
600 ///
601 /// A manifest-level entry is identified by:
602 /// - `manifest_alias.is_none()` - Direct dependency with no pattern expansion
603 /// - `manifest_alias.is_some()` - Pattern-expanded dependency (alias must be in manifest)
604 ///
605 /// For each stale entry found, this also removes its transitive children to maintain
606 /// lockfile consistency.
607 ///
608 /// # Arguments
609 ///
610 /// * `lockfile` - The mutable lockfile to clean
611 ///
612 /// # Examples
613 ///
614 /// If a user comments out an agent in agpm.toml:
615 /// ```toml
616 /// # [agents]
617 /// # example = { source = "community", path = "agents/example.md", version = "v1.0.0" }
618 /// ```
619 ///
620 /// This function will remove the "example" agent from the lockfile and all its transitive
621 /// dependencies before the update process begins.
622 fn remove_stale_manifest_entries(&self, lockfile: &mut LockFile) {
623 use std::collections::HashSet;
624
625 // Collect all current manifest keys for each resource type
626 let manifest_agents: HashSet<String> =
627 self.manifest.agents.keys().map(|k| k.to_string()).collect();
628 let manifest_snippets: HashSet<String> =
629 self.manifest.snippets.keys().map(|k| k.to_string()).collect();
630 let manifest_commands: HashSet<String> =
631 self.manifest.commands.keys().map(|k| k.to_string()).collect();
632 let manifest_scripts: HashSet<String> =
633 self.manifest.scripts.keys().map(|k| k.to_string()).collect();
634 let manifest_hooks: HashSet<String> =
635 self.manifest.hooks.keys().map(|k| k.to_string()).collect();
636 let manifest_mcp_servers: HashSet<String> =
637 self.manifest.mcp_servers.keys().map(|k| k.to_string()).collect();
638
639 // Helper to get the right manifest keys for a resource type
640 let get_manifest_keys = |resource_type: crate::core::ResourceType| match resource_type {
641 crate::core::ResourceType::Agent => &manifest_agents,
642 crate::core::ResourceType::Snippet => &manifest_snippets,
643 crate::core::ResourceType::Command => &manifest_commands,
644 crate::core::ResourceType::Script => &manifest_scripts,
645 crate::core::ResourceType::Hook => &manifest_hooks,
646 crate::core::ResourceType::McpServer => &manifest_mcp_servers,
647 };
648
649 // Collect (name, source) pairs to remove
650 let mut entries_to_remove: HashSet<(String, Option<String>)> = HashSet::new();
651 let mut direct_entries: Vec<(String, Option<String>)> = Vec::new();
652
653 // Find all manifest-level entries that are no longer in the manifest
654 for resource_type in crate::core::ResourceType::all() {
655 let manifest_keys = get_manifest_keys(*resource_type);
656 let resources = lockfile.get_resources(*resource_type);
657
658 for entry in resources {
659 // Determine if this is a stale manifest-level entry (no longer in manifest)
660 let is_stale = if let Some(ref alias) = entry.manifest_alias {
661 // Pattern-expanded entry: stale if alias is NOT in manifest
662 !manifest_keys.contains(alias)
663 } else {
664 // Direct entry: stale if name is NOT in manifest
665 !manifest_keys.contains(&entry.name)
666 };
667
668 if is_stale {
669 let key = (entry.name.clone(), entry.source.clone());
670 entries_to_remove.insert(key.clone());
671 direct_entries.push(key);
672 }
673 }
674 }
675
676 // For each stale entry, recursively collect its transitive children
677 for (parent_name, parent_source) in direct_entries {
678 for resource_type in crate::core::ResourceType::all() {
679 if let Some(parent_entry) = lockfile
680 .get_resources(*resource_type)
681 .iter()
682 .find(|e| e.name == parent_name && e.source == parent_source)
683 {
684 Self::collect_transitive_children(
685 lockfile,
686 parent_entry,
687 &mut entries_to_remove,
688 );
689 }
690 }
691 }
692
693 // Remove all marked entries
694 let should_remove = |entry: &crate::lockfile::LockedResource| {
695 entries_to_remove.contains(&(entry.name.clone(), entry.source.clone()))
696 };
697
698 lockfile.agents.retain(|entry| !should_remove(entry));
699 lockfile.snippets.retain(|entry| !should_remove(entry));
700 lockfile.commands.retain(|entry| !should_remove(entry));
701 lockfile.scripts.retain(|entry| !should_remove(entry));
702 lockfile.hooks.retain(|entry| !should_remove(entry));
703 lockfile.mcp_servers.retain(|entry| !should_remove(entry));
704 }
705
706 /// Removes lockfile entries for manifest dependencies that will be re-resolved.
707 ///
708 /// This method removes old entries for direct manifest dependencies before updating,
709 /// which handles the case where a dependency's source or resource type changes.
710 /// This prevents duplicate entries with the same name but different sources.
711 ///
712 /// Pattern-expanded and transitive dependencies are preserved because:
713 /// - Pattern expansions will be re-added during resolution with (name, source) matching
714 /// - Transitive dependencies aren't manifest keys and won't be removed
715 ///
716 /// # Arguments
717 ///
718 /// * `lockfile` - The mutable lockfile to clean
719 /// * `manifest_keys` - Set of manifest dependency keys being updated
720 fn remove_manifest_entries_for_update(
721 &self,
722 lockfile: &mut LockFile,
723 manifest_keys: &HashSet<String>,
724 ) {
725 use std::collections::HashSet;
726
727 // Collect (name, source) pairs to remove
728 // We use (name, source) tuples to distinguish same-named resources from different sources
729 let mut entries_to_remove: HashSet<(String, Option<String>)> = HashSet::new();
730
731 // Step 1: Find direct manifest entries and collect them for transitive traversal
732 let mut direct_entries: Vec<(String, Option<String>)> = Vec::new();
733
734 for resource_type in crate::core::ResourceType::all() {
735 let resources = lockfile.get_resources(*resource_type);
736 for entry in resources {
737 // Check if this entry originates from a manifest key being updated
738 if manifest_keys.contains(&entry.name)
739 || entry
740 .manifest_alias
741 .as_ref()
742 .is_some_and(|alias| manifest_keys.contains(alias))
743 {
744 let key = (entry.name.clone(), entry.source.clone());
745 entries_to_remove.insert(key.clone());
746 direct_entries.push(key);
747 }
748 }
749 }
750
751 // Step 2: For each direct entry, recursively collect its transitive children
752 // This ensures that when "agent-A" changes from repo1 to repo2, we also remove
753 // all transitive dependencies that came from repo1 via agent-A
754 for (parent_name, parent_source) in direct_entries {
755 // Find the parent entry in the lockfile
756 for resource_type in crate::core::ResourceType::all() {
757 if let Some(parent_entry) = lockfile
758 .get_resources(*resource_type)
759 .iter()
760 .find(|e| e.name == parent_name && e.source == parent_source)
761 {
762 // Walk its dependency tree
763 Self::collect_transitive_children(
764 lockfile,
765 parent_entry,
766 &mut entries_to_remove,
767 );
768 }
769 }
770 }
771
772 // Step 3: Remove all marked entries
773 let should_remove = |entry: &crate::lockfile::LockedResource| {
774 entries_to_remove.contains(&(entry.name.clone(), entry.source.clone()))
775 };
776
777 lockfile.agents.retain(|entry| !should_remove(entry));
778 lockfile.snippets.retain(|entry| !should_remove(entry));
779 lockfile.commands.retain(|entry| !should_remove(entry));
780 lockfile.scripts.retain(|entry| !should_remove(entry));
781 lockfile.hooks.retain(|entry| !should_remove(entry));
782 lockfile.mcp_servers.retain(|entry| !should_remove(entry));
783 }
784
785 /// Recursively collect all transitive children of a lockfile entry.
786 ///
787 /// This walks the dependency graph starting from `parent`, following the `dependencies`
788 /// field to find all resources that transitively depend on the parent. Only dependencies
789 /// with the same source as the parent are collected (to avoid removing unrelated resources).
790 ///
791 /// The `dependencies` field contains strings in the format:
792 /// - `"resource_type/name"` for dependencies from the same source
793 /// - `"source:resource_type/name:version"` for explicit source references
794 ///
795 /// # Arguments
796 ///
797 /// * `lockfile` - The lockfile to search for dependencies
798 /// * `parent` - The parent entry whose children we want to collect
799 /// * `entries_to_remove` - Set of (name, source) pairs to populate with found children
800 fn collect_transitive_children(
801 lockfile: &crate::lockfile::LockFile,
802 parent: &crate::lockfile::LockedResource,
803 entries_to_remove: &mut std::collections::HashSet<(String, Option<String>)>,
804 ) {
805 // For each dependency declared by this parent
806 for dep_ref in &parent.dependencies {
807 // Parse dependency reference: "source:resource_type/name:version" or "resource_type/name"
808 // Examples: "repo1:snippet/utils:v1.0.0" or "agent/helper"
809 let (dep_source, dep_name) = if let Some(colon_pos) = dep_ref.find(':') {
810 // Format: "source:resource_type/name:version"
811 let source_part = &dep_ref[..colon_pos];
812 let rest = &dep_ref[colon_pos + 1..];
813 // Find the resource_type/name part (before optional :version)
814 let type_name_part = if let Some(ver_colon) = rest.rfind(':') {
815 &rest[..ver_colon]
816 } else {
817 rest
818 };
819 // Extract name from "resource_type/name"
820 if let Some(slash_pos) = type_name_part.find('/') {
821 let name = &type_name_part[slash_pos + 1..];
822 (Some(source_part.to_string()), name.to_string())
823 } else {
824 continue; // Invalid format, skip
825 }
826 } else {
827 // Format: "resource_type/name"
828 if let Some(slash_pos) = dep_ref.find('/') {
829 let name = &dep_ref[slash_pos + 1..];
830 // Inherit parent's source
831 (parent.source.clone(), name.to_string())
832 } else {
833 continue; // Invalid format, skip
834 }
835 };
836
837 // Find the dependency entry with matching name and source
838 for resource_type in crate::core::ResourceType::all() {
839 if let Some(dep_entry) = lockfile
840 .get_resources(*resource_type)
841 .iter()
842 .find(|e| e.name == dep_name && e.source == dep_source)
843 {
844 let key = (dep_entry.name.clone(), dep_entry.source.clone());
845
846 // Add to removal set and recurse (if not already processed)
847 if !entries_to_remove.contains(&key) {
848 entries_to_remove.insert(key);
849 // Recursively collect this dependency's children
850 Self::collect_transitive_children(lockfile, dep_entry, entries_to_remove);
851 }
852 }
853 }
854 }
855 }
856
857 /// Detects conflicts where multiple dependencies resolve to the same installation path.
858 ///
859 /// This method validates that no two dependencies will overwrite each other during
860 /// installation. It builds a map of all resolved `installed_at` paths and checks for
861 /// collisions across all resource types.
862 ///
863 /// # Arguments
864 ///
865 /// * `lockfile` - The lockfile containing all resolved dependencies
866 ///
867 /// # Returns
868 ///
869 /// Returns `Ok(())` if no conflicts are detected, or an error describing the conflicts.
870 ///
871 /// # Errors
872 ///
873 /// Returns an error if:
874 /// - Two or more dependencies resolve to the same `installed_at` path
875 /// - The error message lists all conflicting dependency names and the shared path
876 ///
877 /// # Examples
878 ///
879 /// ```rust,ignore
880 /// // This would cause a conflict error:
881 /// // [agents]
882 /// // v1 = { source = "repo", path = "agents/example.md", version = "v1.0" }
883 /// // v2 = { source = "repo", path = "agents/example.md", version = "v2.0" }
884 /// // Both resolve to .claude/agents/example.md
885 /// ```
886 fn detect_target_conflicts(&self, lockfile: &LockFile) -> Result<()> {
887 use std::collections::HashMap;
888
889 // Map of (installed_at path, resolved_commit) -> list of dependency names
890 // Two dependencies with the same path AND same commit are NOT a conflict
891 let mut path_map: HashMap<(String, Option<String>), Vec<String>> = HashMap::new();
892
893 // Collect all resources from lockfile
894 // Note: Hooks and MCP servers are excluded because they're configuration-only
895 // resources that are designed to share config files (.claude/settings.local.json
896 // for hooks, .mcp.json for MCP servers), not individual files that would conflict.
897 let all_resources: Vec<(&str, &LockedResource)> = lockfile
898 .agents
899 .iter()
900 .map(|r| (r.name.as_str(), r))
901 .chain(lockfile.snippets.iter().map(|r| (r.name.as_str(), r)))
902 .chain(lockfile.commands.iter().map(|r| (r.name.as_str(), r)))
903 .chain(lockfile.scripts.iter().map(|r| (r.name.as_str(), r)))
904 // Hooks and MCP servers intentionally omitted - they share config files
905 .collect();
906
907 // Build the path map with commit information
908 for (name, resource) in &all_resources {
909 let key = (resource.installed_at.clone(), resource.resolved_commit.clone());
910 path_map.entry(key).or_default().push((*name).to_string());
911 }
912
913 // Now check for actual conflicts: same path but DIFFERENT commits
914 // Group by path only to find potential conflicts
915 let mut path_only_map: HashMap<String, Vec<(&str, &LockedResource)>> = HashMap::new();
916 for (name, resource) in &all_resources {
917 path_only_map.entry(resource.installed_at.clone()).or_default().push((name, resource));
918 }
919
920 // Find conflicts (same path with different commits)
921 let mut conflicts: Vec<(String, Vec<String>)> = Vec::new();
922 for (path, resources) in path_only_map {
923 if resources.len() > 1 {
924 // Check if they have different commits
925 let commits: std::collections::HashSet<_> =
926 resources.iter().map(|(_, r)| &r.resolved_commit).collect();
927
928 // Only a conflict if different commits
929 if commits.len() > 1 {
930 let names: Vec<String> =
931 resources.iter().map(|(n, _)| (*n).to_string()).collect();
932 conflicts.push((path, names));
933 }
934 }
935 }
936
937 if !conflicts.is_empty() {
938 // Build a detailed error message
939 let mut error_msg = String::from(
940 "Target path conflicts detected:\n\n\
941 Multiple dependencies resolve to the same installation path with different content.\n\
942 This would cause files to overwrite each other.\n\n",
943 );
944
945 for (path, names) in &conflicts {
946 error_msg.push_str(&format!(
947 " Path: {}\n Conflicts: {}\n\n",
948 path,
949 names.join(", ")
950 ));
951 }
952
953 error_msg.push_str(
954 "To resolve this conflict:\n\
955 1. Use custom 'target' field to specify different installation paths:\n\
956 Example: target = \"custom/subdir/file.md\"\n\n\
957 2. Use custom 'filename' field to specify different filenames:\n\
958 Example: filename = \"utils-v2.md\"\n\n\
959 3. For transitive dependencies, add them as direct dependencies with custom target/filename\n\n\
960 4. Ensure pattern dependencies don't overlap with single-file dependencies\n\n\
961 Note: This often occurs when different dependencies have transitive dependencies\n\
962 with the same name but from different sources.",
963 );
964
965 return Err(anyhow::anyhow!(error_msg));
966 }
967
968 Ok(())
969 }
970
971 /// Pre-syncs all sources needed for the given dependencies.
972 ///
973 /// This method implements the first phase of the two-phase resolution architecture.
974 /// It should be called during the "Syncing sources" phase to perform all Git
975 /// clone/fetch operations upfront, before actual dependency resolution.
976 ///
977 /// This separation provides several benefits:
978 /// - Clear separation of network operations from version resolution logic
979 /// - Better progress reporting with distinct phases
980 /// - Enables batch processing of Git operations for efficiency
981 /// - Allows the `resolve_all()` method to work purely with local cached data
982 ///
983 /// After calling this method, the internal [`VersionResolver`] will have all
984 /// necessary source repositories cached and ready for version-to-SHA resolution.
985 ///
986 /// # Arguments
987 ///
988 /// * `deps` - A slice of tuples containing dependency names and their definitions.
989 /// Only dependencies with Git sources will be processed.
990 ///
991 /// # Example
992 ///
993 /// ```no_run
994 /// # use agpm_cli::resolver::DependencyResolver;
995 /// # use agpm_cli::manifest::{Manifest, ResourceDependency};
996 /// # use agpm_cli::cache::Cache;
997 /// # async fn example() -> anyhow::Result<()> {
998 /// let manifest = Manifest::new();
999 /// let cache = Cache::new()?;
1000 /// let mut resolver = DependencyResolver::with_cache(manifest.clone(), cache);
1001 ///
1002 /// // Get all dependencies from manifest
1003 /// let deps: Vec<(String, ResourceDependency)> = manifest
1004 /// .all_dependencies()
1005 /// .into_iter()
1006 /// .map(|(name, dep)| (name.to_string(), dep.clone()))
1007 /// .collect();
1008 ///
1009 /// // Phase 1: Pre-sync all sources (performs Git clone/fetch operations)
1010 /// resolver.pre_sync_sources(&deps).await?;
1011 ///
1012 /// // Phase 2: Now sources are ready for version resolution (no network I/O)
1013 /// let resolved = resolver.resolve().await?;
1014 /// # Ok(())
1015 /// # }
1016 /// ```
1017 ///
1018 /// # Two-Phase Resolution Pattern
1019 ///
1020 /// This method is part of AGPM's two-phase resolution architecture:
1021 ///
1022 /// 1. **Sync Phase** (`pre_sync_sources`): Clone/fetch all Git repositories
1023 /// 2. **Resolution Phase** (`resolve` or `update`): Resolve versions to SHAs locally
1024 ///
1025 /// This pattern ensures all network operations happen upfront with clear progress
1026 /// reporting, while version resolution can proceed quickly using cached data.
1027 ///
1028 /// # Errors
1029 ///
1030 /// Returns an error if:
1031 /// - Source repository cloning or fetching fails
1032 /// - Network connectivity issues occur
1033 /// - Authentication fails for private repositories
1034 /// - Source names in dependencies don't match configured sources
1035 /// - Git operations fail due to repository corruption or disk space issues
1036 pub async fn pre_sync_sources(&mut self, deps: &[(String, ResourceDependency)]) -> Result<()> {
1037 // Clear and rebuild the version resolver entries
1038 self.version_resolver.clear();
1039
1040 // Collect all unique (source, version) pairs
1041 for (_, dep) in deps {
1042 if let Some(source_name) = dep.get_source() {
1043 let source_url =
1044 self.source_manager.get_source_url(source_name).ok_or_else(|| {
1045 AgpmError::SourceNotFound {
1046 name: source_name.to_string(),
1047 }
1048 })?;
1049
1050 let version = dep.get_version();
1051
1052 // Add to version resolver for batch syncing
1053 self.version_resolver.add_version(source_name, &source_url, version);
1054 }
1055 }
1056
1057 // Pre-sync all sources (performs Git operations)
1058 self.version_resolver.pre_sync_sources().await.context("Failed to sync sources")?;
1059
1060 Ok(())
1061 }
1062
1063 /// Get available versions (tags) for a repository.
1064 ///
1065 /// Lists all tags from a Git repository, which typically represent available versions.
1066 /// This is useful for checking what versions are available for updates.
1067 ///
1068 /// # Arguments
1069 ///
1070 /// * `repo_path` - Path to the Git repository (typically in the cache directory)
1071 ///
1072 /// # Returns
1073 ///
1074 /// A vector of version strings (tag names) available in the repository.
1075 ///
1076 /// # Examples
1077 ///
1078 /// ```rust,no_run,ignore
1079 /// use agpm_cli::resolver::DependencyResolver;
1080 /// use agpm_cli::cache::Cache;
1081 ///
1082 /// # async fn example() -> anyhow::Result<()> {
1083 /// let cache = Cache::new()?;
1084 /// let repo_path = cache.get_repo_path("community");
1085 /// let resolver = DependencyResolver::new(manifest, cache, 10)?;
1086 ///
1087 /// let versions = resolver.get_available_versions(&repo_path).await?;
1088 /// for version in versions {
1089 /// println!("Available: {}", version);
1090 /// }
1091 /// # Ok(())
1092 /// # }
1093 /// ```
1094 pub async fn get_available_versions(&self, repo_path: &Path) -> Result<Vec<String>> {
1095 let repo = GitRepo::new(repo_path);
1096 repo.list_tags()
1097 .await
1098 .with_context(|| format!("Failed to list tags from repository at {repo_path:?}"))
1099 }
1100
1101 /// Creates worktrees for all resolved SHAs in parallel.
1102 ///
1103 /// This helper method is part of AGPM's SHA-based worktree architecture, processing
1104 /// all resolved versions from the [`VersionResolver`] and creating Git worktrees
1105 /// for each unique commit SHA. It leverages async concurrency to create multiple
1106 /// worktrees in parallel while maintaining proper error propagation.
1107 ///
1108 /// The method implements SHA-based deduplication - multiple versions (tags, branches)
1109 /// that resolve to the same commit SHA will share a single worktree, maximizing
1110 /// disk space efficiency and reducing clone operations.
1111 ///
1112 /// # Implementation Details
1113 ///
1114 /// 1. **Parallel Execution**: Uses `futures::future::join_all()` for concurrent worktree creation
1115 /// 2. **SHA-based Keys**: Worktrees are keyed by commit SHA rather than version strings
1116 /// 3. **Deduplication**: Multiple refs pointing to the same commit share one worktree
1117 /// 4. **Error Handling**: Fails fast if any worktree creation fails
1118 ///
1119 /// # Returns
1120 ///
1121 /// Returns a [`HashMap`] mapping repository keys (format: `"source::version"`) to
1122 /// [`PreparedSourceVersion`] structs containing:
1123 /// - `worktree_path`: Absolute path to the created worktree directory
1124 /// - `resolved_version`: The resolved Git reference (tag, branch, or SHA)
1125 /// - `resolved_commit`: The final commit SHA for the worktree
1126 ///
1127 /// # Example Usage
1128 ///
1129 /// ```ignore
1130 /// // This is called internally after version resolution
1131 /// let prepared = resolver.create_worktrees_for_resolved_versions().await?;
1132 ///
1133 /// // Access worktree for a specific dependency
1134 /// let key = DependencyResolver::group_key("my-source", "v1.0.0");
1135 /// if let Some(prepared_version) = prepared.get(&key) {
1136 /// println!("Worktree at: {}", prepared_version.worktree_path.display());
1137 /// println!("Commit SHA: {}", prepared_version.resolved_commit);
1138 /// }
1139 /// ```
1140 ///
1141 /// # Errors
1142 ///
1143 /// Returns an error if:
1144 /// - Source URL cannot be found for a resolved version (indicates configuration issue)
1145 /// - Worktree creation fails for any SHA (disk space, permissions, Git errors)
1146 /// - File system operations fail (I/O errors, permission denied)
1147 /// - Git operations fail (corrupted repository, invalid SHA)
1148 async fn create_worktrees_for_resolved_versions(
1149 &self,
1150 ) -> Result<HashMap<String, PreparedSourceVersion>> {
1151 let resolved_full = self.version_resolver.get_all_resolved_full().clone();
1152 let mut prepared_versions = HashMap::new();
1153
1154 // Build futures for parallel worktree creation
1155 let mut futures = Vec::new();
1156
1157 for ((source_name, version_key), resolved_version) in resolved_full {
1158 let sha = resolved_version.sha;
1159 let resolved_ref = resolved_version.resolved_ref;
1160 let repo_key = Self::group_key(&source_name, &version_key);
1161 let cache_clone = self.cache.clone();
1162 let source_name_clone = source_name.clone();
1163
1164 // Get the source URL for this source
1165 let source_url_clone = self
1166 .source_manager
1167 .get_source_url(&source_name)
1168 .ok_or_else(|| AgpmError::SourceNotFound {
1169 name: source_name.to_string(),
1170 })?
1171 .to_string();
1172
1173 let sha_clone = sha.clone();
1174 let resolved_ref_clone = resolved_ref.clone();
1175
1176 let future = async move {
1177 // Use SHA-based worktree creation
1178 // The version resolver has already handled fetching and SHA resolution
1179 let worktree_path = cache_clone
1180 .get_or_create_worktree_for_sha(
1181 &source_name_clone,
1182 &source_url_clone,
1183 &sha_clone,
1184 Some(&source_name_clone), // context for logging
1185 )
1186 .await?;
1187
1188 Ok::<_, anyhow::Error>((
1189 repo_key,
1190 PreparedSourceVersion {
1191 worktree_path,
1192 resolved_version: Some(resolved_ref_clone),
1193 resolved_commit: sha_clone,
1194 },
1195 ))
1196 };
1197
1198 futures.push(future);
1199 }
1200
1201 // Execute all futures concurrently and collect results
1202 let results = futures::future::join_all(futures).await;
1203
1204 // Process results and build the map
1205 for result in results {
1206 let (key, prepared) = result?;
1207 prepared_versions.insert(key, prepared);
1208 }
1209
1210 Ok(prepared_versions)
1211 }
1212
1213 async fn prepare_remote_groups(&mut self, deps: &[(String, ResourceDependency)]) -> Result<()> {
1214 self.prepared_versions.clear();
1215
1216 // Check if we need to rebuild version resolver entries
1217 // This happens when prepare_remote_groups is called without pre_sync_sources
1218 // (e.g., during tests or backward compatibility)
1219 if !self.version_resolver.has_entries() {
1220 // Rebuild entries for version resolution
1221 for (_, dep) in deps {
1222 if let Some(source_name) = dep.get_source() {
1223 let source_url =
1224 self.source_manager.get_source_url(source_name).ok_or_else(|| {
1225 AgpmError::SourceNotFound {
1226 name: source_name.to_string(),
1227 }
1228 })?;
1229
1230 let version = dep.get_version();
1231
1232 // Add to version resolver for batch resolution
1233 self.version_resolver.add_version(source_name, &source_url, version);
1234 }
1235 }
1236
1237 // If entries were rebuilt, we need to sync sources first
1238 self.version_resolver.pre_sync_sources().await.context("Failed to sync sources")?;
1239 }
1240
1241 // Now resolve all versions to SHAs
1242 self.version_resolver.resolve_all().await.context("Failed to resolve versions to SHAs")?;
1243
1244 // Step 3: Create worktrees for all resolved SHAs in parallel
1245 let prepared_versions = self.create_worktrees_for_resolved_versions().await?;
1246
1247 // Store the prepared versions
1248 self.prepared_versions.extend(prepared_versions);
1249
1250 // Step 4: Handle local sources separately (they don't need worktrees)
1251 for (_, dep) in deps {
1252 if let Some(source_name) = dep.get_source() {
1253 let source_url =
1254 self.source_manager.get_source_url(source_name).ok_or_else(|| {
1255 AgpmError::SourceNotFound {
1256 name: source_name.to_string(),
1257 }
1258 })?;
1259
1260 // Check if this is a local directory source
1261 if crate::utils::is_local_path(&source_url) {
1262 let version_key = dep.get_version().unwrap_or("HEAD");
1263 let group_key = Self::group_key(source_name, version_key);
1264
1265 // Add to prepared_versions with the local path
1266 self.prepared_versions.insert(
1267 group_key,
1268 PreparedSourceVersion {
1269 worktree_path: PathBuf::from(&source_url),
1270 resolved_version: Some("local".to_string()),
1271 resolved_commit: String::new(), // No commit for local sources
1272 },
1273 );
1274 }
1275 }
1276 }
1277
1278 // Phase completion is handled by the caller
1279
1280 Ok(())
1281 }
1282
1283 /// Creates a new resolver using only manifest-defined sources.
1284 ///
1285 /// This constructor creates a resolver that only considers sources defined
1286 /// in the manifest file. Global configuration sources from `~/.agpm/config.toml`
1287 /// are ignored, which may cause resolution failures for private repositories
1288 /// that require authentication.
1289 ///
1290 /// # Usage
1291 ///
1292 /// Use this constructor for:
1293 /// - Public repositories only
1294 /// - Testing and development
1295 /// - Backward compatibility with older workflows
1296 ///
1297 /// For production use with private repositories, prefer [`new_with_global()`].
1298 ///
1299 /// # Errors
1300 ///
1301 /// Returns an error if the cache cannot be created.
1302 ///
1303 /// [`new_with_global()`]: DependencyResolver::new_with_global
1304 pub fn new(manifest: Manifest, cache: Cache) -> Result<Self> {
1305 let source_manager = SourceManager::from_manifest(&manifest)?;
1306 let version_resolver = VersionResolver::new(cache.clone());
1307
1308 Ok(Self {
1309 manifest,
1310 source_manager,
1311 cache,
1312 prepared_versions: HashMap::new(),
1313 version_resolver,
1314 dependency_map: HashMap::new(),
1315 conflict_detector: ConflictDetector::new(),
1316 pattern_alias_map: HashMap::new(),
1317 })
1318 }
1319
1320 /// Creates a new resolver with global configuration support.
1321 ///
1322 /// This is the recommended constructor for most use cases. It loads both
1323 /// manifest sources and global sources from `~/.agpm/config.toml`, enabling
1324 /// access to private repositories with authentication tokens.
1325 ///
1326 /// # Source Priority
1327 ///
1328 /// When sources are defined in both locations:
1329 /// 1. **Global sources** (from `~/.agpm/config.toml`) are loaded first
1330 /// 2. **Local sources** (from `agpm.toml`) can override global sources
1331 ///
1332 /// This allows teams to share project configurations while keeping
1333 /// authentication tokens in user-specific global config.
1334 ///
1335 /// # Errors
1336 ///
1337 /// Returns an error if:
1338 /// - The cache cannot be created
1339 /// - The global config file exists but cannot be parsed
1340 /// - Network errors occur while validating global sources
1341 pub async fn new_with_global(manifest: Manifest, cache: Cache) -> Result<Self> {
1342 let source_manager = SourceManager::from_manifest_with_global(&manifest).await?;
1343 let version_resolver = VersionResolver::new(cache.clone());
1344
1345 Ok(Self {
1346 manifest,
1347 source_manager,
1348 cache,
1349 prepared_versions: HashMap::new(),
1350 version_resolver,
1351 dependency_map: HashMap::new(),
1352 conflict_detector: ConflictDetector::new(),
1353 pattern_alias_map: HashMap::new(),
1354 })
1355 }
1356
1357 /// Creates a new resolver with a custom cache.
1358 ///
1359 /// This constructor is primarily used for testing and specialized deployments
1360 /// where the default cache location (`~/.agpm/cache/`) is not suitable.
1361 ///
1362 /// # Use Cases
1363 ///
1364 /// - **Testing**: Isolated cache for test environments
1365 /// - **CI/CD**: Custom cache locations for build systems
1366 /// - **Containers**: Non-standard filesystem layouts
1367 /// - **Multi-user**: Shared cache directories
1368 ///
1369 /// # Note
1370 ///
1371 /// This constructor does not load global configuration. If you need both
1372 /// custom cache location and global config support, create the resolver
1373 /// with [`new_with_global()`] and manually configure the source manager.
1374 ///
1375 /// [`new_with_global()`]: DependencyResolver::new_with_global
1376 #[must_use]
1377 pub fn with_cache(manifest: Manifest, cache: Cache) -> Self {
1378 let cache_dir = cache.get_cache_location().to_path_buf();
1379 let source_manager = SourceManager::from_manifest_with_cache(&manifest, cache_dir);
1380 let version_resolver = VersionResolver::new(cache.clone());
1381
1382 Self {
1383 manifest,
1384 source_manager,
1385 cache,
1386 prepared_versions: HashMap::new(),
1387 version_resolver,
1388 dependency_map: HashMap::new(),
1389 conflict_detector: ConflictDetector::new(),
1390 pattern_alias_map: HashMap::new(),
1391 }
1392 }
1393
1394 /// Resolves all dependencies and generates a complete lockfile.
1395 ///
1396 /// This is the main resolution method that processes all dependencies from
1397 /// the manifest and produces a deterministic lockfile. The process includes:
1398 ///
1399 /// 1. **Source Validation**: Verify all referenced sources exist
1400 /// 2. **Parallel Sync**: Clone/update source repositories concurrently
1401 /// 3. **Version Resolution**: Resolve constraints to specific commits
1402 /// 4. **Entry Generation**: Create lockfile entries with checksums
1403 ///
1404 /// # Algorithm Details
1405 ///
1406 /// The resolution algorithm processes dependencies in dependency order to ensure
1407 /// consistency. For each dependency:
1408 /// - Local dependencies are processed immediately (no network access)
1409 /// - Remote dependencies trigger source synchronization
1410 /// - Version constraints are resolved using Git tag/branch lookup
1411 /// - Installation paths are determined based on resource type
1412 ///
1413 /// # Parameters
1414 ///
1415 /// - `progress`: Optional progress manager for user feedback during long operations
1416 ///
1417 /// # Returns
1418 ///
1419 /// A [`LockFile`] containing all resolved dependencies with:
1420 /// - Exact commit hashes for reproducible installations
1421 /// - Source URLs for traceability
1422 /// - Installation paths for each resource
1423 /// - Checksums for integrity verification (computed later during installation)
1424 ///
1425 /// # Errors
1426 ///
1427 /// Resolution can fail due to:
1428 /// - **Network Issues**: Git clone/fetch failures
1429 /// - **Authentication**: Missing or invalid credentials for private sources
1430 /// - **Version Conflicts**: Incompatible version constraints
1431 /// - **Missing Resources**: Referenced files don't exist in sources
1432 /// - **Path Conflicts**: Multiple resources installing to same location
1433 ///
1434 /// # Performance
1435 ///
1436 /// - **Parallel Source Sync**: Multiple sources are processed concurrently
1437 /// - **Cache Utilization**: Previously cloned sources are reused
1438 /// - **Progress Reporting**: Non-blocking UI updates during resolution
1439 ///
1440 /// [`LockFile`]: crate::lockfile::LockFile
1441 ///
1442 /// Resolve transitive dependencies by extracting metadata from resource files.
1443 ///
1444 /// This method builds a dependency graph by:
1445 /// 1. Starting with direct manifest dependencies
1446 /// 2. Extracting metadata from each resolved resource
1447 /// 3. Adding discovered transitive dependencies to the graph
1448 /// 4. Checking for circular dependencies
1449 /// 5. Returning all dependencies in topological order
1450 ///
1451 /// # Cross-Source Handling
1452 ///
1453 /// Resources are uniquely identified by `(ResourceType, name, source)`, allowing multiple
1454 /// resources with the same name from different sources to coexist. During topological
1455 /// ordering, all resources with matching name and type are included, even if they come
1456 /// from different sources.
1457 ///
1458 /// # Arguments
1459 /// * `base_deps` - The initial dependencies from the manifest
1460 /// * `enable_transitive` - Whether to resolve transitive dependencies
1461 ///
1462 /// # Returns
1463 /// A vector of all dependencies (direct + transitive) in topological order, including
1464 /// all same-named resources from different sources. Each tuple contains:
1465 /// - Dependency name
1466 /// - Resource dependency details
1467 /// - Resource type (to avoid ambiguity when same name exists across types)
1468 async fn resolve_transitive_dependencies(
1469 &mut self,
1470 base_deps: &[(String, ResourceDependency, crate::core::ResourceType)],
1471 enable_transitive: bool,
1472 ) -> Result<Vec<(String, ResourceDependency, crate::core::ResourceType)>> {
1473 // Clear state from any previous resolution to prevent stale data
1474 // IMPORTANT: Must clear before early return to avoid contaminating non-transitive runs
1475 self.dependency_map.clear();
1476 // NOTE: Don't reset conflict_detector here - it was already populated with direct dependencies
1477
1478 if !enable_transitive {
1479 // If transitive resolution is disabled, return base dependencies as-is
1480 // with their resource types already threaded from the manifest
1481 return Ok(base_deps.to_vec());
1482 }
1483
1484 let mut graph = DependencyGraph::new();
1485 // Use (resource_type, name, source, tool) as key to distinguish same-named resources from different sources and tools
1486 let mut all_deps: HashMap<
1487 (crate::core::ResourceType, String, Option<String>, Option<String>),
1488 ResourceDependency,
1489 > = HashMap::new();
1490 let mut processed: HashSet<(
1491 crate::core::ResourceType,
1492 String,
1493 Option<String>,
1494 Option<String>,
1495 )> = HashSet::new();
1496 let mut queue: Vec<(String, ResourceDependency, Option<crate::core::ResourceType>)> =
1497 Vec::new();
1498
1499 // Add initial dependencies to queue with their threaded types
1500 for (name, dep, resource_type) in base_deps {
1501 let source = dep.get_source().map(std::string::ToString::to_string);
1502 let tool = dep.get_tool().map(std::string::ToString::to_string);
1503 queue.push((name.clone(), dep.clone(), Some(*resource_type)));
1504 all_deps.insert((*resource_type, name.clone(), source, tool), dep.clone());
1505 }
1506
1507 // Process queue to discover transitive dependencies
1508 while let Some((name, dep, resource_type)) = queue.pop() {
1509 let source = dep.get_source().map(std::string::ToString::to_string);
1510 let tool = dep.get_tool().map(std::string::ToString::to_string);
1511 let resource_type =
1512 resource_type.expect("resource_type should always be threaded through queue");
1513 let key = (resource_type, name.clone(), source.clone(), tool.clone());
1514
1515 // Check if this queue entry is stale (superseded by conflict resolution)
1516 // IMPORTANT: This must come BEFORE the processed check so that conflict-resolved
1517 // entries can be reprocessed even if an older version was already processed.
1518 // If all_deps has a different version than what we popped, skip this entry
1519 if let Some(current_dep) = all_deps.get(&key) {
1520 if current_dep.get_version() != dep.get_version() {
1521 // This entry was superseded by conflict resolution, skip it
1522 continue;
1523 }
1524 }
1525
1526 if processed.contains(&key) {
1527 continue;
1528 }
1529
1530 processed.insert(key.clone());
1531
1532 // Handle pattern dependencies by expanding them to concrete files
1533 if dep.is_pattern() {
1534 // Expand the pattern to get all matching files
1535 match self.expand_pattern_to_concrete_deps(&name, &dep, resource_type).await {
1536 Ok(concrete_deps) => {
1537 // Queue each concrete dependency for transitive resolution
1538 for (concrete_name, concrete_dep) in concrete_deps {
1539 // Record the mapping from concrete resource name to pattern alias
1540 // This enables patches defined under the pattern alias to be applied to all matched resources
1541 // Key includes ResourceType to prevent collisions between different resource types
1542 self.pattern_alias_map
1543 .insert((resource_type, concrete_name.clone()), name.clone());
1544
1545 let concrete_source =
1546 concrete_dep.get_source().map(std::string::ToString::to_string);
1547 let concrete_tool =
1548 concrete_dep.get_tool().map(std::string::ToString::to_string);
1549 let concrete_key = (
1550 resource_type,
1551 concrete_name.clone(),
1552 concrete_source,
1553 concrete_tool,
1554 );
1555
1556 // Only add if not already processed or queued
1557 if let std::collections::hash_map::Entry::Vacant(e) =
1558 all_deps.entry(concrete_key)
1559 {
1560 e.insert(concrete_dep.clone());
1561 queue.push((concrete_name, concrete_dep, Some(resource_type)));
1562 }
1563 }
1564 }
1565 Err(e) => {
1566 anyhow::bail!(
1567 "Failed to expand pattern '{}' for transitive dependency extraction: {}",
1568 dep.get_path(),
1569 e
1570 );
1571 }
1572 }
1573 continue; // Skip to next queue item
1574 }
1575
1576 // Get the resource content to extract metadata
1577 let content = self.fetch_resource_content(&name, &dep).await.with_context(|| {
1578 format!("Failed to fetch resource '{name}' for transitive dependency extraction")
1579 })?;
1580
1581 // Extract metadata from the resource
1582 let path = PathBuf::from(dep.get_path());
1583 let metadata =
1584 MetadataExtractor::extract(&path, &content, self.manifest.project.as_ref())?;
1585
1586 // Process transitive dependencies if present
1587 if let Some(deps_map) = metadata.dependencies {
1588 tracing::debug!(
1589 "Processing transitive deps for: {} (has source: {:?})",
1590 name,
1591 dep.get_source()
1592 );
1593
1594 for (dep_resource_type_str, dep_specs) in deps_map {
1595 // Convert plural form from YAML (e.g., "agents") to ResourceType enum
1596 // The ResourceType::FromStr accepts both plural and singular forms
1597 let dep_resource_type: crate::core::ResourceType =
1598 dep_resource_type_str.parse().unwrap_or(crate::core::ResourceType::Snippet);
1599
1600 for dep_spec in dep_specs {
1601 // UNIFIED APPROACH: File-relative path resolution for all transitive dependencies
1602
1603 // Get the canonical path to the parent resource file
1604 let parent_file_path = self
1605 .get_canonical_path_for_dependency(&dep)
1606 .await
1607 .with_context(|| {
1608 format!(
1609 "Failed to get parent path for transitive dependencies of '{}'",
1610 name
1611 )
1612 })?;
1613
1614 // Check if this is a glob pattern
1615 let is_pattern = dep_spec.path.contains('*')
1616 || dep_spec.path.contains('?')
1617 || dep_spec.path.contains('[');
1618
1619 let trans_canonical = if is_pattern {
1620 // For patterns, normalize (resolve .. and .) but don't canonicalize
1621 let parent_dir = parent_file_path.parent()
1622 .ok_or_else(|| anyhow::anyhow!(
1623 "Failed to resolve transitive dependency '{}' for '{}': parent file has no directory",
1624 dep_spec.path, name
1625 ))?;
1626 let resolved = parent_dir.join(&dep_spec.path);
1627 // IMPORTANT: Preserve the root component when normalizing
1628 let mut result = PathBuf::new();
1629 for component in resolved.components() {
1630 match component {
1631 std::path::Component::RootDir => {
1632 result.push(component);
1633 } // Preserve root!
1634 std::path::Component::ParentDir => {
1635 result.pop();
1636 }
1637 std::path::Component::CurDir => {}
1638 _ => {
1639 result.push(component);
1640 }
1641 }
1642 }
1643 result
1644 } else if dep_spec.path.starts_with("./")
1645 || dep_spec.path.starts_with("../")
1646 {
1647 // File-relative path (used in Markdown YAML frontmatter)
1648 crate::utils::resolve_file_relative_path(
1649 &parent_file_path,
1650 &dep_spec.path,
1651 )
1652 .with_context(|| {
1653 format!(
1654 "Failed to resolve transitive dependency '{}' for '{}'",
1655 dep_spec.path, name
1656 )
1657 })?
1658 } else {
1659 // Repo-relative path (used in JSON dependencies field)
1660 // Get the repository root (worktree path for Git sources, source path for local)
1661 let repo_root = if dep.get_source().is_some() {
1662 // For Git sources, the parent_file_path is inside a worktree
1663 // Find the worktree root by going up until we find it
1664 parent_file_path.ancestors()
1665 .find(|p| {
1666 // Worktree directories have format: owner_repo_sha8
1667 p.file_name()
1668 .and_then(|n| n.to_str())
1669 .map(|s| s.contains('_'))
1670 .unwrap_or(false)
1671 })
1672 .ok_or_else(|| anyhow::anyhow!(
1673 "Failed to find worktree root for transitive dependency '{}'",
1674 dep_spec.path
1675 ))?
1676 } else {
1677 // For local sources, go up to the source root
1678 parent_file_path.ancestors()
1679 .nth(2) // Go up 2 levels from the file (e.g., from commands/file.json to repo root)
1680 .ok_or_else(|| anyhow::anyhow!(
1681 "Failed to find source root for transitive dependency '{}'",
1682 dep_spec.path
1683 ))?
1684 };
1685
1686 let full_path = repo_root.join(&dep_spec.path);
1687 full_path.canonicalize().with_context(|| {
1688 format!(
1689 "Failed to resolve repo-relative transitive dependency '{}' for '{}': {} (repo root: {})",
1690 dep_spec.path, name, full_path.display(), repo_root.display()
1691 )
1692 })?
1693 };
1694
1695 // Create the transitive dependency based on whether parent is Git or path-only
1696 use crate::manifest::DetailedDependency;
1697 let trans_dep = if dep.get_source().is_none() {
1698 // Path-only transitive dep (parent is path-only)
1699 // The path was resolved relative to the parent file (file-relative resolution)
1700 // CRITICAL: Always store as manifest-relative path (even with ../) for lockfile portability
1701 // Absolute paths in lockfiles break cross-machine sharing
1702
1703 let manifest_dir = self.manifest.manifest_dir.as_ref()
1704 .ok_or_else(|| anyhow::anyhow!("Manifest directory not available for path-only transitive dep"))?;
1705
1706 // Always compute relative path from manifest to target
1707 // This handles both inside (agents/foo.md) and outside (../shared/utils.md) cases
1708 let dep_path_str = match manifest_dir.canonicalize() {
1709 Ok(canonical_manifest) => {
1710 // Normal case: both paths are canonical, compute relative path
1711 crate::utils::compute_relative_path(
1712 &canonical_manifest,
1713 &trans_canonical,
1714 )
1715 }
1716 Err(e) => {
1717 // Canonicalization failed (broken symlink, permissions, etc.)
1718 // We MUST still produce a relative path for lockfile portability.
1719 // Use the non-canonical manifest_dir - not ideal but better than absolute paths.
1720 eprintln!(
1721 "Warning: Could not canonicalize manifest directory {}: {}. Using non-canonical path for relative path computation.",
1722 manifest_dir.display(),
1723 e
1724 );
1725 crate::utils::compute_relative_path(
1726 manifest_dir,
1727 &trans_canonical,
1728 )
1729 }
1730 };
1731
1732 // Determine tool for transitive dependency
1733 let trans_tool = if let Some(explicit_tool) = &dep_spec.tool {
1734 Some(explicit_tool.clone())
1735 } else {
1736 let parent_tool =
1737 dep.get_tool().map(str::to_string).unwrap_or_else(|| {
1738 self.manifest.get_default_tool(resource_type)
1739 });
1740 if self
1741 .manifest
1742 .is_resource_supported(&parent_tool, dep_resource_type)
1743 {
1744 Some(parent_tool)
1745 } else {
1746 Some(self.manifest.get_default_tool(dep_resource_type))
1747 }
1748 };
1749
1750 ResourceDependency::Detailed(Box::new(DetailedDependency {
1751 source: None,
1752 path: crate::utils::normalize_path_for_storage(dep_path_str),
1753 version: None,
1754 branch: None,
1755 rev: None,
1756 command: None,
1757 args: None,
1758 target: None,
1759 filename: None,
1760 dependencies: None,
1761 tool: trans_tool,
1762 flatten: None,
1763 install: dep_spec.install.or(Some(true)),
1764 }))
1765 } else {
1766 // Git-backed transitive dep (parent is Git-backed)
1767 // The resolved path is within the worktree - need to convert back to repo-relative
1768 let source_name = dep.get_source().ok_or_else(|| {
1769 anyhow::anyhow!("Expected source for Git-backed dependency")
1770 })?;
1771 let version = dep.get_version().unwrap_or("main").to_string();
1772 let source_url =
1773 self.source_manager.get_source_url(source_name).ok_or_else(
1774 || anyhow::anyhow!("Source '{source_name}' not found"),
1775 )?;
1776
1777 // Get repo-relative path by stripping the appropriate prefix
1778 let repo_relative = if crate::utils::is_local_path(&source_url) {
1779 // For local directory sources, strip the source path to get relative path
1780 let source_path = PathBuf::from(&source_url).canonicalize()?;
1781 trans_canonical.strip_prefix(&source_path)
1782 .with_context(|| format!(
1783 "Transitive dep resolved outside parent's source directory: {} not under {}",
1784 trans_canonical.display(),
1785 source_path.display()
1786 ))?
1787 .to_path_buf()
1788 } else {
1789 // For Git sources, get worktree and strip it
1790 let sha = self
1791 .prepared_versions
1792 .get(&Self::group_key(source_name, &version))
1793 .ok_or_else(|| {
1794 anyhow::anyhow!(
1795 "Parent version not resolved for {}",
1796 source_name
1797 )
1798 })?
1799 .resolved_commit
1800 .clone();
1801 let worktree_path = self
1802 .cache
1803 .get_or_create_worktree_for_sha(
1804 source_name,
1805 &source_url,
1806 &sha,
1807 None,
1808 )
1809 .await?;
1810
1811 // Canonicalize worktree path to handle symlinks (e.g., /var -> /private/var on macOS)
1812 // and ensure consistent path formats on Windows (\\?\ prefix)
1813 let canonical_worktree =
1814 worktree_path.canonicalize().with_context(|| {
1815 format!(
1816 "Failed to canonicalize worktree path: {}",
1817 worktree_path.display()
1818 )
1819 })?;
1820
1821 trans_canonical.strip_prefix(&canonical_worktree)
1822 .with_context(|| format!(
1823 "Transitive dep resolved outside parent's worktree: {} not under {}",
1824 trans_canonical.display(),
1825 canonical_worktree.display()
1826 ))?
1827 .to_path_buf()
1828 };
1829
1830 // Determine tool for transitive dependency
1831 let trans_tool = if let Some(explicit_tool) = &dep_spec.tool {
1832 Some(explicit_tool.clone())
1833 } else {
1834 let parent_tool =
1835 dep.get_tool().map(str::to_string).unwrap_or_else(|| {
1836 self.manifest.get_default_tool(resource_type)
1837 });
1838 if self
1839 .manifest
1840 .is_resource_supported(&parent_tool, dep_resource_type)
1841 {
1842 Some(parent_tool)
1843 } else {
1844 Some(self.manifest.get_default_tool(dep_resource_type))
1845 }
1846 };
1847
1848 ResourceDependency::Detailed(Box::new(DetailedDependency {
1849 source: Some(source_name.to_string()),
1850 path: crate::utils::normalize_path_for_storage(
1851 repo_relative.to_string_lossy().to_string(),
1852 ),
1853 version: dep_spec
1854 .version
1855 .clone()
1856 .or_else(|| dep.get_version().map(|v| v.to_string())),
1857 branch: None,
1858 rev: None,
1859 command: None,
1860 args: None,
1861 target: None,
1862 filename: None,
1863 dependencies: None,
1864 tool: trans_tool,
1865 flatten: None,
1866 install: dep_spec.install.or(Some(true)),
1867 }))
1868 };
1869
1870 // Generate a name for the transitive dependency
1871 // Use explicit name from DependencySpec if provided, otherwise derive from path
1872 let trans_name = dep_spec
1873 .name
1874 .clone()
1875 .unwrap_or_else(|| self.generate_dependency_name(trans_dep.get_path()));
1876
1877 // Add to graph (use source-aware nodes to prevent false cycles)
1878 let trans_source =
1879 trans_dep.get_source().map(std::string::ToString::to_string);
1880 let trans_tool = trans_dep.get_tool().map(std::string::ToString::to_string);
1881 let from_node =
1882 DependencyNode::with_source(resource_type, &name, source.clone());
1883 let to_node = DependencyNode::with_source(
1884 dep_resource_type,
1885 &trans_name,
1886 trans_source.clone(),
1887 );
1888 graph.add_dependency(from_node.clone(), to_node.clone());
1889
1890 // Track in dependency map (use singular form from enum for dependency references)
1891 // Include source to prevent cross-source contamination
1892 let from_key = (resource_type, name.clone(), source.clone(), tool.clone());
1893 let dep_ref = format!("{dep_resource_type}/{trans_name}");
1894 self.dependency_map.entry(from_key).or_default().push(dep_ref);
1895
1896 // Add to conflict detector for tracking version requirements
1897 self.add_to_conflict_detector(&trans_name, &trans_dep, &name);
1898
1899 // Check for version conflicts and resolve them
1900 let trans_key = (
1901 dep_resource_type,
1902 trans_name.clone(),
1903 trans_source.clone(),
1904 trans_tool.clone(),
1905 );
1906
1907 if let Some(existing_dep) = all_deps.get(&trans_key) {
1908 // Version conflict detected (same name and source, different version)
1909 let resolved_dep = self
1910 .resolve_version_conflict(
1911 &trans_name,
1912 existing_dep,
1913 &trans_dep,
1914 &name, // Who requires this version
1915 )
1916 .await?;
1917
1918 // Only re-queue if the resolved version differs from existing
1919 let needs_reprocess =
1920 resolved_dep.get_version() != existing_dep.get_version();
1921
1922 all_deps.insert(trans_key.clone(), resolved_dep.clone());
1923
1924 if needs_reprocess {
1925 // Remove from processed so the resolved version can be processed
1926 // This ensures we fetch metadata for the correct version
1927 processed.remove(&trans_key);
1928 // Re-queue the resolved dependency
1929 queue.push((
1930 trans_name.clone(),
1931 resolved_dep,
1932 Some(dep_resource_type),
1933 ));
1934 }
1935 } else {
1936 // No conflict, add the dependency
1937 tracing::debug!(
1938 "Adding transitive dep '{}' to all_deps and queue (parent: {})",
1939 trans_name,
1940 name
1941 );
1942 all_deps.insert(trans_key.clone(), trans_dep.clone());
1943 queue.push((trans_name, trans_dep, Some(dep_resource_type)));
1944 }
1945 }
1946 }
1947 }
1948 }
1949
1950 // Check for circular dependencies
1951 graph.detect_cycles()?;
1952
1953 // Get topological order for dependencies that have relationships
1954 let ordered_nodes = graph.topological_order()?;
1955
1956 // Build result: start with topologically ordered dependencies
1957 let mut result = Vec::new();
1958 let mut added_keys = HashSet::new();
1959
1960 tracing::debug!(
1961 "Transitive resolution - topological order has {} nodes, all_deps has {} entries",
1962 ordered_nodes.len(),
1963 all_deps.len()
1964 );
1965
1966 for node in ordered_nodes {
1967 tracing::debug!(
1968 "Processing ordered node: {}/{} (source: {:?})",
1969 node.resource_type,
1970 node.name,
1971 node.source
1972 );
1973 // Find matching dependency - now that nodes include source, we can match precisely
1974 for (key, dep) in &all_deps {
1975 if key.0 == node.resource_type && key.1 == node.name && key.2 == node.source {
1976 tracing::debug!(
1977 " -> Found match in all_deps, adding to result with type {:?}",
1978 node.resource_type
1979 );
1980 result.push((node.name.clone(), dep.clone(), node.resource_type));
1981 added_keys.insert(key.clone());
1982 break; // Exact match found, no need to continue
1983 }
1984 }
1985 }
1986
1987 // Add remaining dependencies that weren't in the graph (no transitive deps)
1988 // These can be added in any order since they have no dependencies
1989 // IMPORTANT: Filter out patterns - they should only serve as expansion points,
1990 // not final dependencies. The concrete deps from expansion are what we want.
1991 for (key, dep) in all_deps {
1992 if !added_keys.contains(&key) {
1993 // Skip pattern dependencies - they were expanded to concrete deps
1994 if dep.is_pattern() {
1995 tracing::debug!(
1996 "Skipping pattern dependency in final result: {}/{} (source: {:?})",
1997 key.0,
1998 key.1,
1999 key.2
2000 );
2001 continue;
2002 }
2003
2004 tracing::debug!(
2005 "Adding non-graph dependency: {}/{} (source: {:?}) with type {:?}",
2006 key.0,
2007 key.1,
2008 key.2,
2009 key.0
2010 );
2011 result.push((key.1.clone(), dep.clone(), key.0));
2012 }
2013 }
2014
2015 tracing::debug!("Transitive resolution returning {} dependencies", result.len());
2016
2017 Ok(result)
2018 }
2019
2020 /// Fetch the content of a resource for metadata extraction.
2021 async fn fetch_resource_content(
2022 &mut self,
2023 _name: &str,
2024 dep: &ResourceDependency,
2025 ) -> Result<String> {
2026 match dep {
2027 ResourceDependency::Simple(path) => {
2028 // Local file - resolve relative to manifest directory
2029 let manifest_dir = self.manifest.manifest_dir.as_ref().ok_or_else(|| {
2030 anyhow::anyhow!("Manifest directory not available for Simple dependency")
2031 })?;
2032 let full_path =
2033 crate::utils::resolve_path_relative_to_manifest(manifest_dir, path)?;
2034 std::fs::read_to_string(&full_path)
2035 .with_context(|| format!("Failed to read local file: {}", full_path.display()))
2036 }
2037 ResourceDependency::Detailed(detailed) => {
2038 if let Some(source_name) = &detailed.source {
2039 let source_url = self
2040 .source_manager
2041 .get_source_url(source_name)
2042 .ok_or_else(|| anyhow::anyhow!("Source '{source_name}' not found"))?;
2043
2044 // Check if this is a local directory source
2045 if crate::utils::is_local_path(&source_url) {
2046 // Local directory source - read directly from path
2047 let file_path = PathBuf::from(&source_url).join(&detailed.path);
2048 std::fs::read_to_string(&file_path).with_context(|| {
2049 format!("Failed to read local file: {}", file_path.display())
2050 })
2051 } else {
2052 // Git-based remote dependency - need to checkout and read
2053 // Use get_version() to respect rev > branch > version precedence
2054 let version = dep.get_version().unwrap_or("main").to_string();
2055
2056 // Check if we already have this version resolved
2057 let sha = if let Some(prepared) =
2058 self.prepared_versions.get(&Self::group_key(source_name, &version))
2059 {
2060 prepared.resolved_commit.clone()
2061 } else {
2062 // Need to resolve this version
2063 self.version_resolver.add_version(
2064 source_name,
2065 &source_url,
2066 Some(&version),
2067 );
2068 self.version_resolver.resolve_all().await?;
2069
2070 let resolved_sha = self
2071 .version_resolver
2072 .get_resolved_sha(source_name, &version)
2073 .ok_or_else(|| {
2074 anyhow::anyhow!(
2075 "Failed to resolve version for {source_name} @ {version}"
2076 )
2077 })?;
2078
2079 // Cache the resolved version for nested transitive dependency resolution
2080 // Note: worktree_path will be set when get_or_create_worktree_for_sha is called below
2081 self.prepared_versions.insert(
2082 Self::group_key(source_name, &version),
2083 PreparedSourceVersion {
2084 worktree_path: PathBuf::new(), // Placeholder, will be set after worktree creation
2085 resolved_version: Some(version.clone()),
2086 resolved_commit: resolved_sha.clone(),
2087 },
2088 );
2089
2090 resolved_sha
2091 };
2092
2093 // Get worktree for this SHA
2094 let worktree_path = self
2095 .cache
2096 .get_or_create_worktree_for_sha(source_name, &source_url, &sha, None)
2097 .await?;
2098
2099 // Read the file from worktree (with cache coherency retry)
2100 let file_path = worktree_path.join(&detailed.path);
2101 read_with_cache_retry_sync(&file_path)
2102 }
2103 } else {
2104 // Local dependency with detailed spec - resolve relative to manifest directory
2105 let manifest_dir = self.manifest.manifest_dir.as_ref().ok_or_else(|| {
2106 anyhow::anyhow!(
2107 "Manifest directory not available for local Detailed dependency"
2108 )
2109 })?;
2110 let full_path = crate::utils::resolve_path_relative_to_manifest(
2111 manifest_dir,
2112 &detailed.path,
2113 )?;
2114 std::fs::read_to_string(&full_path).with_context(|| {
2115 format!("Failed to read local file: {}", full_path.display())
2116 })
2117 }
2118 }
2119 }
2120 }
2121
2122 /// Gets the canonical file path for a dependency (unified for Git and path-only).
2123 ///
2124 /// For path-only deps: resolves from manifest directory
2125 /// For Git-backed deps: resolves from worktree path
2126 async fn get_canonical_path_for_dependency(
2127 &mut self,
2128 dep: &ResourceDependency,
2129 ) -> Result<PathBuf> {
2130 if dep.get_source().is_none() {
2131 // Path-only: resolve from manifest directory
2132 let manifest_dir = self
2133 .manifest
2134 .manifest_dir
2135 .as_ref()
2136 .ok_or_else(|| anyhow::anyhow!("Manifest directory not available"))?;
2137 crate::utils::resolve_path_relative_to_manifest(manifest_dir, dep.get_path())
2138 } else {
2139 // Git-backed: get worktree path and join with repo-relative path
2140 let source_name = dep
2141 .get_source()
2142 .ok_or_else(|| anyhow::anyhow!("Cannot get worktree for path-only dependency"))?;
2143 let version = dep.get_version().unwrap_or("main").to_string();
2144
2145 // Get source URL
2146 let source_url = self
2147 .source_manager
2148 .get_source_url(source_name)
2149 .ok_or_else(|| anyhow::anyhow!("Source '{source_name}' not found"))?;
2150
2151 // Check if this is a local directory source (not a Git repo)
2152 if crate::utils::is_local_path(&source_url) {
2153 // Local directory source - resolve directly from source path
2154 let file_path = PathBuf::from(&source_url).join(dep.get_path());
2155 file_path.canonicalize().with_context(|| {
2156 format!("Failed to canonicalize local source resource: {}", file_path.display())
2157 })
2158 } else {
2159 // Git-backed: resolve from worktree
2160 // Get the resolved SHA
2161 let sha = if let Some(prepared) =
2162 self.prepared_versions.get(&Self::group_key(source_name, &version))
2163 {
2164 prepared.resolved_commit.clone()
2165 } else {
2166 // Need to resolve this version
2167 self.version_resolver.add_version(source_name, &source_url, Some(&version));
2168 self.version_resolver.resolve_all().await?;
2169
2170 self.version_resolver.get_resolved_sha(source_name, &version).ok_or_else(
2171 || {
2172 anyhow::anyhow!(
2173 "Failed to resolve version for {source_name} @ {version}"
2174 )
2175 },
2176 )?
2177 };
2178
2179 // Get worktree path
2180 let worktree_path = self
2181 .cache
2182 .get_or_create_worktree_for_sha(source_name, &source_url, &sha, None)
2183 .await?;
2184
2185 // Join with repo-relative path and canonicalize
2186 let full_path = worktree_path.join(dep.get_path());
2187 full_path.canonicalize().with_context(|| {
2188 format!("Failed to canonicalize Git resource: {}", full_path.display())
2189 })
2190 }
2191 }
2192 }
2193
    /// Expands a pattern dependency to concrete (non-pattern) dependencies.
    ///
    /// This method is used during transitive dependency resolution to handle
    /// glob patterns declared in resource frontmatter. It expands the pattern
    /// to all matching files and creates a concrete ResourceDependency for each,
    /// inheriting `tool`, `target`, and `flatten` from the parent pattern.
    ///
    /// Local patterns are matched against the filesystem; Git-backed patterns
    /// are matched against a previously prepared worktree (this method does NOT
    /// create worktrees itself — the prepared state must already exist).
    ///
    /// # Arguments
    ///
    /// * `name` - The dependency name (used for logging)
    /// * `dep` - The pattern-based dependency to expand
    /// * `resource_type` - The resource type (for path construction)
    ///
    /// # Returns
    ///
    /// A vector of (name, ResourceDependency) tuples for each matched file.
    ///
    /// # Errors
    ///
    /// Fails when a Git-backed pattern has no source, when its prepared
    /// worktree state is missing, or when pattern resolution itself fails.
    async fn expand_pattern_to_concrete_deps(
        &self,
        _name: &str,
        dep: &ResourceDependency,
        _resource_type: crate::core::ResourceType,
    ) -> Result<Vec<(String, ResourceDependency)>> {
        use crate::manifest::DetailedDependency;

        let pattern = dep.get_path();

        if dep.is_local() {
            // Local pattern dependency - search in filesystem
            // For absolute patterns, use the parent directory as base and strip the pattern to just the filename part
            // For relative patterns, use current directory
            let pattern_path = Path::new(pattern);
            let (base_path, search_pattern) = if pattern_path.is_absolute() {
                // Absolute pattern: extract base directory and relative pattern
                // Example: "/tmp/xyz/agents/*.md" -> base="/tmp/xyz", pattern="agents/*.md"
                let components: Vec<_> = pattern_path.components().collect();

                // Find the first component with a glob character
                let glob_idx = components.iter().position(|c| {
                    let s = c.as_os_str().to_string_lossy();
                    s.contains('*') || s.contains('?') || s.contains('[')
                });

                if let Some(idx) = glob_idx {
                    // Split at the glob component: everything before it is the
                    // literal base directory, everything from it on is the glob.
                    let base_components = &components[..idx];
                    let pattern_components = &components[idx..];

                    let base: PathBuf = base_components.iter().collect();
                    let pattern: String = pattern_components
                        .iter()
                        .map(|c| c.as_os_str().to_string_lossy())
                        .collect::<Vec<_>>()
                        .join("/");

                    (base, pattern)
                } else {
                    // No glob characters, use as-is
                    (PathBuf::from("."), pattern.to_string())
                }
            } else {
                // Relative pattern, use current directory
                (PathBuf::from("."), pattern.to_string())
            };

            let pattern_resolver = crate::pattern::PatternResolver::new();
            let matches = pattern_resolver.resolve(&search_pattern, &base_path)?;

            // Get tool, target, and flatten from parent pattern dependency
            let (tool, target, flatten) = match dep {
                ResourceDependency::Detailed(d) => (d.tool.clone(), d.target.clone(), d.flatten),
                _ => (None, None, None),
            };

            let mut concrete_deps = Vec::new();
            for matched_path in matches {
                let resource_name = crate::pattern::extract_resource_name(&matched_path);
                // Convert matched path to absolute by joining with base_path
                let absolute_path = base_path.join(&matched_path);
                let concrete_path = absolute_path.to_string_lossy().to_string();

                // Create a non-pattern Detailed dependency, inheriting tool, target, and flatten from parent
                concrete_deps.push((
                    resource_name,
                    ResourceDependency::Detailed(Box::new(DetailedDependency {
                        source: None,
                        path: concrete_path,
                        version: None,
                        branch: None,
                        rev: None,
                        command: None,
                        args: None,
                        target: target.clone(),
                        filename: None,
                        dependencies: None,
                        tool: tool.clone(),
                        flatten,
                        install: None,
                    })),
                ));
            }

            Ok(concrete_deps)
        } else {
            // Git-based remote dependency - need worktree
            let source_name = dep
                .get_source()
                .ok_or_else(|| anyhow::anyhow!("Pattern dependency missing source"))?;

            // Get the prepared worktree for this source/version
            // NOTE(review): the missing-version default here is "HEAD", while the
            // sibling methods (fetch_resource_content, get_canonical_path_for_dependency)
            // default to "main". If prepared_versions was keyed with "main" for a
            // version-less dep, this lookup would miss — confirm the intended default.
            let version_key = dep
                .get_version()
                .map_or_else(|| "HEAD".to_string(), std::string::ToString::to_string);
            let prepared_key = Self::group_key(source_name, &version_key);

            let prepared = self.prepared_versions.get(&prepared_key).ok_or_else(|| {
                anyhow::anyhow!(
                    "Prepared state missing for source '{}' @ '{}'. Pattern expansion requires prepared worktree.",
                    source_name,
                    version_key
                )
            })?;

            // NOTE(review): fetch_resource_content can insert entries with a
            // placeholder (empty) worktree_path; if such an entry is hit here,
            // pattern resolution would run against an empty base path — verify
            // the placeholder is always backfilled before expansion.
            let repo_path = &prepared.worktree_path;

            // Search for matching files in the repository
            let pattern_resolver = crate::pattern::PatternResolver::new();
            let matches = pattern_resolver.resolve(pattern, Path::new(repo_path))?;

            // Create concrete dependencies for each match
            let mut concrete_deps = Vec::new();
            for matched_path in matches {
                let resource_name = crate::pattern::extract_resource_name(&matched_path);

                // Use the matched path as-is (it's already relative to repo root)
                // The matched path includes the resource type directory (e.g., "snippets/helper-one.md")
                let concrete_path = matched_path.to_string_lossy().to_string();

                // Get tool, target, and flatten from parent
                let (tool, target, flatten) = match dep {
                    ResourceDependency::Detailed(d) => {
                        (d.tool.clone(), d.target.clone(), d.flatten)
                    }
                    _ => (None, None, None),
                };

                // Create a non-pattern Detailed dependency, inheriting tool, target, and flatten from parent
                concrete_deps.push((
                    resource_name,
                    ResourceDependency::Detailed(Box::new(DetailedDependency {
                        source: Some(source_name.to_string()),
                        path: concrete_path,
                        version: dep.get_version().map(std::string::ToString::to_string),
                        branch: None,
                        rev: None,
                        command: None,
                        args: None,
                        target, // Inherit custom target from parent pattern
                        filename: None,
                        dependencies: None,
                        tool,
                        flatten,
                        install: None,
                    })),
                ));
            }

            Ok(concrete_deps)
        }
    }
2362
2363 /// Generate a dependency name from a path.
2364 /// Creates collision-resistant names by preserving directory structure.
2365 fn generate_dependency_name(&self, path: &str) -> String {
2366 // Convert path to a collision-resistant name
2367 // Example: "agents/ai/helper.md" -> "ai/helper"
2368 // Example: "snippets/commands/commit.md" -> "commands/commit"
2369 // Example: "commit.md" -> "commit"
2370 // Example (absolute): "/private/tmp/shared/snippets/utils.md" -> "/private/tmp/shared/snippets/utils"
2371 // Example (Windows absolute): "C:/team/tools/foo.md" -> "C:/team/tools/foo"
2372 // Example (parent-relative): "../shared/utils.md" -> "../shared/utils"
2373
2374 let path = Path::new(path);
2375
2376 // Get the path without extension
2377 let without_ext = path.with_extension("");
2378
2379 // Convert to string and normalize separators to forward slashes
2380 // This ensures consistent behavior on Windows where Path::to_string_lossy()
2381 // produces backslashes, which would break our split('/') logic below
2382 let path_str = without_ext.to_string_lossy().replace('\\', "/");
2383
2384 // Check if this is an absolute path or starts with ../ (cross-directory)
2385 // Note: With the fix to always use manifest-relative paths (even with ../),
2386 // lockfiles should never contain absolute paths. We check path.is_absolute()
2387 // defensively for manually-edited lockfiles.
2388 let is_absolute = path.is_absolute();
2389 let is_cross_directory = path_str.starts_with("../");
2390
2391 // If the path has multiple components, skip the first directory (resource type)
2392 // to avoid redundancy, but keep subdirectories for uniqueness
2393 // EXCEPTIONS that keep all components to avoid collisions:
2394 // 1. Absolute paths (e.g., C:/team/tools/foo.md vs D:/team/tools/foo.md)
2395 // 2. Cross-directory paths with ../ (e.g., ../shared/a.md vs ../other/a.md)
2396 let components: Vec<&str> = path_str.split('/').collect();
2397 if is_absolute || is_cross_directory {
2398 // Keep all components to avoid collisions
2399 path_str.to_string()
2400 } else if components.len() > 1 {
2401 // Regular relative path: skip the first component (e.g., "agents", "snippets") and join the rest
2402 components[1..].join("/")
2403 } else {
2404 // Single component, just return it
2405 components[0].to_string()
2406 }
2407 }
2408
2409 /// Resolve all manifest dependencies into a deterministic lockfile.
2410 ///
2411 /// This is the primary entry point for dependency resolution. It resolves all
2412 /// dependencies from the manifest (including transitive dependencies) and
2413 /// generates a complete lockfile with resolved versions and commit SHAs.
2414 ///
2415 /// By default, this method enables transitive dependency resolution. Resources
2416 /// can declare their own dependencies via YAML frontmatter (Markdown) or JSON
2417 /// fields, which will be automatically discovered and resolved.
2418 ///
2419 /// # Transitive Dependency Resolution
2420 ///
2421 /// When enabled (default), the resolver:
2422 /// 1. Resolves direct manifest dependencies
2423 /// 2. Extracts dependency metadata from resource files
2424 /// 3. Builds a dependency graph with cycle detection
2425 /// 4. Resolves transitive dependencies in topological order
2426 ///
2427 /// # Returns
2428 ///
2429 /// A complete [`LockFile`] with all resolved dependencies including:
2430 /// - Resolved commit SHAs for reproducible installations
2431 /// - Checksums for integrity verification
2432 /// - Installation paths for all resources
2433 /// - Source repository information
2434 ///
2435 /// # Errors
2436 ///
2437 /// Returns an error if:
2438 /// - Source repositories cannot be accessed
2439 /// - Version constraints cannot be satisfied
2440 /// - Circular dependencies are detected
2441 /// - Resource files cannot be read or parsed
2442 ///
2443 /// # Example
2444 ///
2445 /// ```rust,no_run
2446 /// # use agpm_cli::resolver::DependencyResolver;
2447 /// # use agpm_cli::manifest::Manifest;
2448 /// # use agpm_cli::cache::Cache;
2449 /// # async fn example() -> anyhow::Result<()> {
2450 /// let manifest = Manifest::load("agpm.toml".as_ref())?;
2451 /// let cache = Cache::new()?;
2452 /// let mut resolver = DependencyResolver::new(manifest, cache)?;
2453 ///
2454 /// // Resolve all dependencies including transitive ones
2455 /// let lockfile = resolver.resolve().await?;
2456 ///
2457 /// lockfile.save("agpm.lock".as_ref())?;
2458 /// println!("Resolved {} total resources",
2459 /// lockfile.agents.len() + lockfile.snippets.len());
2460 /// # Ok(())
2461 /// # }
2462 /// ```
2463 pub async fn resolve(&mut self) -> Result<LockFile> {
2464 self.resolve_with_options(true).await
2465 }
2466
2467 /// Resolve dependencies with configurable transitive dependency support.
2468 ///
2469 /// This method provides fine-grained control over dependency resolution behavior,
2470 /// allowing you to disable transitive dependency resolution when needed. This is
2471 /// useful for debugging, testing, or when you want to install only direct
2472 /// dependencies without their transitive requirements.
2473 ///
2474 /// # Arguments
2475 ///
2476 /// * `enable_transitive` - Whether to resolve transitive dependencies
2477 /// - `true`: Full transitive resolution (default behavior)
2478 /// - `false`: Only direct manifest dependencies
2479 ///
2480 /// # Transitive Resolution Details
2481 ///
2482 /// When `enable_transitive` is `true`:
2483 /// - Resources are checked for embedded dependency metadata
2484 /// - Markdown files (.md): YAML frontmatter between `---` delimiters
2485 /// - JSON files (.json): Top-level `dependencies` field
2486 /// - Dependency graph is built with cycle detection
2487 /// - Dependencies are resolved in topological order
2488 ///
2489 /// When `enable_transitive` is `false`:
2490 /// - Only dependencies explicitly declared in `agpm.toml` are resolved
2491 /// - Resource metadata is not extracted or processed
2492 /// - Faster resolution for known dependency trees
2493 ///
2494 /// # Returns
2495 ///
2496 /// A [`LockFile`] containing all resolved dependencies according to the
2497 /// configuration. When transitive resolution is disabled, the lockfile will
2498 /// only contain direct dependencies from the manifest.
2499 ///
2500 /// # Errors
2501 ///
2502 /// Returns an error if:
2503 /// - Source repositories are inaccessible or invalid
2504 /// - Version constraints conflict or cannot be satisfied
2505 /// - Circular dependencies are detected (when `enable_transitive` is true)
2506 /// - Resource files cannot be read or contain invalid metadata
2507 /// - Network operations fail during source synchronization
2508 ///
2509 /// # Performance Considerations
2510 ///
2511 /// Disabling transitive resolution (`enable_transitive = false`) can improve
2512 /// performance when:
2513 /// - You know all required dependencies are explicitly listed
2514 /// - Testing specific dependency combinations
2515 /// - Debugging dependency resolution issues
2516 /// - Working with large resources that have expensive metadata extraction
2517 ///
2518 /// # Example
2519 ///
2520 /// ```rust,no_run
2521 /// # use agpm_cli::resolver::DependencyResolver;
2522 /// # use agpm_cli::manifest::Manifest;
2523 /// # use agpm_cli::cache::Cache;
2524 /// # async fn example() -> anyhow::Result<()> {
2525 /// let manifest = Manifest::load("agpm.toml".as_ref())?;
2526 /// let cache = Cache::new()?;
2527 /// let mut resolver = DependencyResolver::new(manifest, cache)?;
2528 ///
2529 /// // Resolve only direct dependencies without transitive resolution
2530 /// let lockfile = resolver.resolve_with_options(false).await?;
2531 ///
2532 /// println!("Resolved {} direct dependencies",
2533 /// lockfile.agents.len() + lockfile.snippets.len());
2534 /// # Ok(())
2535 /// # }
2536 /// ```
2537 ///
2538 /// # See Also
2539 ///
2540 /// - [`resolve()`]: Convenience method that enables transitive resolution by default
2541 /// - [`DependencyGraph`]: Graph structure used for cycle detection and ordering
2542 /// - [`DependencySpec`](crate::manifest::DependencySpec): Specification format for transitive dependencies
2543 ///
2544 /// [`resolve()`]: DependencyResolver::resolve
2545 pub async fn resolve_with_options(&mut self, enable_transitive: bool) -> Result<LockFile> {
2546 let mut lockfile = LockFile::new();
2547
2548 // Add sources to lockfile
2549 for (name, url) in &self.manifest.sources {
2550 lockfile.add_source(name.clone(), url.clone(), String::new());
2551 }
2552
2553 // Get all dependencies with their types to avoid mis-typing same-named resources
2554 let base_deps: Vec<(String, ResourceDependency, crate::core::ResourceType)> = self
2555 .manifest
2556 .all_dependencies_with_types()
2557 .into_iter()
2558 .map(|(name, dep, resource_type)| (name.to_string(), dep.into_owned(), resource_type))
2559 .collect();
2560
2561 // Add direct dependencies to conflict detector
2562 for (name, dep, _) in &base_deps {
2563 self.add_to_conflict_detector(name, dep, "manifest");
2564 }
2565
2566 // Show initial message about what we're doing
2567 // Sync sources (phase management is handled by caller)
2568 // prepare_remote_groups only needs name and dep, not type
2569 let base_deps_for_prep: Vec<(String, ResourceDependency)> =
2570 base_deps.iter().map(|(name, dep, _)| (name.clone(), dep.clone())).collect();
2571 self.prepare_remote_groups(&base_deps_for_prep).await?;
2572
2573 // Resolve transitive dependencies if enabled
2574 let deps = self.resolve_transitive_dependencies(&base_deps, enable_transitive).await?;
2575
2576 // Resolve each dependency (including transitive ones)
2577 tracing::debug!("resolve_with_options - about to resolve {} dependencies", deps.len());
2578 for (name, dep, resource_type) in &deps {
2579 tracing::debug!(
2580 "Resolving dependency: {} -> {} (type: {:?})",
2581 name,
2582 dep.get_path(),
2583 resource_type
2584 );
2585 // Progress is tracked at the phase level
2586
2587 // Check if this is a pattern dependency
2588 if dep.is_pattern() {
2589 // Pattern dependencies resolve to multiple resources
2590 let entries = self.resolve_pattern_dependency(name, dep, *resource_type).await?;
2591
2592 // Add each resolved entry to the appropriate resource type with deduplication
2593 // Resource type was already determined during transitive resolution
2594 for entry in entries {
2595 match *resource_type {
2596 crate::core::ResourceType::Agent => {
2597 // Match by (name, source) to allow same-named resources from different sources
2598 if let Some(existing) = lockfile
2599 .agents
2600 .iter_mut()
2601 .find(|e| e.name == entry.name && e.source == entry.source)
2602 {
2603 *existing = entry;
2604 } else {
2605 lockfile.agents.push(entry);
2606 }
2607 }
2608 crate::core::ResourceType::Snippet => {
2609 if let Some(existing) = lockfile
2610 .snippets
2611 .iter_mut()
2612 .find(|e| e.name == entry.name && e.source == entry.source)
2613 {
2614 *existing = entry;
2615 } else {
2616 lockfile.snippets.push(entry);
2617 }
2618 }
2619 crate::core::ResourceType::Command => {
2620 if let Some(existing) = lockfile
2621 .commands
2622 .iter_mut()
2623 .find(|e| e.name == entry.name && e.source == entry.source)
2624 {
2625 *existing = entry;
2626 } else {
2627 lockfile.commands.push(entry);
2628 }
2629 }
2630 crate::core::ResourceType::Script => {
2631 if let Some(existing) = lockfile
2632 .scripts
2633 .iter_mut()
2634 .find(|e| e.name == entry.name && e.source == entry.source)
2635 {
2636 *existing = entry;
2637 } else {
2638 lockfile.scripts.push(entry);
2639 }
2640 }
2641 crate::core::ResourceType::Hook => {
2642 if let Some(existing) = lockfile
2643 .hooks
2644 .iter_mut()
2645 .find(|e| e.name == entry.name && e.source == entry.source)
2646 {
2647 *existing = entry;
2648 } else {
2649 lockfile.hooks.push(entry);
2650 }
2651 }
2652 crate::core::ResourceType::McpServer => {
2653 if let Some(existing) = lockfile
2654 .mcp_servers
2655 .iter_mut()
2656 .find(|e| e.name == entry.name && e.source == entry.source)
2657 {
2658 *existing = entry;
2659 } else {
2660 lockfile.mcp_servers.push(entry);
2661 }
2662 }
2663 }
2664 }
2665 } else {
2666 // Regular single dependency
2667 // Pass the resource type from transitive resolution to ensure correct type
2668 // This handles cases where transitive dependencies share the same name
2669 // as direct manifest dependencies but have a different type
2670 let entry = self.resolve_dependency(name, dep, *resource_type).await?;
2671 tracing::debug!(
2672 "Resolved {} to resource_type={:?}, installed_at={}",
2673 name,
2674 entry.resource_type,
2675 entry.installed_at
2676 );
2677 // Add directly to lockfile to preserve (name, source) uniqueness
2678 self.add_or_update_lockfile_entry(&mut lockfile, name, entry);
2679 }
2680
2681 // Progress is tracked by updating messages, no need to increment
2682 }
2683
2684 // Progress is tracked at the phase level
2685
2686 // Progress completion is handled by the caller
2687
2688 // Detect version conflicts before creating lockfile
2689 let conflicts = self.conflict_detector.detect_conflicts();
2690 if !conflicts.is_empty() {
2691 let mut error_msg = String::from("Version conflicts detected:\n\n");
2692 for conflict in &conflicts {
2693 error_msg.push_str(&format!("{conflict}\n"));
2694 }
2695 return Err(AgpmError::Other {
2696 message: error_msg,
2697 }
2698 .into());
2699 }
2700
2701 // Post-process dependencies to add version information
2702 self.add_version_to_dependencies(&mut lockfile)?;
2703
2704 // Detect target-path conflicts before finalizing
2705 self.detect_target_conflicts(&lockfile)?;
2706
2707 Ok(lockfile)
2708 }
2709
2710 /// Resolves a single dependency to a lockfile entry.
2711 ///
2712 /// This internal method handles the resolution of one dependency, including
2713 /// source synchronization, version resolution, and entry creation.
2714 ///
2715 /// # Algorithm
2716 ///
2717 /// For local dependencies:
2718 /// 1. Validate the path format
2719 /// 2. Determine installation location based on resource type
2720 /// 3. Preserve relative directory structure from source path
2721 /// 4. Create entry with relative path (no source sync required)
2722 ///
2723 /// For remote dependencies:
2724 /// 1. Validate source exists in manifest or global config
2725 /// 2. Synchronize source repository (clone or fetch)
2726 /// 3. Resolve version constraint to specific commit
2727 /// 4. Preserve relative directory structure from dependency path
2728 /// 5. Create entry with resolved commit and source information
2729 ///
2730 /// # Parameters
2731 ///
2732 /// - `name`: The dependency name from the manifest
2733 /// - `dep`: The dependency specification with source, path, and version
2734 ///
2735 /// # Returns
2736 ///
2737 /// A [`LockedResource`] with:
2738 /// - Resolved commit hash (for remote dependencies)
2739 /// - Source and URL information
2740 /// - Installation path in the project
2741 /// - Empty checksum (computed during actual installation)
2742 ///
2743 /// # Errors
2744 ///
2745 /// Returns an error if:
2746 /// - Source is not found in manifest or global config
2747 /// - Source repository cannot be cloned or accessed
2748 /// - Version constraint cannot be resolved (tag/branch not found)
2749 /// - Git operations fail due to network or authentication issues
2750 ///
2751 /// [`LockedResource`]: crate::lockfile::LockedResource
2752 async fn resolve_dependency(
2753 &mut self,
2754 name: &str,
2755 dep: &ResourceDependency,
2756 resource_type: crate::core::ResourceType,
2757 ) -> Result<LockedResource> {
2758 // Check if this is a pattern-based dependency
2759 if dep.is_pattern() {
2760 // Pattern dependencies resolve to multiple resources
2761 // This should be handled by a separate method
2762 return Err(anyhow::anyhow!(
2763 "Pattern dependency '{name}' should be resolved using resolve_pattern_dependency"
2764 ));
2765 }
2766
2767 if dep.is_local() {
2768 // Local dependency - just create entry with path
2769 // Resource type is passed as a parameter to ensure correct type resolution
2770 // for both direct and transitive dependencies
2771
2772 // Determine the filename to use
2773 let filename = if let Some(custom_filename) = dep.get_filename() {
2774 // Use custom filename as-is (includes extension)
2775 custom_filename.to_string()
2776 } else {
2777 // Extract meaningful path structure from dependency path
2778 extract_meaningful_path(Path::new(dep.get_path()))
2779 };
2780
2781 // Determine artifact type - use get_tool() method, then apply defaults
2782 let artifact_type_string = dep
2783 .get_tool()
2784 .map(|s| s.to_string())
2785 .unwrap_or_else(|| self.manifest.get_default_tool(resource_type));
2786 let artifact_type = artifact_type_string.as_str();
2787
2788 // For local resources without a source, just use the name (no version suffix)
2789 let unique_name = name.to_string();
2790
2791 // Hooks and MCP servers are configured in config files, not installed as artifact files
2792 let installed_at = match resource_type {
2793 crate::core::ResourceType::Hook | crate::core::ResourceType::McpServer => {
2794 // Use configured merge target, with fallback to hardcoded defaults
2795 if let Some(merge_target) =
2796 self.manifest.get_merge_target(artifact_type, resource_type)
2797 {
2798 normalize_path_for_storage(merge_target.display().to_string())
2799 } else {
2800 // Fallback to hardcoded defaults if not configured
2801 match resource_type {
2802 crate::core::ResourceType::Hook => {
2803 ".claude/settings.local.json".to_string()
2804 }
2805 crate::core::ResourceType::McpServer => {
2806 if artifact_type == "opencode" {
2807 ".opencode/opencode.json".to_string()
2808 } else {
2809 ".mcp.json".to_string()
2810 }
2811 }
2812 _ => unreachable!(),
2813 }
2814 }
2815 }
2816 _ => {
2817 // For regular resources, get the artifact path
2818 let artifact_path = self
2819 .manifest
2820 .get_artifact_resource_path(artifact_type, resource_type)
2821 .ok_or_else(|| {
2822 // Provide helpful error message with context
2823 let base_msg = format!(
2824 "Resource type '{}' is not supported by tool '{}' for dependency '{}'",
2825 resource_type, artifact_type, name
2826 );
2827
2828 // Check if this looks like a tool name was used as resource type
2829 let resource_type_str = resource_type.to_string();
2830 let hint = if ["claude-code", "opencode", "agpm"].contains(&resource_type_str.as_str()) {
2831 format!(
2832 "\n\nIt looks like '{}' is a tool name, not a resource type.\n\
2833 In transitive dependencies, use resource types (agents, snippets, commands)\n\
2834 as section headers, then specify 'tool: {}' within each dependency.",
2835 resource_type_str, resource_type_str
2836 )
2837 } else {
2838 format!(
2839 "\n\nValid resource types: agent, command, snippet, hook, mcp-server, script\n\
2840 Source file: {}",
2841 dep.get_path()
2842 )
2843 };
2844
2845 anyhow::anyhow!("{}{}", base_msg, hint)
2846 })?;
2847
2848 // Determine flatten behavior: use explicit setting or tool config default
2849 let flatten = dep
2850 .get_flatten()
2851 .or_else(|| {
2852 // Get flatten default from tool config
2853 self.manifest
2854 .get_tool_config(artifact_type)
2855 .and_then(|config| config.resources.get(resource_type.to_plural()))
2856 .and_then(|resource_config| resource_config.flatten)
2857 })
2858 .unwrap_or(false); // Default to false if not configured
2859
2860 let path = if let Some(custom_target) = dep.get_target() {
2861 // Custom target is relative to the artifact's resource directory
2862 let base_target = PathBuf::from(artifact_path.display().to_string())
2863 .join(custom_target.trim_start_matches('/'));
2864 // For custom targets, still strip prefix based on the original artifact path
2865 // (not the custom target), to avoid duplicate directories
2866 let relative_path = compute_relative_install_path(
2867 &artifact_path,
2868 Path::new(&filename),
2869 flatten,
2870 );
2871 base_target.join(relative_path)
2872 } else {
2873 // Use artifact configuration for default path
2874 let relative_path = compute_relative_install_path(
2875 &artifact_path,
2876 Path::new(&filename),
2877 flatten,
2878 );
2879 artifact_path.join(relative_path)
2880 };
2881 normalize_path_for_storage(normalize_path(&path))
2882 }
2883 };
2884
2885 Ok(LockedResource {
2886 name: unique_name,
2887 source: None,
2888 url: None,
2889 path: normalize_path_for_storage(dep.get_path()),
2890 version: None,
2891 resolved_commit: None,
2892 checksum: String::new(),
2893 installed_at,
2894 dependencies: self.get_dependencies_for(name, None, resource_type, dep.get_tool()),
2895 resource_type,
2896 tool: Some(
2897 dep.get_tool()
2898 .map(std::string::ToString::to_string)
2899 .unwrap_or_else(|| self.manifest.get_default_tool(resource_type)),
2900 ),
2901 manifest_alias: self
2902 .pattern_alias_map
2903 .get(&(resource_type, name.to_string()))
2904 .cloned(), // Check if this came from a pattern expansion
2905 applied_patches: HashMap::new(), // Populated during installation, not resolution
2906 install: dep.get_install(),
2907 })
2908 } else {
2909 // Remote dependency - need to sync and resolve
2910 let source_name = dep.get_source().ok_or_else(|| AgpmError::ConfigError {
2911 message: format!("Dependency '{name}' has no source specified"),
2912 })?;
2913
2914 // Get source URL
2915 let source_url = self.source_manager.get_source_url(source_name).ok_or_else(|| {
2916 AgpmError::SourceNotFound {
2917 name: source_name.to_string(),
2918 }
2919 })?;
2920
2921 let version_key = dep
2922 .get_version()
2923 .map_or_else(|| "HEAD".to_string(), std::string::ToString::to_string);
2924 let prepared_key = Self::group_key(source_name, &version_key);
2925
2926 // Check if this dependency has been prepared
2927 let (resolved_version, resolved_commit) = if let Some(prepared) =
2928 self.prepared_versions.get(&prepared_key)
2929 {
2930 // Use prepared version
2931 (prepared.resolved_version.clone(), prepared.resolved_commit.clone())
2932 } else {
2933 // This dependency wasn't prepared (e.g., when called from `agpm add`)
2934 // We need to prepare it on-demand
2935 let deps = vec![(name.to_string(), dep.clone())];
2936 self.prepare_remote_groups(&deps).await?;
2937
2938 // Now it should be prepared
2939 if let Some(prepared) = self.prepared_versions.get(&prepared_key) {
2940 (prepared.resolved_version.clone(), prepared.resolved_commit.clone())
2941 } else {
2942 return Err(anyhow::anyhow!(
2943 "Failed to prepare dependency '{name}' from source '{source_name}' @ '{version_key}'"
2944 ));
2945 }
2946 };
2947
2948 // Resource type is passed as a parameter to ensure correct type resolution
2949 // for both direct and transitive dependencies
2950
2951 // Determine the filename to use
2952 let filename = if let Some(custom_filename) = dep.get_filename() {
2953 // Use custom filename as-is (includes extension)
2954 custom_filename.to_string()
2955 } else {
2956 // Preserve the dependency path structure directly
2957 let dep_path = Path::new(dep.get_path());
2958 dep_path.to_string_lossy().to_string()
2959 };
2960
2961 // Determine artifact type - use get_tool() method, then apply defaults
2962 // Extract artifact_type from dependency - convert to String for lockfile
2963 let artifact_type_string = dep
2964 .get_tool()
2965 .map(std::string::ToString::to_string)
2966 .unwrap_or_else(|| self.manifest.get_default_tool(resource_type));
2967 let artifact_type = artifact_type_string.as_str();
2968
2969 // Use simple name from manifest - lockfile entries are identified by (name, source)
2970 // Multiple entries with the same name but different sources can coexist
2971 // Version updates replace the existing entry for the same (name, source) pair
2972 let unique_name = name.to_string();
2973
2974 // Hooks and MCP servers are configured in config files, not installed as artifact files
2975 let installed_at = match resource_type {
2976 crate::core::ResourceType::Hook | crate::core::ResourceType::McpServer => {
2977 // Use configured merge target, with fallback to hardcoded defaults
2978 if let Some(merge_target) =
2979 self.manifest.get_merge_target(artifact_type, resource_type)
2980 {
2981 normalize_path_for_storage(merge_target.display().to_string())
2982 } else {
2983 // Fallback to hardcoded defaults if not configured
2984 match resource_type {
2985 crate::core::ResourceType::Hook => {
2986 ".claude/settings.local.json".to_string()
2987 }
2988 crate::core::ResourceType::McpServer => {
2989 if artifact_type == "opencode" {
2990 ".opencode/opencode.json".to_string()
2991 } else {
2992 ".mcp.json".to_string()
2993 }
2994 }
2995 _ => unreachable!(),
2996 }
2997 }
2998 }
2999 _ => {
3000 // For regular resources, get the artifact path
3001 let artifact_path = self
3002 .manifest
3003 .get_artifact_resource_path(artifact_type, resource_type)
3004 .ok_or_else(|| {
3005 // Provide helpful error message with context
3006 let base_msg = format!(
3007 "Resource type '{}' is not supported by tool '{}' for dependency '{}'",
3008 resource_type, artifact_type, name
3009 );
3010
3011 // Check if this looks like a tool name was used as resource type
3012 let resource_type_str = resource_type.to_string();
3013 let hint = if ["claude-code", "opencode", "agpm"].contains(&resource_type_str.as_str()) {
3014 format!(
3015 "\n\nIt looks like '{}' is a tool name, not a resource type.\n\
3016 In transitive dependencies, use resource types (agents, snippets, commands)\n\
3017 as section headers, then specify 'tool: {}' within each dependency.",
3018 resource_type_str, resource_type_str
3019 )
3020 } else {
3021 format!(
3022 "\n\nValid resource types: agent, command, snippet, hook, mcp-server, script\n\
3023 Source file: {}",
3024 dep.get_path()
3025 )
3026 };
3027
3028 anyhow::anyhow!("{}{}", base_msg, hint)
3029 })?;
3030
3031 // Determine flatten behavior: use explicit setting or tool config default
3032 let flatten = dep
3033 .get_flatten()
3034 .or_else(|| {
3035 // Get flatten default from tool config
3036 self.manifest
3037 .get_tool_config(artifact_type)
3038 .and_then(|config| config.resources.get(resource_type.to_plural()))
3039 .and_then(|resource_config| resource_config.flatten)
3040 })
3041 .unwrap_or(false); // Default to false if not configured
3042
3043 let path = if let Some(custom_target) = dep.get_target() {
3044 // Custom target is relative to the artifact's resource directory
3045 let base_target = PathBuf::from(artifact_path.display().to_string())
3046 .join(custom_target.trim_start_matches('/'));
3047 // For custom targets, still strip prefix based on the original artifact path
3048 // (not the custom target), to avoid duplicate directories
3049 let relative_path = compute_relative_install_path(
3050 &artifact_path,
3051 Path::new(&filename),
3052 flatten,
3053 );
3054 base_target.join(relative_path)
3055 } else {
3056 // Use artifact configuration for default path
3057 let relative_path = compute_relative_install_path(
3058 &artifact_path,
3059 Path::new(&filename),
3060 flatten,
3061 );
3062 artifact_path.join(relative_path)
3063 };
3064 normalize_path_for_storage(normalize_path(&path))
3065 }
3066 };
3067
3068 Ok(LockedResource {
3069 name: unique_name,
3070 source: Some(source_name.to_string()),
3071 url: Some(source_url.clone()),
3072 path: normalize_path_for_storage(dep.get_path()),
3073 version: resolved_version, // Resolved version (tag/branch like "v2.1.4" or "main")
3074 resolved_commit: Some(resolved_commit),
3075 checksum: String::new(), // Will be calculated during installation
3076 installed_at,
3077 dependencies: self.get_dependencies_for(
3078 name,
3079 Some(source_name),
3080 resource_type,
3081 dep.get_tool(),
3082 ),
3083 resource_type,
3084 tool: Some(artifact_type_string),
3085 manifest_alias: self
3086 .pattern_alias_map
3087 .get(&(resource_type, name.to_string()))
3088 .cloned(), // Check if this came from a pattern expansion
3089 applied_patches: HashMap::new(), // Populated during installation, not resolution
3090 install: dep.get_install(),
3091 })
3092 }
3093 }
3094
3095 /// Gets the dependencies for a resource from the dependency map.
3096 ///
3097 /// Returns a list of dependencies in the format "`resource_type/name`".
3098 ///
3099 /// # Parameters
3100 /// - `name`: The resource name
3101 /// - `source`: The source name (None for local dependencies)
3102 fn get_dependencies_for(
3103 &self,
3104 name: &str,
3105 source: Option<&str>,
3106 resource_type: crate::core::ResourceType,
3107 tool: Option<&str>,
3108 ) -> Vec<String> {
3109 // Use the threaded resource_type parameter from the manifest
3110 // This prevents type misclassification when same names exist across types
3111 let key = (
3112 resource_type,
3113 name.to_string(),
3114 source.map(std::string::ToString::to_string),
3115 tool.map(std::string::ToString::to_string),
3116 );
3117 self.dependency_map.get(&key).cloned().unwrap_or_default()
3118 }
3119
3120 /// Resolves a pattern-based dependency to multiple locked resources.
3121 ///
3122 /// Pattern dependencies match multiple resources using glob patterns,
3123 /// enabling batch installation of related resources.
3124 ///
3125 /// # Process
3126 ///
3127 /// 1. Sync the source repository
3128 /// 2. Checkout the specified version (if any)
3129 /// 3. Search for files matching the pattern
3130 /// 4. Preserve relative directory structure for each matched file
3131 /// 5. Create a locked resource for each match
3132 ///
3133 /// # Parameters
3134 ///
3135 /// - `name`: The dependency name (used for the collection)
3136 /// - `dep`: The pattern-based dependency specification
3137 ///
3138 /// # Returns
3139 ///
3140 /// A vector of [`LockedResource`] entries, one for each matched file.
3141 async fn resolve_pattern_dependency(
3142 &mut self,
3143 name: &str,
3144 dep: &ResourceDependency,
3145 resource_type: crate::core::ResourceType,
3146 ) -> Result<Vec<LockedResource>> {
3147 // Pattern dependencies use the path field with glob characters
3148 if !dep.is_pattern() {
3149 return Err(anyhow::anyhow!(
3150 "Expected pattern dependency but no glob characters found in path"
3151 ));
3152 }
3153
3154 let pattern = dep.get_path();
3155
3156 if dep.is_local() {
3157 // Local pattern dependency - search in filesystem
3158 // Extract base path from the pattern if it contains an absolute path
3159 let (base_path, pattern_str) = if pattern.contains('/') || pattern.contains('\\') {
3160 // Pattern contains path separators, extract base path
3161 let pattern_path = Path::new(pattern);
3162 if let Some(parent) = pattern_path.parent() {
3163 if parent.is_absolute() || parent.starts_with("..") || parent.starts_with(".") {
3164 // Use the parent as base path and just the filename pattern
3165 (
3166 parent.to_path_buf(),
3167 pattern_path
3168 .file_name()
3169 .and_then(|s| s.to_str())
3170 .unwrap_or(pattern)
3171 .to_string(),
3172 )
3173 } else {
3174 // Relative path, use current directory as base
3175 (PathBuf::from("."), pattern.to_string())
3176 }
3177 } else {
3178 // No parent, use current directory
3179 (PathBuf::from("."), pattern.to_string())
3180 }
3181 } else {
3182 // Simple pattern without path separators
3183 (PathBuf::from("."), pattern.to_string())
3184 };
3185
3186 let pattern_resolver = crate::pattern::PatternResolver::new();
3187 let matches = pattern_resolver.resolve(&pattern_str, &base_path)?;
3188
3189 // Use the threaded resource_type from the parameter
3190 let mut resources = Vec::new();
3191
3192 for matched_path in matches {
3193 let resource_name = crate::pattern::extract_resource_name(&matched_path);
3194
3195 // Determine artifact type - use get_tool() method, then apply defaults
3196 let artifact_type_string = dep
3197 .get_tool()
3198 .map(|s| s.to_string())
3199 .unwrap_or_else(|| self.manifest.get_default_tool(resource_type));
3200 let artifact_type = artifact_type_string.as_str();
3201
3202 // Construct full relative path from base_path and matched_path
3203 let full_relative_path = if base_path == Path::new(".") {
3204 crate::utils::normalize_path_for_storage(
3205 matched_path.to_string_lossy().to_string(),
3206 )
3207 } else {
3208 crate::utils::normalize_path_for_storage(format!(
3209 "{}/{}",
3210 base_path.display(),
3211 matched_path.display()
3212 ))
3213 };
3214
3215 // Use the threaded resource_type (pattern dependencies inherit from parent)
3216
3217 // Hooks and MCP servers are configured in config files, not installed as artifact files
3218 let installed_at = match resource_type {
3219 crate::core::ResourceType::Hook | crate::core::ResourceType::McpServer => {
3220 // Use configured merge target, with fallback to hardcoded defaults
3221 if let Some(merge_target) =
3222 self.manifest.get_merge_target(artifact_type, resource_type)
3223 {
3224 normalize_path_for_storage(merge_target.display().to_string())
3225 } else {
3226 // Fallback to hardcoded defaults if not configured
3227 match resource_type {
3228 crate::core::ResourceType::Hook => {
3229 ".claude/settings.local.json".to_string()
3230 }
3231 crate::core::ResourceType::McpServer => {
3232 if artifact_type == "opencode" {
3233 ".opencode/opencode.json".to_string()
3234 } else {
3235 ".mcp.json".to_string()
3236 }
3237 }
3238 _ => unreachable!(),
3239 }
3240 }
3241 }
3242 _ => {
3243 // For regular resources, get the artifact path
3244 let artifact_path = self
3245 .manifest
3246 .get_artifact_resource_path(artifact_type, resource_type)
3247 .ok_or_else(|| {
3248 anyhow::anyhow!(
3249 "Resource type '{}' is not supported by tool '{}'",
3250 resource_type,
3251 artifact_type
3252 )
3253 })?;
3254
3255 // Determine flatten behavior: use explicit setting or tool config default
3256 let dep_flatten = dep.get_flatten();
3257 let tool_flatten = self
3258 .manifest
3259 .get_tool_config(artifact_type)
3260 .and_then(|config| config.resources.get(resource_type.to_plural()))
3261 .and_then(|resource_config| resource_config.flatten);
3262
3263 let flatten = dep_flatten.or(tool_flatten).unwrap_or(false); // Default to false if not configured
3264
3265 // Determine the base target directory
3266 let base_target = if let Some(custom_target) = dep.get_target() {
3267 // Custom target is relative to the artifact's resource directory
3268 PathBuf::from(artifact_path.display().to_string())
3269 .join(custom_target.trim_start_matches('/'))
3270 } else {
3271 artifact_path.to_path_buf()
3272 };
3273
3274 // Extract the meaningful path structure
3275 let filename = {
3276 // Construct full path from base_path and matched_path for extraction
3277 let full_path = if base_path == Path::new(".") {
3278 matched_path.to_path_buf()
3279 } else {
3280 base_path.join(&matched_path)
3281 };
3282 extract_meaningful_path(&full_path)
3283 };
3284
3285 // Use compute_relative_install_path to avoid redundant prefixes
3286 let relative_path = compute_relative_install_path(
3287 &base_target,
3288 Path::new(&filename),
3289 flatten,
3290 );
3291 normalize_path_for_storage(normalize_path(&base_target.join(relative_path)))
3292 }
3293 };
3294
3295 resources.push(LockedResource {
3296 name: resource_name.clone(),
3297 source: None,
3298 url: None,
3299 path: full_relative_path,
3300 version: None,
3301 resolved_commit: None,
3302 checksum: String::new(),
3303 installed_at,
3304 dependencies: self.get_dependencies_for(
3305 &resource_name,
3306 None,
3307 resource_type,
3308 dep.get_tool(),
3309 ),
3310 resource_type,
3311 tool: Some(
3312 dep.get_tool()
3313 .map(std::string::ToString::to_string)
3314 .unwrap_or_else(|| self.manifest.get_default_tool(resource_type)),
3315 ),
3316 manifest_alias: Some(name.to_string()), // Pattern dependency: preserve original alias
3317 applied_patches: HashMap::new(), // Populated during installation, not resolution
3318 install: dep.get_install(),
3319 });
3320 }
3321
3322 Ok(resources)
3323 } else {
3324 // Remote pattern dependency - need to sync and search
3325 let source_name = dep.get_source().ok_or_else(|| AgpmError::ConfigError {
3326 message: format!("Pattern dependency '{name}' has no source specified"),
3327 })?;
3328
3329 let source_url = self.source_manager.get_source_url(source_name).ok_or_else(|| {
3330 AgpmError::SourceNotFound {
3331 name: source_name.to_string(),
3332 }
3333 })?;
3334
3335 let version_key = dep
3336 .get_version()
3337 .map_or_else(|| "HEAD".to_string(), std::string::ToString::to_string);
3338 let prepared_key = Self::group_key(source_name, &version_key);
3339
3340 let prepared = self
3341 .prepared_versions
3342 .get(&prepared_key)
3343 .ok_or_else(|| {
3344 anyhow::anyhow!(
3345 "Prepared state missing for source '{source_name}' @ '{version_key}'. Stage 1 preparation should have populated this entry."
3346 )
3347 })?;
3348
3349 let repo_path = prepared.worktree_path.clone();
3350 let resolved_version = prepared.resolved_version.clone();
3351 let resolved_commit = prepared.resolved_commit.clone();
3352
3353 // Search for matching files in the repository
3354 let pattern_resolver = crate::pattern::PatternResolver::new();
3355 let repo_path_ref = Path::new(&repo_path);
3356 let matches = pattern_resolver.resolve(pattern, repo_path_ref)?;
3357
3358 // Use the threaded resource_type parameter (no need to recompute)
3359 let mut resources = Vec::new();
3360
3361 for matched_path in matches {
3362 let resource_name = crate::pattern::extract_resource_name(&matched_path);
3363
3364 // Determine artifact type - use get_tool() method, then apply defaults
3365 let artifact_type_string = dep
3366 .get_tool()
3367 .map(|s| s.to_string())
3368 .unwrap_or_else(|| self.manifest.get_default_tool(resource_type));
3369 let artifact_type = artifact_type_string.as_str();
3370
3371 // Use the threaded resource_type (pattern dependencies inherit from parent)
3372
3373 // Hooks and MCP servers are configured in config files, not installed as artifact files
3374 let installed_at = match resource_type {
3375 crate::core::ResourceType::Hook | crate::core::ResourceType::McpServer => {
3376 // Use configured merge target, with fallback to hardcoded defaults
3377 if let Some(merge_target) =
3378 self.manifest.get_merge_target(artifact_type, resource_type)
3379 {
3380 normalize_path_for_storage(merge_target.display().to_string())
3381 } else {
3382 // Fallback to hardcoded defaults if not configured
3383 match resource_type {
3384 crate::core::ResourceType::Hook => {
3385 ".claude/settings.local.json".to_string()
3386 }
3387 crate::core::ResourceType::McpServer => {
3388 if artifact_type == "opencode" {
3389 ".opencode/opencode.json".to_string()
3390 } else {
3391 ".mcp.json".to_string()
3392 }
3393 }
3394 _ => unreachable!(),
3395 }
3396 }
3397 }
3398 _ => {
3399 // For regular resources, get the artifact path
3400 let artifact_path = self
3401 .manifest
3402 .get_artifact_resource_path(artifact_type, resource_type)
3403 .ok_or_else(|| {
3404 anyhow::anyhow!(
3405 "Resource type '{}' is not supported by tool '{}'",
3406 resource_type,
3407 artifact_type
3408 )
3409 })?;
3410
3411 // Determine flatten behavior: use explicit setting or tool config default
3412 let dep_flatten = dep.get_flatten();
3413 let tool_flatten = self
3414 .manifest
3415 .get_tool_config(artifact_type)
3416 .and_then(|config| config.resources.get(resource_type.to_plural()))
3417 .and_then(|resource_config| resource_config.flatten);
3418
3419 let flatten = dep_flatten.or(tool_flatten).unwrap_or(false); // Default to false if not configured
3420
3421 // Determine the base target directory
3422 let base_target = if let Some(custom_target) = dep.get_target() {
3423 // Custom target is relative to the artifact's resource directory
3424 PathBuf::from(artifact_path.display().to_string())
3425 .join(custom_target.trim_start_matches('/'))
3426 } else {
3427 artifact_path.to_path_buf()
3428 };
3429
3430 // Extract the meaningful path structure
3431 // 1. For relative paths with "../", strip parent components: "../../snippets/dir/file.md" → "snippets/dir/file.md"
3432 // 2. For absolute paths, resolve ".." first then strip root: "/tmp/foo/../bar/agent.md" → "tmp/bar/agent.md"
3433 // 3. For clean relative paths, use as-is: "agents/test.md" → "agents/test.md"
3434 let filename = {
3435 // Construct full path from repo_path and matched_path for extraction
3436 let full_path = repo_path_ref.join(&matched_path);
3437 let components: Vec<_> = full_path.components().collect();
3438
3439 if full_path.is_absolute() {
3440 // Case 2: Absolute path - resolve ".." components first, then strip root
3441 let mut resolved = Vec::new();
3442
3443 for component in components.iter() {
3444 match component {
3445 std::path::Component::Normal(name) => {
3446 resolved.push(name.to_str().unwrap_or(""));
3447 }
3448 std::path::Component::ParentDir => {
3449 // Pop the last component if there is one
3450 resolved.pop();
3451 }
3452 // Skip RootDir, Prefix, and CurDir
3453 _ => {}
3454 }
3455 }
3456
3457 resolved.join("/")
3458 } else if components
3459 .iter()
3460 .any(|c| matches!(c, std::path::Component::ParentDir))
3461 {
3462 // Case 1: Relative path with "../" - skip all parent components
3463 let start_idx = components
3464 .iter()
3465 .position(|c| matches!(c, std::path::Component::Normal(_)))
3466 .unwrap_or(0);
3467
3468 components[start_idx..]
3469 .iter()
3470 .filter_map(|c| c.as_os_str().to_str())
3471 .collect::<Vec<_>>()
3472 .join("/")
3473 } else {
3474 // Case 3: Clean relative path - use as-is
3475 full_path.to_str().unwrap_or("").replace('\\', "/") // Normalize to forward slashes
3476 }
3477 };
3478
3479 // Use compute_relative_install_path to avoid redundant prefixes
3480 let relative_path = compute_relative_install_path(
3481 &base_target,
3482 Path::new(&filename),
3483 flatten,
3484 );
3485 normalize_path_for_storage(normalize_path(&base_target.join(relative_path)))
3486 }
3487 };
3488
3489 resources.push(LockedResource {
3490 name: resource_name.clone(),
3491 source: Some(source_name.to_string()),
3492 url: Some(source_url.clone()),
3493 path: normalize_path_for_storage(matched_path.to_string_lossy().to_string()),
3494 version: resolved_version.clone(), // Use the resolved version (e.g., "main")
3495 resolved_commit: Some(resolved_commit.clone()),
3496 checksum: String::new(),
3497 installed_at,
3498 dependencies: self.get_dependencies_for(
3499 &resource_name,
3500 Some(source_name),
3501 resource_type,
3502 dep.get_tool(),
3503 ),
3504 resource_type,
3505 tool: Some(
3506 dep.get_tool()
3507 .map(|s| s.to_string())
3508 .unwrap_or_else(|| self.manifest.get_default_tool(resource_type)),
3509 ),
3510 manifest_alias: Some(name.to_string()), // Pattern dependency: preserve original alias
3511 applied_patches: HashMap::new(), // Populated during installation, not resolution
3512 install: dep.get_install(),
3513 });
3514 }
3515
3516 Ok(resources)
3517 }
3518 }
3519
3520 /// Checks out a specific version in a Git repository.
3521 ///
3522 /// This method implements the version resolution strategy by attempting
3523 /// to checkout Git references in order of preference:
3524 ///
3525 /// 1. **Tags**: Exact tag matches (e.g., `v1.2.3`)
3526 /// 2. **Branches**: Branch heads (e.g., `main`, `develop`)
3527 /// 3. **Commits**: Direct commit hashes (40-character SHA)
3528 ///
3529 /// # Algorithm
3530 ///
3531 /// ```text
3532 /// 1. List all tags in repository
3533 /// 2. If version matches a tag, checkout tag
3534 /// 3. Else attempt branch checkout
3535 /// 4. Else attempt commit hash checkout
3536 /// 5. Return current HEAD commit hash
3537 /// ```
3538 ///
3539 /// # Performance Note
3540 ///
3541 /// Tag listing is cached by Git, making tag lookups efficient.
3542 /// The method avoids unnecessary network operations by checking
3543 /// local references first.
3544 ///
3545 /// # Parameters
3546 ///
3547 /// - `repo`: Git repository handle
3548 /// - `version`: Version constraint (tag, branch, or commit hash)
3549 ///
3550 /// # Returns
3551 ///
3552 /// The commit hash (SHA) of the checked out version.
3553 ///
3554 /// # Errors
3555 ///
3556 /// Returns an error if:
3557 /// - Git repository is in an invalid state
3558 /// - Version string doesn't match any tag, branch, or valid commit
3559 /// - Git checkout fails due to conflicts or permissions
3560 /// - Repository is corrupted or inaccessible
3561 ///
3562 /// Determines the resource type (agent or snippet) from a dependency name.
3563 ///
3564 /// This method checks which manifest section contains the dependency
3565 /// to determine where it should be installed in the project.
3566 ///
3567 /// # Resource Type Mapping
3568 ///
3569 /// - **agents**: Dependencies listed in `[agents]` section
3570 /// - **snippets**: Dependencies listed in `[snippets]` section
3571 ///
3572 /// # Installation Paths
3573 ///
3574 /// Resource types determine installation directories based on tool configuration:
3575 /// - Agents typically install to `.claude/agents/{name}.md` (claude-code tool)
3576 /// - Snippets typically install to `.agpm/snippets/{name}.md` (agpm tool)
3577 ///
3578 /// # Parameters
3579 ///
3580 /// - `name`: Dependency name as defined in manifest
3581 ///
3582 /// # Returns
3583 ///
3584 /// Resource type as a string: `"agent"` or `"snippet"`.
3585 ///
3586 /// # Default Behavior
3587 ///
3588 /// If a dependency is not found in the agents section, it defaults
3589 /// to `"snippet"`. This handles edge cases and maintains backward compatibility.
3590 /// Resolve version conflicts between two dependencies.
3591 ///
3592 /// This method implements version conflict resolution strategies when the same
3593 /// resource is required with different versions by different dependencies.
3594 ///
3595 /// # Resolution Strategy
3596 ///
3597 /// The current implementation uses a "highest compatible version" strategy:
3598 /// 1. If one dependency has no version (latest), use the other's version
3599 /// 2. If both have versions, prefer semantic version comparison
3600 /// 3. For incompatible versions, warn and use the higher version
3601 ///
3602 /// # Future Enhancements
3603 ///
3604 /// - Support for version ranges (^1.0.0, ~2.1.0)
3605 /// - User-configurable resolution strategies
3606 /// - Interactive conflict resolution
3607 ///
3608 /// # Parameters
3609 ///
3610 /// - `resource_name`: Name of the conflicting resource
3611 /// - `existing`: Current version in the dependency map
3612 /// - `new_dep`: New version being requested
3613 /// - `requester`: Name of the dependency requesting the new version
3614 ///
3615 /// # Returns
3616 ///
3617 /// The resolved dependency that satisfies both requirements if possible,
3618 /// or the higher version with a warning if not compatible.
3619 async fn resolve_version_conflict(
3620 &self,
3621 resource_name: &str,
3622 existing: &ResourceDependency,
3623 new_dep: &ResourceDependency,
3624 requester: &str,
3625 ) -> Result<ResourceDependency> {
3626 let existing_version = existing.get_version();
3627 let new_version = new_dep.get_version();
3628
3629 // If versions are identical, no conflict
3630 if existing_version == new_version {
3631 return Ok(existing.clone());
3632 }
3633
3634 // Check if either version is a semver range (not an exact version)
3635 let is_existing_range = existing_version.is_some_and(|v| {
3636 v.starts_with('^') || v.starts_with('~') || v.starts_with('>') || v.starts_with('<')
3637 });
3638 let is_new_range = new_version.is_some_and(|v| {
3639 v.starts_with('^') || v.starts_with('~') || v.starts_with('>') || v.starts_with('<')
3640 });
3641
3642 // If we have semver ranges, resolve them to a concrete version
3643 if is_existing_range || is_new_range {
3644 return self
3645 .resolve_semver_range_conflict(resource_name, existing, new_dep, requester)
3646 .await;
3647 }
3648
3649 // Log the conflict for user awareness
3650 tracing::warn!(
3651 "Version conflict for '{}': existing version {:?} vs {:?} required by '{}'",
3652 resource_name,
3653 existing_version.unwrap_or("HEAD"),
3654 new_version.unwrap_or("HEAD"),
3655 requester
3656 );
3657
3658 // Resolution strategy
3659 match (existing_version, new_version) {
3660 (None, Some(_)) => {
3661 // Existing wants HEAD, new wants specific - use specific
3662 Ok(new_dep.clone())
3663 }
3664 (Some(_), None) => {
3665 // Existing wants specific, new wants HEAD - keep specific
3666 Ok(existing.clone())
3667 }
3668 (Some(v1), Some(v2)) => {
3669 // Both have versions - use semver-aware comparison
3670 use semver::Version;
3671
3672 // Try to parse as semver (strip 'v' prefix if present)
3673 let v1_semver = Version::parse(v1.trim_start_matches('v')).ok();
3674 let v2_semver = Version::parse(v2.trim_start_matches('v')).ok();
3675
3676 match (v1_semver, v2_semver) {
3677 (Some(sv1), Some(sv2)) => {
3678 // Both are valid semver - use proper semver comparison
3679 if sv1 >= sv2 {
3680 tracing::info!(
3681 "Resolving conflict: using version {} for {} (semver: {} >= {})",
3682 v1,
3683 resource_name,
3684 sv1,
3685 sv2
3686 );
3687 Ok(existing.clone())
3688 } else {
3689 tracing::info!(
3690 "Resolving conflict: using version {} for {} (semver: {} < {})",
3691 v2,
3692 resource_name,
3693 sv1,
3694 sv2
3695 );
3696 Ok(new_dep.clone())
3697 }
3698 }
3699 (Some(_), None) => {
3700 // v1 is semver, v2 is not (branch/commit) - prefer semver
3701 tracing::info!(
3702 "Resolving conflict: preferring semver version {} over git ref {} for {}",
3703 v1,
3704 v2,
3705 resource_name
3706 );
3707 Ok(existing.clone())
3708 }
3709 (None, Some(_)) => {
3710 // v1 is not semver (branch/commit), v2 is - prefer semver
3711 tracing::info!(
3712 "Resolving conflict: preferring semver version {} over git ref {} for {}",
3713 v2,
3714 v1,
3715 resource_name
3716 );
3717 Ok(new_dep.clone())
3718 }
3719 (None, None) => {
3720 // Neither is semver (both branches/commits)
3721 // Use deterministic ordering: alphabetical
3722 if v1 <= v2 {
3723 tracing::info!(
3724 "Resolving conflict: using git ref {} for {} (alphabetically first)",
3725 v1,
3726 resource_name
3727 );
3728 Ok(existing.clone())
3729 } else {
3730 tracing::info!(
3731 "Resolving conflict: using git ref {} for {} (alphabetically first)",
3732 v2,
3733 resource_name
3734 );
3735 Ok(new_dep.clone())
3736 }
3737 }
3738 }
3739 }
3740 (None, None) => {
3741 // Both want HEAD - no conflict
3742 Ok(existing.clone())
3743 }
3744 }
3745 }
3746
    /// Resolve a version conflict where at least one version is a semver range.
    ///
    /// This method handles version resolution when one or both dependencies specify
    /// semver ranges (like `^1.0.0`, `>=1.5.0`) instead of exact versions. It:
    /// 1. Fetches available versions from the Git repository
    /// 2. Uses the `ConflictDetector` to check if ranges are compatible
    /// 3. Finds the best version that satisfies both constraints
    /// 4. Returns a resolved dependency with the concrete version
    ///
    /// # Arguments
    ///
    /// * `resource_name` - Name of the conflicting resource
    /// * `existing` - Current dependency with version range
    /// * `new_dep` - New dependency with version range
    /// * `requester` - Name of the dependency requesting the new version
    ///
    /// # Returns
    ///
    /// A `ResourceDependency` with the resolved concrete version that satisfies both ranges.
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - Dependencies are local (no source) but have version ranges
    /// - Source hasn't been synced yet
    /// - Version ranges are incompatible (no common version)
    /// - No available versions satisfy the constraints
    async fn resolve_semver_range_conflict(
        &self,
        resource_name: &str,
        existing: &ResourceDependency,
        new_dep: &ResourceDependency,
        requester: &str,
    ) -> Result<ResourceDependency> {
        use crate::manifest::DetailedDependency;
        use crate::resolver::version_resolution::parse_tags_to_versions;
        use semver::Version;
        use std::collections::HashMap;

        // "HEAD" is the sentinel for an unspecified version; the detector
        // treats it as an unconstrained requirement.
        let existing_version = existing.get_version().unwrap_or("HEAD");
        let new_version = new_dep.get_version().unwrap_or("HEAD");

        tracing::info!(
            "Resolving semver range conflict for '{}': existing '{}' vs required '{}' by '{}'",
            resource_name,
            existing_version,
            new_version,
            requester
        );

        // Get source (both should have same source for transitive deps).
        // Local dependencies have no source, and ranges cannot be resolved
        // without a tag list, so that case is an error.
        let source = existing.get_source().or_else(|| new_dep.get_source()).ok_or_else(|| {
            AgpmError::Other {
                message: format!(
                    "Cannot resolve semver ranges for local dependencies: {}",
                    resource_name
                ),
            }
        })?;

        // Get bare repo path; requires that stage-1 source sync already ran.
        let repo_path =
            self.version_resolver.get_bare_repo_path(source).ok_or_else(|| AgpmError::Other {
                message: format!("Source '{}' not synced yet", source),
            })?;

        // List available tags from the (already local) repository
        let tags = self.get_available_versions(repo_path).await?;
        tracing::debug!("Found {} tags for source '{}'", tags.len(), source);

        // Parse tags to semver versions; non-semver tags are filtered out here,
        // so `parsed_versions` keeps the (tag, version) pairing for later lookup.
        let parsed_versions = parse_tags_to_versions(tags);
        let available_versions: Vec<Version> =
            parsed_versions.iter().map(|(_, v)| v.clone()).collect();

        if available_versions.is_empty() {
            return Err(AgpmError::Other {
                message: format!(
                    "No valid semver tags found for source '{}' to resolve range conflict",
                    source
                ),
            }
            .into());
        }

        // Use ConflictDetector to find best version: register both constraints
        // against the same resource id ("source:path").
        let mut detector = ConflictDetector::new();
        let resource_id = format!("{}:{}", source, existing.get_path());
        detector.add_requirement(&resource_id, "existing", existing_version);
        detector.add_requirement(&resource_id, requester, new_version);

        // Check for conflicts before attempting resolution; incompatible
        // ranges are reported to the user rather than silently picked from.
        let conflicts = detector.detect_conflicts();
        if !conflicts.is_empty() {
            return Err(AgpmError::Other {
                message: format!(
                    "Incompatible version ranges for '{}': existing '{}' vs required '{}' by '{}'",
                    resource_name, existing_version, new_version, requester
                ),
            }
            .into());
        }

        // Find best version that satisfies both ranges
        let mut versions_map = HashMap::new();
        versions_map.insert(resource_id.clone(), available_versions);

        let resolved = detector.resolve_conflicts(&versions_map)?;
        let best_version = resolved.get(&resource_id).ok_or_else(|| AgpmError::Other {
            message: format!("Failed to resolve version for '{}'", resource_name),
        })?;

        // Map the winning semver version back to its original tag string,
        // since lockfiles record tags (e.g. "v1.2.3"), not parsed versions.
        let best_tag = parsed_versions
            .iter()
            .find(|(_, v)| v == best_version)
            .map(|(tag, _)| tag.clone())
            .ok_or_else(|| AgpmError::Other {
                message: format!("Version {} not found in tags", best_version),
            })?;

        tracing::info!(
            "Resolved '{}' to version {} (satisfies both '{}' and '{}')",
            resource_name,
            best_tag,
            existing_version,
            new_version
        );

        // Create new dependency with resolved version
        // We need to create a Detailed variant with the concrete version
        match new_dep {
            ResourceDependency::Detailed(d) => {
                let mut resolved_dep = (**d).clone();
                resolved_dep.version = Some(best_tag);
                resolved_dep.branch = None; // Clear branch/rev since we have concrete version
                resolved_dep.rev = None;
                Ok(ResourceDependency::Detailed(Box::new(resolved_dep)))
            }
            ResourceDependency::Simple(_) => {
                // Create a DetailedDependency from the simple path
                // This shouldn't happen since Simple deps don't have versions, but handle it
                Ok(ResourceDependency::Detailed(Box::new(DetailedDependency {
                    source: Some(source.to_string()),
                    path: existing.get_path().to_string(),
                    version: Some(best_tag),
                    branch: None,
                    rev: None,
                    command: None,
                    args: None,
                    target: None,
                    filename: None,
                    dependencies: None,
                    // NOTE(review): hardcoded default tool; presumably should
                    // use the manifest's default tool for the resource type
                    // like other call sites do — confirm before relying on it.
                    tool: Some("claude-code".to_string()), // Default tool
                    flatten: None,
                    install: None,
                })))
            }
        }
    }
3907
3908 /// Updates an existing lockfile with new or changed dependencies.
3909 ///
3910 /// This method performs incremental dependency resolution by comparing
3911 /// the current manifest against an existing lockfile and updating only
3912 /// the specified dependencies (or all if none specified).
3913 ///
3914 /// # Update Strategy
3915 ///
3916 /// The update process follows these steps:
3917 /// 1. **Selective Resolution**: Only resolve specified dependencies
3918 /// 2. **Preserve Existing**: Keep unchanged dependencies from existing lockfile
3919 /// 3. **In-place Updates**: Replace matching entries with new versions
3920 /// 4. **New Additions**: Append newly added dependencies
3921 ///
3922 /// # Use Cases
3923 ///
3924 /// - **Selective Updates**: Update specific outdated dependencies
3925 /// - **Security Patches**: Update dependencies with known vulnerabilities
3926 /// - **Feature Updates**: Pull latest versions for active development
3927 /// - **Manifest Changes**: Reflect additions/modifications to agpm.toml
3928 ///
3929 /// # Parameters
3930 ///
3931 /// - `existing`: Current lockfile to update
3932 /// - `deps_to_update`: Optional list of specific dependencies to update.
3933 /// If `None`, all dependencies are updated.
3934 /// - `progress`: Optional progress bar for user feedback
3935 ///
3936 /// # Returns
3937 ///
3938 /// A new [`LockFile`] with updated dependencies. The original lockfile
3939 /// structure is preserved, with only specified entries modified.
3940 ///
3941 /// # Algorithm Complexity
3942 ///
3943 /// - **Time**: O(u + s·log(t)) where u = dependencies to update
3944 /// - **Space**: O(n) where n = total dependencies in lockfile
3945 ///
3946 /// # Performance Benefits
3947 ///
3948 /// - **Network Optimization**: Only syncs sources for updated dependencies
3949 /// - **Cache Utilization**: Reuses existing source repositories
3950 /// - **Parallel Processing**: Updates multiple dependencies concurrently
3951 ///
3952 /// # Errors
3953 ///
3954 /// Update can fail due to:
3955 /// - Network issues accessing source repositories
3956 /// - Version constraints that cannot be satisfied
3957 /// - Authentication failures for private sources
3958 /// - Corrupted or inaccessible cache directories
3959 ///
3960 /// [`LockFile`]: crate::lockfile::LockFile
3961 pub async fn update(
3962 &mut self,
3963 existing: &LockFile,
3964 deps_to_update: Option<Vec<String>>,
3965 ) -> Result<LockFile> {
3966 let mut lockfile = existing.clone();
3967
3968 // Update sources from manifest (handles source URL changes and new sources)
3969 lockfile.sources.clear();
3970 for (name, url) in &self.manifest.sources {
3971 lockfile.add_source(name.clone(), url.clone(), String::new());
3972 }
3973
3974 // Determine which dependencies to update
3975 let deps_to_check: HashSet<String> = if let Some(specific) = deps_to_update {
3976 specific.into_iter().collect()
3977 } else {
3978 // Update all dependencies
3979 self.manifest.all_dependencies().iter().map(|(name, _)| (*name).to_string()).collect()
3980 };
3981
3982 // Get all base dependencies with their types to avoid mis-typing same-named resources
3983 let base_deps: Vec<(String, ResourceDependency, crate::core::ResourceType)> = self
3984 .manifest
3985 .all_dependencies_with_types()
3986 .into_iter()
3987 .map(|(name, dep, resource_type)| (name.to_string(), dep.into_owned(), resource_type))
3988 .collect();
3989
3990 // Note: We assume the update command has already called pre_sync_sources
3991 // during the "Syncing sources" phase, so repositories are already available.
3992 // We just need to prepare and resolve versions now.
3993
3994 // Prepare remote groups to resolve versions (reuses pre-synced repos)
3995 // prepare_remote_groups only needs name and dep, not type
3996 let base_deps_for_prep: Vec<(String, ResourceDependency)> =
3997 base_deps.iter().map(|(name, dep, _)| (name.clone(), dep.clone())).collect();
3998 self.prepare_remote_groups(&base_deps_for_prep).await?;
3999
4000 // First, remove stale entries that are no longer in the manifest
4001 // This prevents conflicts from commented-out or removed dependencies
4002 self.remove_stale_manifest_entries(&mut lockfile);
4003
4004 // Remove old entries for manifest dependencies being updated
4005 // This handles source changes (e.g., Git -> local path) and type changes
4006 self.remove_manifest_entries_for_update(&mut lockfile, &deps_to_check);
4007
4008 // Resolve transitive dependencies (always enabled for update to maintain consistency)
4009 let deps = self.resolve_transitive_dependencies(&base_deps, true).await?;
4010
4011 for (name, dep, resource_type) in deps {
4012 // Determine if this dependency should be skipped:
4013 // Skip ONLY if it's a manifest dependency that's NOT being updated
4014 // Always process:
4015 // - Manifest deps being updated (name in deps_to_check)
4016 // - Pattern expansions whose alias is being updated
4017 // - Transitive deps (not in manifest at all)
4018
4019 let is_manifest_dep = self.manifest.all_dependencies().iter().any(|(n, _)| *n == name);
4020 let pattern_alias = self.pattern_alias_map.get(&(resource_type, name.clone()));
4021
4022 let should_skip = is_manifest_dep
4023 && !deps_to_check.contains(&name)
4024 && !pattern_alias.is_some_and(|alias| deps_to_check.contains(alias));
4025
4026 if should_skip {
4027 // This is a manifest dependency not being updated - skip to avoid unnecessary work
4028 continue;
4029 }
4030
4031 // Check if this is a pattern dependency
4032 if dep.is_pattern() {
4033 // Pattern dependencies resolve to multiple resources
4034 let entries = self.resolve_pattern_dependency(&name, &dep, resource_type).await?;
4035
4036 // Add each resolved entry to the appropriate resource type with deduplication
4037 // Use resource_type from transitive resolution to avoid recomputing
4038 for entry in entries {
4039 match resource_type {
4040 crate::core::ResourceType::Agent => {
4041 // Match by (name, source) to allow same-named resources from different sources
4042 if let Some(existing) = lockfile
4043 .agents
4044 .iter_mut()
4045 .find(|e| e.name == entry.name && e.source == entry.source)
4046 {
4047 *existing = entry;
4048 } else {
4049 lockfile.agents.push(entry);
4050 }
4051 }
4052 crate::core::ResourceType::Snippet => {
4053 if let Some(existing) = lockfile
4054 .snippets
4055 .iter_mut()
4056 .find(|e| e.name == entry.name && e.source == entry.source)
4057 {
4058 *existing = entry;
4059 } else {
4060 lockfile.snippets.push(entry);
4061 }
4062 }
4063 crate::core::ResourceType::Command => {
4064 if let Some(existing) = lockfile
4065 .commands
4066 .iter_mut()
4067 .find(|e| e.name == entry.name && e.source == entry.source)
4068 {
4069 *existing = entry;
4070 } else {
4071 lockfile.commands.push(entry);
4072 }
4073 }
4074 crate::core::ResourceType::Script => {
4075 if let Some(existing) = lockfile
4076 .scripts
4077 .iter_mut()
4078 .find(|e| e.name == entry.name && e.source == entry.source)
4079 {
4080 *existing = entry;
4081 } else {
4082 lockfile.scripts.push(entry);
4083 }
4084 }
4085 crate::core::ResourceType::Hook => {
4086 if let Some(existing) = lockfile
4087 .hooks
4088 .iter_mut()
4089 .find(|e| e.name == entry.name && e.source == entry.source)
4090 {
4091 *existing = entry;
4092 } else {
4093 lockfile.hooks.push(entry);
4094 }
4095 }
4096 crate::core::ResourceType::McpServer => {
4097 if let Some(existing) = lockfile
4098 .mcp_servers
4099 .iter_mut()
4100 .find(|e| e.name == entry.name && e.source == entry.source)
4101 {
4102 *existing = entry;
4103 } else {
4104 lockfile.mcp_servers.push(entry);
4105 }
4106 }
4107 }
4108 }
4109 } else {
4110 // Regular single dependency
4111 // Use resource_type from transitive resolution to avoid recomputing
4112 let entry = self.resolve_dependency(&name, &dep, resource_type).await?;
4113
4114 // Use the helper method to add or update the entry
4115 self.add_or_update_lockfile_entry(&mut lockfile, &name, entry);
4116 }
4117 }
4118
4119 // Progress bar completion is handled by the caller
4120
4121 // Post-process dependencies to add version information
4122 self.add_version_to_dependencies(&mut lockfile)?;
4123
4124 // Detect target-path conflicts before finalizing
4125 self.detect_target_conflicts(&lockfile)?;
4126
4127 Ok(lockfile)
4128 }
4129
4130 /// Adds a dependency to the conflict detector.
4131 ///
4132 /// Builds a resource identifier from the dependency's source and path,
4133 /// and records it along with the requirer and version constraint.
4134 ///
4135 /// # Parameters
4136 ///
4137 /// - `_name`: The dependency name (unused, kept for consistency)
4138 /// - `dep`: The dependency specification
4139 /// - `required_by`: Identifier of the resource requiring this dependency
4140 fn add_to_conflict_detector(
4141 &mut self,
4142 _name: &str,
4143 dep: &ResourceDependency,
4144 required_by: &str,
4145 ) {
4146 // Skip local dependencies (no version conflicts possible)
4147 if dep.is_local() {
4148 return;
4149 }
4150
4151 // Build resource identifier: source:path
4152 let source = dep.get_source().unwrap_or("unknown");
4153 let path = dep.get_path();
4154 let resource_id = format!("{source}:{path}");
4155
4156 // Get version constraint (None means HEAD/unspecified)
4157 if let Some(version) = dep.get_version() {
4158 // Add to conflict detector
4159 self.conflict_detector.add_requirement(&resource_id, required_by, version);
4160 } else {
4161 // No version specified - use HEAD marker
4162 self.conflict_detector.add_requirement(&resource_id, required_by, "HEAD");
4163 }
4164 }
4165
4166 /// Post-processes lockfile entries to add version information to dependencies.
4167 ///
4168 /// Updates the `dependencies` field in each lockfile entry from the format
4169 /// `"resource_type/name"` to `"resource_type/name@version"` by looking up
4170 /// the resolved version in the lockfile.
4171 fn add_version_to_dependencies(&self, lockfile: &mut LockFile) -> Result<()> {
4172 // Build a lookup map: (resource_type, path, source) -> unique_name
4173 // This allows us to resolve dependency paths to lockfile names
4174 // We store both the full path and just the filename for flexible lookup
4175 let mut lookup_map: HashMap<(crate::core::ResourceType, String, Option<String>), String> =
4176 HashMap::new();
4177
4178 // Helper to normalize path (strip leading ./, etc.)
4179 let normalize_path = |path: &str| -> String { path.trim_start_matches("./").to_string() };
4180
4181 // Helper to extract filename from path
4182 let extract_filename = |path: &str| -> Option<String> {
4183 path.split('/').next_back().map(std::string::ToString::to_string)
4184 };
4185
4186 // Build lookup map from all lockfile entries
4187 for entry in &lockfile.agents {
4188 let normalized_path = normalize_path(&entry.path);
4189 // Store by full path
4190 lookup_map.insert(
4191 (crate::core::ResourceType::Agent, normalized_path.clone(), entry.source.clone()),
4192 entry.name.clone(),
4193 );
4194 // Also store by filename for backward compatibility
4195 if let Some(filename) = extract_filename(&entry.path) {
4196 lookup_map.insert(
4197 (crate::core::ResourceType::Agent, filename, entry.source.clone()),
4198 entry.name.clone(),
4199 );
4200 }
4201 }
4202 for entry in &lockfile.snippets {
4203 let normalized_path = normalize_path(&entry.path);
4204 lookup_map.insert(
4205 (crate::core::ResourceType::Snippet, normalized_path.clone(), entry.source.clone()),
4206 entry.name.clone(),
4207 );
4208 if let Some(filename) = extract_filename(&entry.path) {
4209 lookup_map.insert(
4210 (crate::core::ResourceType::Snippet, filename, entry.source.clone()),
4211 entry.name.clone(),
4212 );
4213 }
4214 }
4215 for entry in &lockfile.commands {
4216 let normalized_path = normalize_path(&entry.path);
4217 lookup_map.insert(
4218 (crate::core::ResourceType::Command, normalized_path.clone(), entry.source.clone()),
4219 entry.name.clone(),
4220 );
4221 if let Some(filename) = extract_filename(&entry.path) {
4222 lookup_map.insert(
4223 (crate::core::ResourceType::Command, filename, entry.source.clone()),
4224 entry.name.clone(),
4225 );
4226 }
4227 }
4228 for entry in &lockfile.scripts {
4229 let normalized_path = normalize_path(&entry.path);
4230 lookup_map.insert(
4231 (crate::core::ResourceType::Script, normalized_path.clone(), entry.source.clone()),
4232 entry.name.clone(),
4233 );
4234 if let Some(filename) = extract_filename(&entry.path) {
4235 lookup_map.insert(
4236 (crate::core::ResourceType::Script, filename, entry.source.clone()),
4237 entry.name.clone(),
4238 );
4239 }
4240 }
4241 for entry in &lockfile.hooks {
4242 let normalized_path = normalize_path(&entry.path);
4243 lookup_map.insert(
4244 (crate::core::ResourceType::Hook, normalized_path.clone(), entry.source.clone()),
4245 entry.name.clone(),
4246 );
4247 if let Some(filename) = extract_filename(&entry.path) {
4248 lookup_map.insert(
4249 (crate::core::ResourceType::Hook, filename, entry.source.clone()),
4250 entry.name.clone(),
4251 );
4252 }
4253 }
4254 for entry in &lockfile.mcp_servers {
4255 let normalized_path = normalize_path(&entry.path);
4256 lookup_map.insert(
4257 (
4258 crate::core::ResourceType::McpServer,
4259 normalized_path.clone(),
4260 entry.source.clone(),
4261 ),
4262 entry.name.clone(),
4263 );
4264 if let Some(filename) = extract_filename(&entry.path) {
4265 lookup_map.insert(
4266 (crate::core::ResourceType::McpServer, filename, entry.source.clone()),
4267 entry.name.clone(),
4268 );
4269 }
4270 }
4271
4272 // Build a complete map of (resource_type, name, source) -> (source, version) for cross-source lookup
4273 // This needs to be done before we start mutating entries
4274 let mut resource_info_map: HashMap<ResourceKey, ResourceInfo> = HashMap::new();
4275
4276 for entry in &lockfile.agents {
4277 resource_info_map.insert(
4278 (crate::core::ResourceType::Agent, entry.name.clone(), entry.source.clone()),
4279 (entry.source.clone(), entry.version.clone()),
4280 );
4281 }
4282 for entry in &lockfile.snippets {
4283 resource_info_map.insert(
4284 (crate::core::ResourceType::Snippet, entry.name.clone(), entry.source.clone()),
4285 (entry.source.clone(), entry.version.clone()),
4286 );
4287 }
4288 for entry in &lockfile.commands {
4289 resource_info_map.insert(
4290 (crate::core::ResourceType::Command, entry.name.clone(), entry.source.clone()),
4291 (entry.source.clone(), entry.version.clone()),
4292 );
4293 }
4294 for entry in &lockfile.scripts {
4295 resource_info_map.insert(
4296 (crate::core::ResourceType::Script, entry.name.clone(), entry.source.clone()),
4297 (entry.source.clone(), entry.version.clone()),
4298 );
4299 }
4300 for entry in &lockfile.hooks {
4301 resource_info_map.insert(
4302 (crate::core::ResourceType::Hook, entry.name.clone(), entry.source.clone()),
4303 (entry.source.clone(), entry.version.clone()),
4304 );
4305 }
4306 for entry in &lockfile.mcp_servers {
4307 resource_info_map.insert(
4308 (crate::core::ResourceType::McpServer, entry.name.clone(), entry.source.clone()),
4309 (entry.source.clone(), entry.version.clone()),
4310 );
4311 }
4312
4313 // Helper function to update dependencies in a vector of entries
4314 let update_deps = |entries: &mut Vec<LockedResource>| {
4315 for entry in entries {
4316 let parent_source = entry.source.clone();
4317
4318 let updated_deps: Vec<String> =
4319 entry
4320 .dependencies
4321 .iter()
4322 .map(|dep| {
4323 // Parse "resource_type/path" format (e.g., "agent/rust-haiku.md" or "snippet/utils.md")
4324 if let Some((_resource_type_str, dep_path)) = dep.split_once('/') {
4325 // Parse resource type from string form (accepts both singular and plural)
4326 if let Ok(resource_type) =
4327 _resource_type_str.parse::<crate::core::ResourceType>()
4328 {
4329 // Normalize the path for lookup
4330 let dep_filename = normalize_path(dep_path);
4331
4332 // Look up the resource in the lookup map (same source as parent)
4333 if let Some(dep_name) = lookup_map.get(&(
4334 resource_type,
4335 dep_filename.clone(),
4336 parent_source.clone(),
4337 )) {
4338 // Found resource in same source - use singular form from enum
4339 return format!("{resource_type}/{dep_name}");
4340 }
4341
4342 // If not found with same source, try adding .md extension
4343 let dep_filename_with_ext = format!("{dep_filename}.md");
4344 if let Some(dep_name) = lookup_map.get(&(
4345 resource_type,
4346 dep_filename_with_ext.clone(),
4347 parent_source.clone(),
4348 )) {
4349 return format!("{resource_type}/{dep_name}");
4350 }
4351
4352 // Try looking for resource from ANY source (cross-source dependency)
4353 // Format: source:type/name:version
4354 for ((rt, filename, src), name) in &lookup_map {
4355 if *rt == resource_type
4356 && (filename == &dep_filename
4357 || filename == &dep_filename_with_ext)
4358 {
4359 // Found in different source - need to include source and version
4360 // Use the pre-built resource info map
4361 if let Some((source, version)) = resource_info_map
4362 .get(&(resource_type, name.clone(), src.clone()))
4363 {
4364 // Build full reference: source:type/name:version
4365 let mut dep_ref = String::new();
4366 if let Some(src) = source {
4367 dep_ref.push_str(src);
4368 dep_ref.push(':');
4369 }
4370 dep_ref.push_str(&resource_type.to_string());
4371 dep_ref.push('/');
4372 dep_ref.push_str(name);
4373 if let Some(ver) = version {
4374 dep_ref.push(':');
4375 dep_ref.push_str(ver);
4376 }
4377 return dep_ref;
4378 }
4379 }
4380 }
4381 }
4382 }
4383 // If parsing fails or resource not found, return as-is
4384 dep.clone()
4385 })
4386 .collect();
4387
4388 entry.dependencies = updated_deps;
4389 }
4390 };
4391
4392 // Update all entry types
4393 update_deps(&mut lockfile.agents);
4394 update_deps(&mut lockfile.snippets);
4395 update_deps(&mut lockfile.commands);
4396 update_deps(&mut lockfile.scripts);
4397 update_deps(&mut lockfile.hooks);
4398 update_deps(&mut lockfile.mcp_servers);
4399
4400 Ok(())
4401 }
4402
4403 /// Verifies that all dependencies can be resolved without performing resolution.
4404 ///
4405 /// This method performs a "dry run" validation of the manifest to detect
4406 /// issues before attempting actual resolution. It's faster than full resolution
4407 /// since it doesn't clone repositories or resolve specific versions.
4408 ///
4409 /// # Validation Steps
4410 ///
4411 /// 1. **Local Path Validation**: Verify local dependencies exist (for absolute paths)
4412 /// 2. **Source Validation**: Ensure all referenced sources are defined
4413 /// 3. **Constraint Validation**: Basic syntax checking of version constraints
4414 ///
4415 /// # Validation Scope
4416 ///
4417 /// - **Manifest Structure**: Validate TOML structure and required fields
4418 /// - **Source References**: Ensure all sources used by dependencies exist
4419 /// - **Local Dependencies**: Check absolute paths exist on filesystem
4420 ///
4421 /// # Performance
4422 ///
4423 /// Verification is designed to be fast:
4424 /// - No network operations (doesn't validate remote repositories)
4425 /// - No Git operations (doesn't check if versions exist)
4426 /// - Only filesystem access for absolute local paths
4427 ///
4428 /// # Parameters
4429 ///
4430 /// - `progress`: Optional progress bar for user feedback
4431 ///
4432 /// # Returns
4433 ///
4434 /// `Ok(())` if all dependencies pass basic validation.
4435 ///
4436 /// # Errors
4437 ///
4438 /// Verification fails if:
4439 /// - Local dependencies reference non-existent absolute paths
4440 /// - Dependencies reference undefined sources
4441 /// - Manifest structure is invalid or corrupted
4442 ///
4443 /// # Note
4444 ///
4445 /// Successful verification doesn't guarantee resolution will succeed,
4446 /// since network issues or missing versions can still cause failures.
4447 /// Use this method for fast validation before expensive resolution operations.
4448 pub fn verify(&mut self) -> Result<()> {
4449 // Try to resolve all dependencies
4450 let deps: Vec<(&str, std::borrow::Cow<'_, ResourceDependency>, crate::core::ResourceType)> =
4451 self.manifest.all_dependencies_with_types();
4452
4453 for (name, dep, _resource_type) in deps {
4454 if dep.is_local() {
4455 // Check if local path exists or is relative
4456 let path = Path::new(dep.get_path());
4457 if path.is_absolute() && !path.exists() {
4458 anyhow::bail!("Local dependency '{}' not found at: {}", name, path.display());
4459 }
4460 } else {
4461 // Verify source exists
4462 let source_name = dep.get_source().ok_or_else(|| AgpmError::ConfigError {
4463 message: format!("Dependency '{name}' has no source specified"),
4464 })?;
4465
4466 if !self.manifest.sources.contains_key(source_name) {
4467 anyhow::bail!(
4468 "Dependency '{name}' references undefined source: '{source_name}'"
4469 );
4470 }
4471 }
4472 }
4473
4474 // Progress bar completion is handled by the caller
4475
4476 Ok(())
4477 }
4478}
4479
4480#[cfg(test)]
4481mod tests {
4482 use super::*;
4483 use crate::manifest::DetailedDependency;
4484 use tempfile::TempDir;
4485
4486 #[test]
4487 fn test_resolver_new() {
4488 let manifest = Manifest::new();
4489 let temp_dir = TempDir::new().unwrap();
4490 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
4491 let resolver = DependencyResolver::with_cache(manifest, cache);
4492
4493 assert_eq!(resolver.cache.get_cache_location(), temp_dir.path());
4494 }
4495
4496 #[tokio::test]
4497 async fn test_resolve_local_dependency() {
4498 let temp_dir = TempDir::new().unwrap();
4499 let mut manifest = Manifest::new();
4500 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
4501 manifest.add_dependency(
4502 "local-agent".to_string(),
4503 ResourceDependency::Simple("../agents/local.md".to_string()),
4504 true,
4505 );
4506
4507 // Create dummy file to allow transitive dependency extraction
4508 let agents_dir = temp_dir.path().parent().unwrap().join("agents");
4509 std::fs::create_dir_all(&agents_dir).unwrap();
4510 std::fs::write(agents_dir.join("local.md"), "# Local Agent").unwrap();
4511
4512 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
4513 let mut resolver = DependencyResolver::with_cache(manifest, cache);
4514
4515 let lockfile = resolver.resolve().await.unwrap();
4516 assert_eq!(lockfile.agents.len(), 1);
4517
4518 let entry = &lockfile.agents[0];
4519 assert_eq!(entry.name, "local-agent");
4520 assert_eq!(entry.path, "../agents/local.md");
4521 assert!(entry.source.is_none());
4522 assert!(entry.url.is_none());
4523 }
4524
4525 #[tokio::test]
4526 async fn test_pre_sync_sources() {
4527 // Skip test if git is not available
4528 if std::process::Command::new("git").arg("--version").output().is_err() {
4529 eprintln!("Skipping test: git not available");
4530 return;
4531 }
4532
4533 // Create a test Git repository with resources
4534 let temp_dir = TempDir::new().unwrap();
4535 let repo_dir = temp_dir.path().join("test-repo");
4536 std::fs::create_dir(&repo_dir).unwrap();
4537
4538 // Initialize git repo
4539 std::process::Command::new("git").args(["init"]).current_dir(&repo_dir).output().unwrap();
4540
4541 std::process::Command::new("git")
4542 .args(["config", "user.email", "test@example.com"])
4543 .current_dir(&repo_dir)
4544 .output()
4545 .unwrap();
4546
4547 std::process::Command::new("git")
4548 .args(["config", "user.name", "Test User"])
4549 .current_dir(&repo_dir)
4550 .output()
4551 .unwrap();
4552
4553 // Create test files
4554 std::fs::create_dir_all(repo_dir.join("agents")).unwrap();
4555 std::fs::write(repo_dir.join("agents/test.md"), "# Test Agent\n\nTest content").unwrap();
4556
4557 // Commit files
4558 std::process::Command::new("git")
4559 .args(["add", "."])
4560 .current_dir(&repo_dir)
4561 .output()
4562 .unwrap();
4563
4564 std::process::Command::new("git")
4565 .args(["commit", "-m", "Initial commit"])
4566 .current_dir(&repo_dir)
4567 .output()
4568 .unwrap();
4569
4570 std::process::Command::new("git")
4571 .args(["tag", "v1.0.0"])
4572 .current_dir(&repo_dir)
4573 .output()
4574 .unwrap();
4575
4576 // Create a manifest with a dependency from this source
4577 let mut manifest = Manifest::new();
4578 let source_url = format!("file://{}", repo_dir.display());
4579 manifest.add_source("test-source".to_string(), source_url.clone());
4580
4581 manifest.add_dependency(
4582 "test-agent".to_string(),
4583 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
4584 source: Some("test-source".to_string()),
4585 path: "agents/test.md".to_string(),
4586 version: Some("v1.0.0".to_string()),
4587 branch: None,
4588 rev: None,
4589 command: None,
4590 args: None,
4591 target: None,
4592 filename: None,
4593 dependencies: None,
4594 tool: Some("claude-code".to_string()),
4595 flatten: None,
4596 install: None,
4597 })),
4598 true,
4599 );
4600
4601 // Create resolver with test cache
4602 let cache_dir = TempDir::new().unwrap();
4603 let cache = Cache::with_dir(cache_dir.path().to_path_buf()).unwrap();
4604 let mut resolver = DependencyResolver::with_cache(manifest.clone(), cache);
4605
4606 // Prepare dependencies for pre-sync
4607 let deps: Vec<(String, ResourceDependency)> = manifest
4608 .all_dependencies()
4609 .into_iter()
4610 .map(|(name, dep)| (name.to_string(), dep.clone()))
4611 .collect();
4612
4613 // Call pre_sync_sources - this should clone the repository and prepare entries
4614 resolver.pre_sync_sources(&deps).await.unwrap();
4615
4616 // Verify that entries and repos are prepared
4617 assert!(
4618 resolver.version_resolver.pending_count() > 0,
4619 "Should have entries after pre-sync"
4620 );
4621
4622 let bare_repo = resolver.version_resolver.get_bare_repo_path("test-source");
4623 assert!(bare_repo.is_some(), "Should have bare repo path cached");
4624
4625 // Verify the repository exists in cache (uses normalized name)
4626 let cached_repo_path = resolver.cache.get_cache_location().join("sources");
4627
4628 // The cache normalizes the source name, so we check if any .git directory exists
4629 let mut found_repo = false;
4630 if let Ok(entries) = std::fs::read_dir(&cached_repo_path) {
4631 for entry in entries.flatten() {
4632 if let Some(name) = entry.file_name().to_str()
4633 && name.ends_with(".git")
4634 {
4635 found_repo = true;
4636 break;
4637 }
4638 }
4639 }
4640 assert!(found_repo, "Repository should be cloned to cache");
4641
4642 // Now call resolve_all() - it should work without cloning again
4643 resolver.version_resolver.resolve_all().await.unwrap();
4644
4645 // Verify resolution succeeded by checking we have resolved versions
4646 let all_resolved = resolver.version_resolver.get_all_resolved();
4647 assert!(!all_resolved.is_empty(), "Resolution should produce resolved versions");
4648
4649 // Check that v1.0.0 was resolved to a SHA
4650 let key = ("test-source".to_string(), "v1.0.0".to_string());
4651 assert!(all_resolved.contains_key(&key), "Should have resolved v1.0.0");
4652
4653 let sha = all_resolved.get(&key).unwrap();
4654 assert_eq!(sha.len(), 40, "SHA should be 40 characters");
4655 }
4656
4657 #[test]
4658 fn test_verify_missing_source() {
4659 let mut manifest = Manifest::new();
4660
4661 // Add dependency without corresponding source
4662 manifest.add_dependency(
4663 "remote-agent".to_string(),
4664 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
4665 source: Some("nonexistent".to_string()),
4666 path: "agents/test.md".to_string(),
4667 version: None,
4668 branch: None,
4669 rev: None,
4670 command: None,
4671 args: None,
4672 target: None,
4673 filename: None,
4674 dependencies: None,
4675 tool: Some("claude-code".to_string()),
4676 flatten: None,
4677 install: None,
4678 })),
4679 true,
4680 );
4681
4682 let temp_dir = TempDir::new().unwrap();
4683 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
4684 let mut resolver = DependencyResolver::with_cache(manifest, cache);
4685
4686 let result = resolver.verify();
4687 assert!(result.is_err());
4688 assert!(result.unwrap_err().to_string().contains("undefined source"));
4689 }
4690
4691 #[tokio::test]
4692 async fn test_resolve_with_source_dependency() {
4693 let temp_dir = TempDir::new().unwrap();
4694
4695 // Create a local mock git repository
4696 let source_dir = temp_dir.path().join("test-source");
4697 std::fs::create_dir_all(&source_dir).unwrap();
4698 std::process::Command::new("git")
4699 .args(["init"])
4700 .current_dir(&source_dir)
4701 .output()
4702 .expect("Failed to initialize git repository");
4703
4704 // Create the agents directory and test file
4705 let agents_dir = source_dir.join("agents");
4706 std::fs::create_dir_all(&agents_dir).unwrap();
4707 std::fs::write(agents_dir.join("test.md"), "# Test Agent").unwrap();
4708
4709 // Add and commit the file
4710 std::process::Command::new("git")
4711 .args(["add", "."])
4712 .current_dir(&source_dir)
4713 .output()
4714 .unwrap();
4715 std::process::Command::new("git")
4716 .args(["config", "user.email", "test@example.com"])
4717 .current_dir(&source_dir)
4718 .output()
4719 .unwrap();
4720 std::process::Command::new("git")
4721 .args(["config", "user.name", "Test User"])
4722 .current_dir(&source_dir)
4723 .output()
4724 .unwrap();
4725 std::process::Command::new("git")
4726 .args(["commit", "-m", "Initial commit"])
4727 .current_dir(&source_dir)
4728 .output()
4729 .unwrap();
4730
4731 // Create a tag for version
4732 std::process::Command::new("git")
4733 .args(["tag", "v1.0.0"])
4734 .current_dir(&source_dir)
4735 .output()
4736 .unwrap();
4737
4738 let mut manifest = Manifest::new();
4739 // Use file:// URL to ensure it's treated as a Git source, not a local path
4740 let source_url = format!("file://{}", source_dir.display());
4741 manifest.add_source("test".to_string(), source_url);
4742 manifest.add_dependency(
4743 "remote-agent".to_string(),
4744 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
4745 source: Some("test".to_string()),
4746 path: "agents/test.md".to_string(),
4747 version: Some("v1.0.0".to_string()),
4748 branch: None,
4749 rev: None,
4750 command: None,
4751 args: None,
4752 target: None,
4753 filename: None,
4754 dependencies: None,
4755 tool: Some("claude-code".to_string()),
4756 flatten: None,
4757 install: None,
4758 })),
4759 true,
4760 );
4761
4762 let cache_dir = temp_dir.path().join("cache");
4763 let cache = Cache::with_dir(cache_dir).unwrap();
4764 let mut resolver = DependencyResolver::with_cache(manifest, cache);
4765
4766 // This should now succeed with the local repository
4767 let result = resolver.resolve().await;
4768 assert!(result.is_ok());
4769 }
4770
4771 #[tokio::test]
4772 async fn test_resolve_with_progress() {
4773 let temp_dir = TempDir::new().unwrap();
4774 let mut manifest = Manifest::new();
4775 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
4776 manifest.add_dependency(
4777 "local".to_string(),
4778 ResourceDependency::Simple("test.md".to_string()),
4779 true,
4780 );
4781
4782 // Create dummy file to allow transitive dependency extraction
4783 std::fs::write(temp_dir.path().join("test.md"), "# Test").unwrap();
4784
4785 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
4786 let mut resolver = DependencyResolver::with_cache(manifest, cache);
4787
4788 let lockfile = resolver.resolve().await.unwrap();
4789 assert_eq!(lockfile.agents.len(), 1);
4790 }
4791
4792 #[test]
4793 fn test_verify_with_progress() {
4794 let mut manifest = Manifest::new();
4795 manifest.add_source("test".to_string(), "https://github.com/test/repo.git".to_string());
4796 manifest.add_dependency(
4797 "agent".to_string(),
4798 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
4799 source: Some("test".to_string()),
4800 path: "agents/test.md".to_string(),
4801 version: None,
4802 branch: None,
4803 rev: None,
4804 command: None,
4805 args: None,
4806 target: None,
4807 filename: None,
4808 dependencies: None,
4809 tool: Some("claude-code".to_string()),
4810 flatten: None,
4811 install: None,
4812 })),
4813 true,
4814 );
4815
4816 let temp_dir = TempDir::new().unwrap();
4817 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
4818 let mut resolver = DependencyResolver::with_cache(manifest, cache);
4819
4820 let result = resolver.verify();
4821 assert!(result.is_ok());
4822 }
4823
4824 #[tokio::test]
4825 async fn test_resolve_with_git_ref() {
4826 let temp_dir = TempDir::new().unwrap();
4827
4828 // Create a local mock git repository
4829 let source_dir = temp_dir.path().join("test-source");
4830 std::fs::create_dir_all(&source_dir).unwrap();
4831 std::process::Command::new("git")
4832 .args(["init"])
4833 .current_dir(&source_dir)
4834 .output()
4835 .expect("Failed to initialize git repository");
4836
4837 // Configure git
4838 std::process::Command::new("git")
4839 .args(["config", "user.email", "test@example.com"])
4840 .current_dir(&source_dir)
4841 .output()
4842 .unwrap();
4843 std::process::Command::new("git")
4844 .args(["config", "user.name", "Test User"])
4845 .current_dir(&source_dir)
4846 .output()
4847 .unwrap();
4848
4849 // Create the agents directory and test file
4850 let agents_dir = source_dir.join("agents");
4851 std::fs::create_dir_all(&agents_dir).unwrap();
4852 std::fs::write(agents_dir.join("test.md"), "# Test Agent").unwrap();
4853
4854 // Add and commit the file
4855 std::process::Command::new("git")
4856 .args(["add", "."])
4857 .current_dir(&source_dir)
4858 .output()
4859 .unwrap();
4860 std::process::Command::new("git")
4861 .args(["commit", "-m", "Initial commit"])
4862 .current_dir(&source_dir)
4863 .output()
4864 .unwrap();
4865
4866 // Create main branch (git may have created master)
4867 std::process::Command::new("git")
4868 .args(["branch", "-M", "main"])
4869 .current_dir(&source_dir)
4870 .output()
4871 .unwrap();
4872
4873 let mut manifest = Manifest::new();
4874 // Use file:// URL to ensure it's treated as a Git source, not a local path
4875 let source_url = format!("file://{}", source_dir.display());
4876 manifest.add_source("test".to_string(), source_url);
4877 manifest.add_dependency(
4878 "git-agent".to_string(),
4879 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
4880 source: Some("test".to_string()),
4881 path: "agents/test.md".to_string(),
4882 version: None,
4883 branch: None,
4884 rev: None,
4885 command: None,
4886 args: None,
4887 target: None,
4888 filename: None,
4889 dependencies: None,
4890 tool: Some("claude-code".to_string()),
4891 flatten: None,
4892 install: None,
4893 })),
4894 true,
4895 );
4896
4897 let cache_dir = temp_dir.path().join("cache");
4898 let cache = Cache::with_dir(cache_dir).unwrap();
4899 let mut resolver = DependencyResolver::with_cache(manifest, cache);
4900
4901 // This should now succeed with the local repository
4902 let result = resolver.resolve().await;
4903 if let Err(e) = &result {
4904 eprintln!("Test failed with error: {:#}", e);
4905 }
4906 assert!(result.is_ok());
4907 }
4908
4909 #[tokio::test]
4910 async fn test_new_with_global() {
4911 let manifest = Manifest::new();
4912 let cache = Cache::new().unwrap();
4913 let result = DependencyResolver::new_with_global(manifest, cache).await;
4914 assert!(result.is_ok());
4915 }
4916
4917 #[test]
4918 fn test_resolver_new_default() {
4919 let manifest = Manifest::new();
4920 let cache = Cache::new().unwrap();
4921 let result = DependencyResolver::new(manifest, cache);
4922 assert!(result.is_ok());
4923 }
4924
4925 #[tokio::test]
4926 async fn test_resolve_multiple_dependencies() {
4927 let temp_dir = TempDir::new().unwrap();
4928 let mut manifest = Manifest::new();
4929 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
4930 manifest.add_dependency(
4931 "agent1".to_string(),
4932 ResourceDependency::Simple("a1.md".to_string()),
4933 true,
4934 );
4935 manifest.add_dependency(
4936 "agent2".to_string(),
4937 ResourceDependency::Simple("a2.md".to_string()),
4938 true,
4939 );
4940 manifest.add_dependency(
4941 "snippet1".to_string(),
4942 ResourceDependency::Simple("s1.md".to_string()),
4943 false,
4944 );
4945
4946 // Create dummy files to allow transitive dependency extraction
4947 std::fs::write(temp_dir.path().join("a1.md"), "# Agent 1").unwrap();
4948 std::fs::write(temp_dir.path().join("a2.md"), "# Agent 2").unwrap();
4949 std::fs::write(temp_dir.path().join("s1.md"), "# Snippet 1").unwrap();
4950
4951 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
4952 let mut resolver = DependencyResolver::with_cache(manifest, cache);
4953
4954 let lockfile = resolver.resolve().await.unwrap();
4955 assert_eq!(lockfile.agents.len(), 2);
4956 assert_eq!(lockfile.snippets.len(), 1);
4957 }
4958
4959 #[test]
4960 fn test_verify_local_dependency() {
4961 let mut manifest = Manifest::new();
4962 manifest.add_dependency(
4963 "local-agent".to_string(),
4964 ResourceDependency::Simple("../local/agent.md".to_string()),
4965 true,
4966 );
4967
4968 let temp_dir = TempDir::new().unwrap();
4969 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
4970 let mut resolver = DependencyResolver::with_cache(manifest, cache);
4971
4972 let result = resolver.verify();
4973 assert!(result.is_ok());
4974 }
4975
4976 #[tokio::test]
4977 async fn test_resolve_with_empty_manifest() {
4978 let manifest = Manifest::new();
4979 let temp_dir = TempDir::new().unwrap();
4980 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
4981 let mut resolver = DependencyResolver::with_cache(manifest, cache);
4982
4983 let lockfile = resolver.resolve().await.unwrap();
4984 assert_eq!(lockfile.agents.len(), 0);
4985 assert_eq!(lockfile.snippets.len(), 0);
4986 assert_eq!(lockfile.sources.len(), 0);
4987 }
4988
4989 #[tokio::test]
4990 async fn test_resolve_with_custom_target() {
4991 let temp_dir = TempDir::new().unwrap();
4992 let mut manifest = Manifest::new();
4993 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
4994
4995 // Add local dependency with custom target
4996 manifest.add_dependency(
4997 "custom-agent".to_string(),
4998 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
4999 source: None,
5000 path: "../test.md".to_string(),
5001 version: None,
5002 branch: None,
5003 rev: None,
5004 command: None,
5005 args: None,
5006 target: Some("integrations/custom".to_string()),
5007 filename: None,
5008 dependencies: None,
5009 tool: Some("claude-code".to_string()),
5010 flatten: None,
5011 install: None,
5012 })),
5013 true,
5014 );
5015
5016 // Create dummy file to allow transitive dependency extraction
5017 std::fs::write(temp_dir.path().parent().unwrap().join("test.md"), "# Test").unwrap();
5018
5019 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5020 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5021
5022 let lockfile = resolver.resolve().await.unwrap();
5023 assert_eq!(lockfile.agents.len(), 1);
5024
5025 let agent = &lockfile.agents[0];
5026 assert_eq!(agent.name, "custom-agent");
5027 // Verify the custom target is relative to the default agents directory
5028 // Normalize path separators for cross-platform testing
5029 let normalized_path = normalize_path_for_storage(&agent.installed_at);
5030 assert!(normalized_path.contains(".claude/agents/integrations/custom"));
5031 // Path ../test.md extracts to test.md after stripping parent components
5032 assert_eq!(normalized_path, ".claude/agents/integrations/custom/test.md");
5033 }
5034
5035 #[tokio::test]
5036 async fn test_resolve_without_custom_target() {
5037 let temp_dir = TempDir::new().unwrap();
5038 let mut manifest = Manifest::new();
5039 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
5040
5041 // Add local dependency without custom target
5042 manifest.add_dependency(
5043 "standard-agent".to_string(),
5044 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5045 source: None,
5046 path: "../test.md".to_string(),
5047 version: None,
5048 branch: None,
5049 rev: None,
5050 command: None,
5051 args: None,
5052 target: None,
5053 filename: None,
5054 dependencies: None,
5055 tool: Some("claude-code".to_string()),
5056 flatten: None,
5057 install: None,
5058 })),
5059 true,
5060 );
5061
5062 // Create dummy file to allow transitive dependency extraction
5063 std::fs::write(temp_dir.path().parent().unwrap().join("test.md"), "# Test").unwrap();
5064
5065 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5066 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5067
5068 let lockfile = resolver.resolve().await.unwrap();
5069 assert_eq!(lockfile.agents.len(), 1);
5070
5071 let agent = &lockfile.agents[0];
5072 assert_eq!(agent.name, "standard-agent");
5073 // Verify the default target is used
5074 // Normalize path separators for cross-platform testing
5075 let normalized_path = normalize_path_for_storage(&agent.installed_at);
5076 // Path ../test.md extracts to test.md after stripping parent components
5077 assert_eq!(normalized_path, ".claude/agents/test.md");
5078 }
5079
5080 #[tokio::test]
5081 async fn test_resolve_with_custom_filename() {
5082 let temp_dir = TempDir::new().unwrap();
5083 let mut manifest = Manifest::new();
5084 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
5085
5086 // Add local dependency with custom filename
5087 manifest.add_dependency(
5088 "my-agent".to_string(),
5089 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5090 source: None,
5091 path: "../test.md".to_string(),
5092 version: None,
5093 branch: None,
5094 rev: None,
5095 command: None,
5096 args: None,
5097 target: None,
5098 filename: Some("ai-assistant.txt".to_string()),
5099 dependencies: None,
5100 tool: Some("claude-code".to_string()),
5101 flatten: None,
5102 install: None,
5103 })),
5104 true,
5105 );
5106
5107 // Create dummy file to allow transitive dependency extraction
5108 std::fs::write(temp_dir.path().parent().unwrap().join("test.md"), "# Test").unwrap();
5109
5110 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5111 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5112
5113 let lockfile = resolver.resolve().await.unwrap();
5114 assert_eq!(lockfile.agents.len(), 1);
5115
5116 let agent = &lockfile.agents[0];
5117 assert_eq!(agent.name, "my-agent");
5118 // Verify the custom filename is used
5119 // Normalize path separators for cross-platform testing
5120 let normalized_path = normalize_path_for_storage(&agent.installed_at);
5121 assert_eq!(normalized_path, ".claude/agents/ai-assistant.txt");
5122 }
5123
5124 #[tokio::test]
5125 async fn test_resolve_with_custom_filename_and_target() {
5126 let temp_dir = TempDir::new().unwrap();
5127 let mut manifest = Manifest::new();
5128 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
5129
5130 // Add local dependency with both custom filename and target
5131 manifest.add_dependency(
5132 "special-tool".to_string(),
5133 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5134 source: None,
5135 path: "../test.md".to_string(),
5136 version: None,
5137 branch: None,
5138 rev: None,
5139 command: None,
5140 args: None,
5141 target: Some("tools/ai".to_string()),
5142 filename: Some("assistant.markdown".to_string()),
5143 dependencies: None,
5144 tool: Some("claude-code".to_string()),
5145 flatten: None,
5146 install: None,
5147 })),
5148 true,
5149 );
5150
5151 // Create dummy file to allow transitive dependency extraction
5152 std::fs::write(temp_dir.path().parent().unwrap().join("test.md"), "# Test").unwrap();
5153
5154 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5155 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5156
5157 let lockfile = resolver.resolve().await.unwrap();
5158 assert_eq!(lockfile.agents.len(), 1);
5159
5160 let agent = &lockfile.agents[0];
5161 assert_eq!(agent.name, "special-tool");
5162 // Verify both custom target and filename are used
5163 // Custom target is relative to default agents directory
5164 // Normalize path separators for cross-platform testing
5165 let normalized_path = normalize_path_for_storage(&agent.installed_at);
5166 assert_eq!(normalized_path, ".claude/agents/tools/ai/assistant.markdown");
5167 }
5168
5169 #[tokio::test]
5170 async fn test_resolve_script_with_custom_filename() {
5171 let temp_dir = TempDir::new().unwrap();
5172 let mut manifest = Manifest::new();
5173 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
5174
5175 // Add script with custom filename (different extension)
5176 manifest.add_dependency(
5177 "analyzer".to_string(),
5178 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5179 source: None,
5180 path: "../scripts/data-analyzer-v3.py".to_string(),
5181 version: None,
5182 branch: None,
5183 rev: None,
5184 command: None,
5185 args: None,
5186 target: None,
5187 filename: Some("analyze.py".to_string()),
5188 dependencies: None,
5189 tool: Some("claude-code".to_string()),
5190 flatten: None,
5191 install: None,
5192 })),
5193 false, // script (not agent)
5194 );
5195
5196 // Create dummy script file to allow transitive dependency extraction
5197 let scripts_dir = temp_dir.path().parent().unwrap().join("scripts");
5198 std::fs::create_dir_all(&scripts_dir).unwrap();
5199 std::fs::write(scripts_dir.join("data-analyzer-v3.py"), "#!/usr/bin/env python3").unwrap();
5200
5201 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5202 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5203
5204 let lockfile = resolver.resolve().await.unwrap();
5205 // Scripts should be in snippets array for now (based on false flag)
5206 assert_eq!(lockfile.snippets.len(), 1);
5207
5208 let script = &lockfile.snippets[0];
5209 assert_eq!(script.name, "analyzer");
5210 // Verify custom filename is used (with custom extension)
5211 // Normalize path separators for cross-platform testing
5212 // Uses claude-code tool, so snippets go to .claude/snippets/
5213 let normalized_path = normalize_path_for_storage(&script.installed_at);
5214 assert_eq!(normalized_path, ".claude/snippets/analyze.py");
5215 }
5216
5217 // ============ NEW TESTS FOR UNCOVERED AREAS ============
5218
    // NOTE: The pattern tests below avoid changing the process working
    // directory (which would break parallel test safety) by passing absolute
    // base paths to the resolver for pattern resolution.
5222
5223 #[tokio::test]
5224 async fn test_resolve_pattern_dependency_local() {
5225 let temp_dir = TempDir::new().unwrap();
5226 let project_dir = temp_dir.path();
5227
5228 // Create local agent files
5229 let agents_dir = project_dir.join("agents");
5230 std::fs::create_dir_all(&agents_dir).unwrap();
5231 std::fs::write(agents_dir.join("helper.md"), "# Helper Agent").unwrap();
5232 std::fs::write(agents_dir.join("assistant.md"), "# Assistant Agent").unwrap();
5233 std::fs::write(agents_dir.join("tester.md"), "# Tester Agent").unwrap();
5234
5235 // Create manifest with local pattern dependency
5236 let mut manifest = Manifest::new();
5237 manifest.manifest_dir = Some(project_dir.to_path_buf());
5238 manifest.add_dependency(
5239 "local-agents".to_string(),
5240 ResourceDependency::Simple(format!("{}/agents/*.md", project_dir.display())),
5241 true,
5242 );
5243
5244 // Create resolver and resolve dependencies
5245 let cache_dir = temp_dir.path().join("cache");
5246 let cache = Cache::with_dir(cache_dir).unwrap();
5247 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5248
5249 let lockfile = resolver.resolve().await.unwrap();
5250
5251 // Verify all agents were resolved
5252 assert_eq!(lockfile.agents.len(), 3);
5253 let agent_names: Vec<String> = lockfile.agents.iter().map(|a| a.name.clone()).collect();
5254 assert!(agent_names.contains(&"helper".to_string()));
5255 assert!(agent_names.contains(&"assistant".to_string()));
5256 assert!(agent_names.contains(&"tester".to_string()));
5257 }
5258
5259 #[tokio::test]
5260 async fn test_resolve_pattern_dependency_remote() {
5261 let temp_dir = TempDir::new().unwrap();
5262
5263 // Create a local mock git repository with pattern-matching files
5264 let source_dir = temp_dir.path().join("test-source");
5265 std::fs::create_dir_all(&source_dir).unwrap();
5266 std::process::Command::new("git")
5267 .args(["init"])
5268 .current_dir(&source_dir)
5269 .output()
5270 .expect("Failed to initialize git repository");
5271
5272 // Configure git
5273 std::process::Command::new("git")
5274 .args(["config", "user.email", "test@example.com"])
5275 .current_dir(&source_dir)
5276 .output()
5277 .unwrap();
5278 std::process::Command::new("git")
5279 .args(["config", "user.name", "Test User"])
5280 .current_dir(&source_dir)
5281 .output()
5282 .unwrap();
5283
5284 // Create multiple agent files
5285 let agents_dir = source_dir.join("agents");
5286 std::fs::create_dir_all(&agents_dir).unwrap();
5287 std::fs::write(agents_dir.join("python-linter.md"), "# Python Linter").unwrap();
5288 std::fs::write(agents_dir.join("python-formatter.md"), "# Python Formatter").unwrap();
5289 std::fs::write(agents_dir.join("rust-linter.md"), "# Rust Linter").unwrap();
5290
5291 // Add and commit
5292 std::process::Command::new("git")
5293 .args(["add", "."])
5294 .current_dir(&source_dir)
5295 .output()
5296 .unwrap();
5297 std::process::Command::new("git")
5298 .args(["commit", "-m", "Add agents"])
5299 .current_dir(&source_dir)
5300 .output()
5301 .unwrap();
5302
5303 // Create a tag
5304 std::process::Command::new("git")
5305 .args(["tag", "v1.0.0"])
5306 .current_dir(&source_dir)
5307 .output()
5308 .unwrap();
5309
5310 let mut manifest = Manifest::new();
5311 // Use file:// URL to ensure it's treated as a Git source, not a local path
5312 let source_url = format!("file://{}", source_dir.display());
5313 manifest.add_source("test".to_string(), source_url);
5314
5315 // Add pattern dependency for python agents
5316 manifest.add_dependency(
5317 "python-tools".to_string(),
5318 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5319 source: Some("test".to_string()),
5320 path: "agents/python-*.md".to_string(),
5321 version: Some("v1.0.0".to_string()),
5322 branch: None,
5323 rev: None,
5324 command: None,
5325 args: None,
5326 target: None,
5327 filename: None,
5328 dependencies: None,
5329 tool: Some("claude-code".to_string()),
5330 flatten: None,
5331 install: None,
5332 })),
5333 true, // agents
5334 );
5335
5336 let cache_dir = temp_dir.path().join("cache");
5337 let cache = Cache::with_dir(cache_dir).unwrap();
5338 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5339
5340 let lockfile = resolver.resolve().await.unwrap();
5341 // Should have resolved to 2 python agents
5342 assert_eq!(lockfile.agents.len(), 2);
5343
5344 // Check that both python agents were found
5345 let agent_names: Vec<String> = lockfile.agents.iter().map(|a| a.name.clone()).collect();
5346 assert!(agent_names.contains(&"python-linter".to_string()));
5347 assert!(agent_names.contains(&"python-formatter".to_string()));
5348 assert!(!agent_names.contains(&"rust-linter".to_string()));
5349 }
5350
5351 #[tokio::test]
5352 async fn test_resolve_pattern_dependency_with_custom_target() {
5353 let temp_dir = TempDir::new().unwrap();
5354 let project_dir = temp_dir.path();
5355
5356 // Create local agent files
5357 let agents_dir = project_dir.join("agents");
5358 std::fs::create_dir_all(&agents_dir).unwrap();
5359 std::fs::write(agents_dir.join("helper.md"), "# Helper Agent").unwrap();
5360 std::fs::write(agents_dir.join("assistant.md"), "# Assistant Agent").unwrap();
5361
5362 // Create manifest with local pattern dependency and custom target
5363 let mut manifest = Manifest::new();
5364 manifest.manifest_dir = Some(project_dir.to_path_buf());
5365 manifest.add_dependency(
5366 "custom-agents".to_string(),
5367 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5368 source: None,
5369 path: format!("{}/agents/*.md", project_dir.display()),
5370 version: None,
5371 branch: None,
5372 rev: None,
5373 command: None,
5374 args: None,
5375 target: Some("custom/agents".to_string()),
5376 filename: None,
5377 dependencies: None,
5378 tool: Some("claude-code".to_string()),
5379 flatten: None,
5380 install: None,
5381 })),
5382 true,
5383 );
5384
5385 // Create resolver and resolve dependencies
5386 let cache_dir = temp_dir.path().join("cache");
5387 let cache = Cache::with_dir(cache_dir).unwrap();
5388 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5389
5390 let lockfile = resolver.resolve().await.unwrap();
5391
5392 // Verify agents were resolved with custom target
5393 // Custom target is relative to default agents directory
5394 assert_eq!(lockfile.agents.len(), 2);
5395 for agent in &lockfile.agents {
5396 assert!(agent.installed_at.starts_with(".claude/agents/custom/agents/"));
5397 }
5398
5399 let agent_names: Vec<String> = lockfile.agents.iter().map(|a| a.name.clone()).collect();
5400 assert!(agent_names.contains(&"helper".to_string()));
5401 assert!(agent_names.contains(&"assistant".to_string()));
5402 }
5403
5404 #[tokio::test]
5405 async fn test_update_specific_dependencies() {
5406 let temp_dir = TempDir::new().unwrap();
5407
5408 // Create a local mock git repository
5409 let source_dir = temp_dir.path().join("test-source");
5410 std::fs::create_dir_all(&source_dir).unwrap();
5411 std::process::Command::new("git")
5412 .args(["init"])
5413 .current_dir(&source_dir)
5414 .output()
5415 .expect("Failed to initialize git repository");
5416
5417 // Configure git
5418 std::process::Command::new("git")
5419 .args(["config", "user.email", "test@example.com"])
5420 .current_dir(&source_dir)
5421 .output()
5422 .unwrap();
5423 std::process::Command::new("git")
5424 .args(["config", "user.name", "Test User"])
5425 .current_dir(&source_dir)
5426 .output()
5427 .unwrap();
5428
5429 // Create initial files
5430 let agents_dir = source_dir.join("agents");
5431 std::fs::create_dir_all(&agents_dir).unwrap();
5432 std::fs::write(agents_dir.join("agent1.md"), "# Agent 1 v1").unwrap();
5433 std::fs::write(agents_dir.join("agent2.md"), "# Agent 2 v1").unwrap();
5434
5435 // Initial commit
5436 std::process::Command::new("git")
5437 .args(["add", "."])
5438 .current_dir(&source_dir)
5439 .output()
5440 .unwrap();
5441 std::process::Command::new("git")
5442 .args(["commit", "-m", "Initial"])
5443 .current_dir(&source_dir)
5444 .output()
5445 .unwrap();
5446 std::process::Command::new("git")
5447 .args(["tag", "v1.0.0"])
5448 .current_dir(&source_dir)
5449 .output()
5450 .unwrap();
5451
5452 // Update agent1 and create v2.0.0
5453 std::fs::write(agents_dir.join("agent1.md"), "# Agent 1 v2").unwrap();
5454 std::process::Command::new("git")
5455 .args(["add", "."])
5456 .current_dir(&source_dir)
5457 .output()
5458 .unwrap();
5459 std::process::Command::new("git")
5460 .args(["commit", "-m", "Update agent1"])
5461 .current_dir(&source_dir)
5462 .output()
5463 .unwrap();
5464 std::process::Command::new("git")
5465 .args(["tag", "v2.0.0"])
5466 .current_dir(&source_dir)
5467 .output()
5468 .unwrap();
5469
5470 let mut manifest = Manifest::new();
5471 // Use file:// URL to ensure it's treated as a Git source, not a local path
5472 let source_url = format!("file://{}", source_dir.display());
5473 manifest.add_source("test".to_string(), source_url);
5474
5475 // Add dependencies - initially both at v1.0.0
5476 manifest.add_dependency(
5477 "agent1".to_string(),
5478 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5479 source: Some("test".to_string()),
5480 path: "agents/agent1.md".to_string(),
5481 version: Some("v1.0.0".to_string()), // Start with v1.0.0
5482 branch: None,
5483 rev: None,
5484 command: None,
5485 args: None,
5486 target: None,
5487 filename: None,
5488 dependencies: None,
5489 tool: Some("claude-code".to_string()),
5490 flatten: None,
5491 install: None,
5492 })),
5493 true,
5494 );
5495 manifest.add_dependency(
5496 "agent2".to_string(),
5497 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5498 source: Some("test".to_string()),
5499 path: "agents/agent2.md".to_string(),
5500 version: Some("v1.0.0".to_string()), // Start with v1.0.0
5501 branch: None,
5502 rev: None,
5503 command: None,
5504 args: None,
5505 target: None,
5506 filename: None,
5507 dependencies: None,
5508 tool: Some("claude-code".to_string()),
5509 flatten: None,
5510 install: None,
5511 })),
5512 true,
5513 );
5514
5515 let cache_dir = temp_dir.path().join("cache");
5516 let cache = Cache::with_dir(cache_dir.clone()).unwrap();
5517 let mut resolver = DependencyResolver::with_cache(manifest.clone(), cache);
5518
5519 // First resolve with v1.0.0 for both
5520 let initial_lockfile = resolver.resolve().await.unwrap();
5521 assert_eq!(initial_lockfile.agents.len(), 2);
5522
5523 // Create a new manifest with agent1 updated to v2.0.0
5524 let mut updated_manifest = Manifest::new();
5525 // Use file:// URL to ensure it's treated as a Git source, not a local path
5526 updated_manifest.add_source("test".to_string(), format!("file://{}", source_dir.display()));
5527 updated_manifest.add_dependency(
5528 "agent1".to_string(),
5529 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5530 source: Some("test".to_string()),
5531 path: "agents/agent1.md".to_string(),
5532 version: Some("v2.0.0".to_string()),
5533 branch: None,
5534 rev: None,
5535 command: None,
5536 args: None,
5537 target: None,
5538 filename: None,
5539 dependencies: None,
5540 tool: Some("claude-code".to_string()),
5541 flatten: None,
5542 install: None,
5543 })),
5544 true,
5545 );
5546 updated_manifest.add_dependency(
5547 "agent2".to_string(),
5548 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5549 source: Some("test".to_string()),
5550 path: "agents/agent2.md".to_string(),
5551 version: Some("v1.0.0".to_string()), // Keep v1.0.0
5552 branch: None,
5553 rev: None,
5554 command: None,
5555 args: None,
5556 target: None,
5557 filename: None,
5558 dependencies: None,
5559 tool: Some("claude-code".to_string()),
5560 flatten: None,
5561 install: None,
5562 })),
5563 true,
5564 );
5565
5566 // Now update only agent1
5567 let cache2 = Cache::with_dir(cache_dir).unwrap();
5568 let mut resolver2 = DependencyResolver::with_cache(updated_manifest, cache2);
5569 let updated_lockfile =
5570 resolver2.update(&initial_lockfile, Some(vec!["agent1".to_string()])).await.unwrap();
5571
5572 // agent1 should be updated to v2.0.0
5573 let agent1 = updated_lockfile
5574 .agents
5575 .iter()
5576 .find(|a| a.name == "agent1" && a.version.as_deref() == Some("v2.0.0"))
5577 .unwrap();
5578 assert_eq!(agent1.version.as_ref().unwrap(), "v2.0.0");
5579
5580 // agent2 should remain at v1.0.0
5581 let agent2 = updated_lockfile
5582 .agents
5583 .iter()
5584 .find(|a| a.name == "agent2" && a.version.as_deref() == Some("v1.0.0"))
5585 .unwrap();
5586 assert_eq!(agent2.version.as_ref().unwrap(), "v1.0.0");
5587 }
5588
5589 #[tokio::test]
5590 async fn test_update_all_dependencies() {
5591 let temp_dir = TempDir::new().unwrap();
5592 let mut manifest = Manifest::new();
5593 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
5594 manifest.add_dependency(
5595 "local1".to_string(),
5596 ResourceDependency::Simple("../a1.md".to_string()),
5597 true,
5598 );
5599 manifest.add_dependency(
5600 "local2".to_string(),
5601 ResourceDependency::Simple("../a2.md".to_string()),
5602 true,
5603 );
5604
5605 // Create dummy files to allow transitive dependency extraction
5606 let parent = temp_dir.path().parent().unwrap();
5607 std::fs::write(parent.join("a1.md"), "# Agent 1").unwrap();
5608 std::fs::write(parent.join("a2.md"), "# Agent 2").unwrap();
5609
5610 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5611 let mut resolver = DependencyResolver::with_cache(manifest.clone(), cache);
5612
5613 // Initial resolve
5614 let initial_lockfile = resolver.resolve().await.unwrap();
5615 assert_eq!(initial_lockfile.agents.len(), 2);
5616
5617 // Update all (None means update all)
5618 let cache2 = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5619 let mut resolver2 = DependencyResolver::with_cache(manifest, cache2);
5620 let updated_lockfile = resolver2.update(&initial_lockfile, None).await.unwrap();
5621
5622 // All dependencies should be present
5623 assert_eq!(updated_lockfile.agents.len(), 2);
5624 }
5625
5626 // NOTE: Comprehensive integration tests for update() with transitive dependencies
5627 // are in tests/integration_incremental_add.rs. These provide end-to-end testing
5628 // of the incremental `agpm add dep` scenario which exercises the update() method.
5629
5630 #[tokio::test]
5631 async fn test_resolve_hooks_resource_type() {
5632 let temp_dir = TempDir::new().unwrap();
5633 let mut manifest = Manifest::new();
5634 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
5635
5636 // Add hook dependencies
5637 manifest.hooks.insert(
5638 "pre-commit".to_string(),
5639 ResourceDependency::Simple("../hooks/pre-commit.json".to_string()),
5640 );
5641 manifest.hooks.insert(
5642 "post-commit".to_string(),
5643 ResourceDependency::Simple("../hooks/post-commit.json".to_string()),
5644 );
5645
5646 // Create dummy hook files to allow transitive dependency extraction
5647 let hooks_dir = temp_dir.path().parent().unwrap().join("hooks");
5648 std::fs::create_dir_all(&hooks_dir).unwrap();
5649 std::fs::write(hooks_dir.join("pre-commit.json"), "{}").unwrap();
5650 std::fs::write(hooks_dir.join("post-commit.json"), "{}").unwrap();
5651
5652 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5653 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5654
5655 let lockfile = resolver.resolve().await.unwrap();
5656 assert_eq!(lockfile.hooks.len(), 2);
5657
5658 // Check that hooks point to the config file where they're configured
5659 for hook in &lockfile.hooks {
5660 assert_eq!(
5661 hook.installed_at, ".claude/settings.local.json",
5662 "Hooks should reference the config file where they're configured"
5663 );
5664 }
5665 }
5666
5667 #[tokio::test]
5668 async fn test_resolve_scripts_resource_type() {
5669 let temp_dir = TempDir::new().unwrap();
5670 let mut manifest = Manifest::new();
5671 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
5672
5673 // Add script dependencies
5674 manifest.scripts.insert(
5675 "build".to_string(),
5676 ResourceDependency::Simple("../scripts/build.sh".to_string()),
5677 );
5678 manifest.scripts.insert(
5679 "test".to_string(),
5680 ResourceDependency::Simple("../scripts/test.py".to_string()),
5681 );
5682
5683 // Create dummy script files to allow transitive dependency extraction
5684 let scripts_dir = temp_dir.path().parent().unwrap().join("scripts");
5685 std::fs::create_dir_all(&scripts_dir).unwrap();
5686 std::fs::write(scripts_dir.join("build.sh"), "#!/bin/bash").unwrap();
5687 std::fs::write(scripts_dir.join("test.py"), "#!/usr/bin/env python3").unwrap();
5688
5689 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5690 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5691
5692 let lockfile = resolver.resolve().await.unwrap();
5693 assert_eq!(lockfile.scripts.len(), 2);
5694
5695 // Check that scripts maintain their extensions
5696 let build_script = lockfile.scripts.iter().find(|s| s.name == "build").unwrap();
5697 assert!(build_script.installed_at.ends_with("build.sh"));
5698
5699 let test_script = lockfile.scripts.iter().find(|s| s.name == "test").unwrap();
5700 assert!(test_script.installed_at.ends_with("test.py"));
5701 }
5702
5703 #[tokio::test]
5704 async fn test_resolve_mcp_servers_resource_type() {
5705 let temp_dir = TempDir::new().unwrap();
5706 let mut manifest = Manifest::new();
5707 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
5708
5709 // Add MCP server dependencies
5710 manifest.mcp_servers.insert(
5711 "filesystem".to_string(),
5712 ResourceDependency::Simple("../mcp/filesystem.json".to_string()),
5713 );
5714 manifest.mcp_servers.insert(
5715 "database".to_string(),
5716 ResourceDependency::Simple("../mcp/database.json".to_string()),
5717 );
5718
5719 // Create dummy MCP server files to allow transitive dependency extraction
5720 let mcp_dir = temp_dir.path().parent().unwrap().join("mcp");
5721 std::fs::create_dir_all(&mcp_dir).unwrap();
5722 std::fs::write(mcp_dir.join("filesystem.json"), "{}").unwrap();
5723 std::fs::write(mcp_dir.join("database.json"), "{}").unwrap();
5724
5725 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5726 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5727
5728 let lockfile = resolver.resolve().await.unwrap();
5729 assert_eq!(lockfile.mcp_servers.len(), 2);
5730
5731 // Check that MCP servers point to the config file where they're configured
5732 for server in &lockfile.mcp_servers {
5733 assert_eq!(
5734 server.installed_at, ".mcp.json",
5735 "MCP servers should reference the config file where they're configured"
5736 );
5737 }
5738 }
5739
5740 #[tokio::test]
5741 async fn test_resolve_commands_resource_type() {
5742 let temp_dir = TempDir::new().unwrap();
5743 let mut manifest = Manifest::new();
5744 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
5745
5746 // Add command dependencies
5747 manifest.commands.insert(
5748 "deploy".to_string(),
5749 ResourceDependency::Simple("../commands/deploy.md".to_string()),
5750 );
5751 manifest.commands.insert(
5752 "lint".to_string(),
5753 ResourceDependency::Simple("../commands/lint.md".to_string()),
5754 );
5755
5756 // Create dummy command files to allow transitive dependency extraction
5757 let commands_dir = temp_dir.path().parent().unwrap().join("commands");
5758 std::fs::create_dir_all(&commands_dir).unwrap();
5759 std::fs::write(commands_dir.join("deploy.md"), "# Deploy").unwrap();
5760 std::fs::write(commands_dir.join("lint.md"), "# Lint").unwrap();
5761
5762 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5763 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5764
5765 let lockfile = resolver.resolve().await.unwrap();
5766 assert_eq!(lockfile.commands.len(), 2);
5767
5768 // Check that commands are installed to the correct location
5769 for command in &lockfile.commands {
5770 // Normalize path separators for cross-platform testing
5771 let normalized_path = normalize_path_for_storage(&command.installed_at);
5772 assert!(normalized_path.contains(".claude/commands/"));
5773 assert!(command.installed_at.ends_with(".md"));
5774 }
5775 }
5776
5777 #[tokio::test]
5778 async fn test_checkout_version_with_constraint() {
5779 let temp_dir = TempDir::new().unwrap();
5780
5781 // Create a git repo with multiple version tags
5782 let source_dir = temp_dir.path().join("test-source");
5783 std::fs::create_dir_all(&source_dir).unwrap();
5784 std::process::Command::new("git")
5785 .args(["init"])
5786 .current_dir(&source_dir)
5787 .output()
5788 .expect("Failed to initialize git repository");
5789
5790 // Configure git
5791 std::process::Command::new("git")
5792 .args(["config", "user.email", "test@example.com"])
5793 .current_dir(&source_dir)
5794 .output()
5795 .unwrap();
5796 std::process::Command::new("git")
5797 .args(["config", "user.name", "Test User"])
5798 .current_dir(&source_dir)
5799 .output()
5800 .unwrap();
5801
5802 // Create file and make commits with version tags
5803 let test_file = source_dir.join("test.txt");
5804
5805 // v1.0.0
5806 std::fs::write(&test_file, "v1.0.0").unwrap();
5807 std::process::Command::new("git")
5808 .args(["add", "."])
5809 .current_dir(&source_dir)
5810 .output()
5811 .unwrap();
5812 std::process::Command::new("git")
5813 .args(["commit", "-m", "v1.0.0"])
5814 .current_dir(&source_dir)
5815 .output()
5816 .unwrap();
5817 std::process::Command::new("git")
5818 .args(["tag", "v1.0.0"])
5819 .current_dir(&source_dir)
5820 .output()
5821 .unwrap();
5822
5823 // v1.1.0
5824 std::fs::write(&test_file, "v1.1.0").unwrap();
5825 std::process::Command::new("git")
5826 .args(["add", "."])
5827 .current_dir(&source_dir)
5828 .output()
5829 .unwrap();
5830 std::process::Command::new("git")
5831 .args(["commit", "-m", "v1.1.0"])
5832 .current_dir(&source_dir)
5833 .output()
5834 .unwrap();
5835 std::process::Command::new("git")
5836 .args(["tag", "v1.1.0"])
5837 .current_dir(&source_dir)
5838 .output()
5839 .unwrap();
5840
5841 // v1.2.0
5842 std::fs::write(&test_file, "v1.2.0").unwrap();
5843 std::process::Command::new("git")
5844 .args(["add", "."])
5845 .current_dir(&source_dir)
5846 .output()
5847 .unwrap();
5848 std::process::Command::new("git")
5849 .args(["commit", "-m", "v1.2.0"])
5850 .current_dir(&source_dir)
5851 .output()
5852 .unwrap();
5853 std::process::Command::new("git")
5854 .args(["tag", "v1.2.0"])
5855 .current_dir(&source_dir)
5856 .output()
5857 .unwrap();
5858
5859 // v2.0.0
5860 std::fs::write(&test_file, "v2.0.0").unwrap();
5861 std::process::Command::new("git")
5862 .args(["add", "."])
5863 .current_dir(&source_dir)
5864 .output()
5865 .unwrap();
5866 std::process::Command::new("git")
5867 .args(["commit", "-m", "v2.0.0"])
5868 .current_dir(&source_dir)
5869 .output()
5870 .unwrap();
5871 std::process::Command::new("git")
5872 .args(["tag", "v2.0.0"])
5873 .current_dir(&source_dir)
5874 .output()
5875 .unwrap();
5876
5877 let mut manifest = Manifest::new();
5878 // Use file:// URL to ensure it's treated as a Git source, not a local path
5879 let source_url = format!("file://{}", source_dir.display());
5880 manifest.add_source("test".to_string(), source_url);
5881
5882 // Test version constraint resolution (^1.0.0 should resolve to 1.2.0)
5883 manifest.add_dependency(
5884 "constrained-dep".to_string(),
5885 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5886 source: Some("test".to_string()),
5887 path: "test.txt".to_string(),
5888 version: Some("^1.0.0".to_string()), // Constraint: compatible with 1.x.x
5889 branch: None,
5890 rev: None,
5891 command: None,
5892 args: None,
5893 target: None,
5894 filename: None,
5895 dependencies: None,
5896 tool: Some("claude-code".to_string()),
5897 flatten: None,
5898 install: None,
5899 })),
5900 true,
5901 );
5902
5903 let cache_dir = temp_dir.path().join("cache");
5904 let cache = Cache::with_dir(cache_dir).unwrap();
5905 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5906
5907 let lockfile = resolver.resolve().await.unwrap();
5908 assert_eq!(lockfile.agents.len(), 1);
5909
5910 let agent = &lockfile.agents[0];
5911 // Should resolve to highest 1.x version (1.2.0), not 2.0.0
5912 assert_eq!(agent.version.as_ref().unwrap(), "v1.2.0");
5913 }
5914
5915 #[tokio::test]
5916 async fn test_verify_absolute_path_error() {
5917 let mut manifest = Manifest::new();
5918
5919 // Add dependency with non-existent absolute path
5920 // Use platform-specific absolute path
5921 let nonexistent_path = if cfg!(windows) {
5922 "C:\\nonexistent\\path\\agent.md"
5923 } else {
5924 "/nonexistent/path/agent.md"
5925 };
5926
5927 manifest.add_dependency(
5928 "missing-agent".to_string(),
5929 ResourceDependency::Simple(nonexistent_path.to_string()),
5930 true,
5931 );
5932
5933 let temp_dir = TempDir::new().unwrap();
5934 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5935 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5936
5937 let result = resolver.verify();
5938 assert!(result.is_err());
5939 assert!(result.unwrap_err().to_string().contains("not found"));
5940 }
5941
5942 #[tokio::test]
5943 async fn test_resolve_pattern_dependency_error() {
5944 let mut manifest = Manifest::new();
5945
5946 // Add pattern dependency without source (should error in resolve_pattern_dependency)
5947 manifest.add_dependency(
5948 "pattern-dep".to_string(),
5949 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
5950 source: Some("nonexistent".to_string()),
5951 path: "agents/*.md".to_string(), // Pattern path
5952 version: None,
5953 branch: None,
5954 rev: None,
5955 command: None,
5956 args: None,
5957 target: None,
5958 filename: None,
5959 dependencies: None,
5960 tool: Some("claude-code".to_string()),
5961 flatten: None,
5962 install: None,
5963 })),
5964 true,
5965 );
5966
5967 let temp_dir = TempDir::new().unwrap();
5968 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
5969 let mut resolver = DependencyResolver::with_cache(manifest, cache);
5970
5971 let result = resolver.resolve().await;
5972 assert!(result.is_err());
5973 }
5974
5975 #[tokio::test]
5976 async fn test_checkout_version_with_branch() {
5977 let temp_dir = TempDir::new().unwrap();
5978
5979 // Create a git repo with a branch
5980 let source_dir = temp_dir.path().join("test-source");
5981 std::fs::create_dir_all(&source_dir).unwrap();
5982 std::process::Command::new("git")
5983 .args(["init"])
5984 .current_dir(&source_dir)
5985 .output()
5986 .expect("Failed to initialize git repository");
5987
5988 // Configure git
5989 std::process::Command::new("git")
5990 .args(["config", "user.email", "test@example.com"])
5991 .current_dir(&source_dir)
5992 .output()
5993 .unwrap();
5994 std::process::Command::new("git")
5995 .args(["config", "user.name", "Test User"])
5996 .current_dir(&source_dir)
5997 .output()
5998 .unwrap();
5999
6000 // Create initial commit on main
6001 let test_file = source_dir.join("test.txt");
6002 std::fs::write(&test_file, "main").unwrap();
6003 std::process::Command::new("git")
6004 .args(["add", "."])
6005 .current_dir(&source_dir)
6006 .output()
6007 .unwrap();
6008 std::process::Command::new("git")
6009 .args(["commit", "-m", "Initial"])
6010 .current_dir(&source_dir)
6011 .output()
6012 .unwrap();
6013
6014 // Create and switch to develop branch
6015 std::process::Command::new("git")
6016 .args(["checkout", "-b", "develop"])
6017 .current_dir(&source_dir)
6018 .output()
6019 .unwrap();
6020
6021 // Make a commit on develop
6022 std::fs::write(&test_file, "develop").unwrap();
6023 std::process::Command::new("git")
6024 .args(["add", "."])
6025 .current_dir(&source_dir)
6026 .output()
6027 .unwrap();
6028 std::process::Command::new("git")
6029 .args(["commit", "-m", "Develop commit"])
6030 .current_dir(&source_dir)
6031 .output()
6032 .unwrap();
6033
6034 let mut manifest = Manifest::new();
6035 // Use file:// URL to ensure it's treated as a Git source, not a local path
6036 let source_url = format!("file://{}", source_dir.display());
6037 manifest.add_source("test".to_string(), source_url);
6038
6039 // Test branch checkout
6040 manifest.add_dependency(
6041 "branch-dep".to_string(),
6042 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
6043 source: Some("test".to_string()),
6044 path: "test.txt".to_string(),
6045 version: Some("develop".to_string()), // Branch name
6046 branch: None,
6047 rev: None,
6048 command: None,
6049 args: None,
6050 target: None,
6051 filename: None,
6052 dependencies: None,
6053 tool: Some("claude-code".to_string()),
6054 flatten: None,
6055 install: None,
6056 })),
6057 true,
6058 );
6059
6060 let cache_dir = temp_dir.path().join("cache");
6061 let cache = Cache::with_dir(cache_dir).unwrap();
6062 let mut resolver = DependencyResolver::with_cache(manifest, cache);
6063
6064 let lockfile = resolver.resolve().await.unwrap();
6065 assert_eq!(lockfile.agents.len(), 1);
6066
6067 // Should have resolved to develop branch
6068 let agent = &lockfile.agents[0];
6069 assert!(agent.resolved_commit.is_some());
6070 }
6071
6072 #[tokio::test]
6073 async fn test_checkout_version_with_commit_hash() {
6074 let temp_dir = TempDir::new().unwrap();
6075
6076 // Create a git repo
6077 let source_dir = temp_dir.path().join("test-source");
6078 std::fs::create_dir_all(&source_dir).unwrap();
6079 std::process::Command::new("git")
6080 .args(["init"])
6081 .current_dir(&source_dir)
6082 .output()
6083 .expect("Failed to initialize git repository");
6084
6085 // Configure git
6086 std::process::Command::new("git")
6087 .args(["config", "user.email", "test@example.com"])
6088 .current_dir(&source_dir)
6089 .output()
6090 .unwrap();
6091 std::process::Command::new("git")
6092 .args(["config", "user.name", "Test User"])
6093 .current_dir(&source_dir)
6094 .output()
6095 .unwrap();
6096
6097 // Create a commit
6098 let test_file = source_dir.join("test.txt");
6099 std::fs::write(&test_file, "content").unwrap();
6100 std::process::Command::new("git")
6101 .args(["add", "."])
6102 .current_dir(&source_dir)
6103 .output()
6104 .unwrap();
6105 std::process::Command::new("git")
6106 .args(["commit", "-m", "Test commit"])
6107 .current_dir(&source_dir)
6108 .output()
6109 .unwrap();
6110
6111 // Get the commit hash
6112 let output = std::process::Command::new("git")
6113 .args(["rev-parse", "HEAD"])
6114 .current_dir(&source_dir)
6115 .output()
6116 .unwrap();
6117 let commit_hash = String::from_utf8_lossy(&output.stdout).trim().to_string();
6118
6119 let mut manifest = Manifest::new();
6120 // Use file:// URL to ensure it's treated as a Git source, not a local path
6121 let source_url = format!("file://{}", source_dir.display());
6122 manifest.add_source("test".to_string(), source_url);
6123
6124 // Test commit hash checkout (use first 7 chars for short hash)
6125 manifest.add_dependency(
6126 "commit-dep".to_string(),
6127 ResourceDependency::Detailed(Box::new(crate::manifest::DetailedDependency {
6128 source: Some("test".to_string()),
6129 path: "test.txt".to_string(),
6130 version: Some(commit_hash[..7].to_string()), // Short commit hash
6131 branch: None,
6132 rev: None,
6133 command: None,
6134 args: None,
6135 target: None,
6136 filename: None,
6137 dependencies: None,
6138 tool: Some("claude-code".to_string()),
6139 flatten: None,
6140 install: None,
6141 })),
6142 true,
6143 );
6144
6145 let cache_dir = temp_dir.path().join("cache");
6146 let cache = Cache::with_dir(cache_dir).unwrap();
6147 let mut resolver = DependencyResolver::with_cache(manifest, cache);
6148
6149 let lockfile = resolver.resolve().await.unwrap();
6150 assert_eq!(lockfile.agents.len(), 1);
6151
6152 let agent = &lockfile.agents[0];
6153 assert!(agent.resolved_commit.is_some());
6154 // The resolved commit should start with our short hash
6155 assert!(agent.resolved_commit.as_ref().unwrap().starts_with(&commit_hash[..7]));
6156 }
6157
6158 #[tokio::test]
6159 async fn test_mixed_resource_types() {
6160 let temp_dir = TempDir::new().unwrap();
6161 let mut manifest = Manifest::new();
6162 manifest.manifest_dir = Some(temp_dir.path().to_path_buf());
6163
6164 // Add various resource types
6165 manifest.add_dependency(
6166 "agent1".to_string(),
6167 ResourceDependency::Simple("../agents/a1.md".to_string()),
6168 true,
6169 );
6170
6171 manifest.scripts.insert(
6172 "build".to_string(),
6173 ResourceDependency::Simple("../scripts/build.sh".to_string()),
6174 );
6175
6176 manifest.hooks.insert(
6177 "pre-commit".to_string(),
6178 ResourceDependency::Simple("../hooks/pre-commit.json".to_string()),
6179 );
6180
6181 manifest.commands.insert(
6182 "deploy".to_string(),
6183 ResourceDependency::Simple("../commands/deploy.md".to_string()),
6184 );
6185
6186 manifest.mcp_servers.insert(
6187 "filesystem".to_string(),
6188 ResourceDependency::Simple("../mcp/filesystem.json".to_string()),
6189 );
6190
6191 // Create dummy files for all resource types to allow transitive dependency extraction
6192 let parent = temp_dir.path().parent().unwrap();
6193 std::fs::create_dir_all(parent.join("agents")).unwrap();
6194 std::fs::create_dir_all(parent.join("scripts")).unwrap();
6195 std::fs::create_dir_all(parent.join("hooks")).unwrap();
6196 std::fs::create_dir_all(parent.join("commands")).unwrap();
6197 std::fs::create_dir_all(parent.join("mcp")).unwrap();
6198 std::fs::write(parent.join("agents/a1.md"), "# Agent").unwrap();
6199 std::fs::write(parent.join("scripts/build.sh"), "#!/bin/bash").unwrap();
6200 std::fs::write(parent.join("hooks/pre-commit.json"), "{}").unwrap();
6201 std::fs::write(parent.join("commands/deploy.md"), "# Deploy").unwrap();
6202 std::fs::write(parent.join("mcp/filesystem.json"), "{}").unwrap();
6203
6204 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
6205 let mut resolver = DependencyResolver::with_cache(manifest, cache);
6206
6207 let lockfile = resolver.resolve().await.unwrap();
6208
6209 // Check all resource types are resolved
6210 assert_eq!(lockfile.agents.len(), 1);
6211 assert_eq!(lockfile.scripts.len(), 1);
6212 assert_eq!(lockfile.hooks.len(), 1);
6213 assert_eq!(lockfile.commands.len(), 1);
6214 assert_eq!(lockfile.mcp_servers.len(), 1);
6215 }
6216
6217 #[tokio::test]
6218 async fn test_resolve_version_conflict_semver_preference() {
6219 let manifest = Manifest::new();
6220 let temp_dir = TempDir::new().unwrap();
6221 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
6222 let resolver = DependencyResolver::with_cache(manifest, cache);
6223
6224 // Test: semver version preferred over git branch
6225 let existing_semver = ResourceDependency::Detailed(Box::new(DetailedDependency {
6226 source: Some("test".to_string()),
6227 path: "agents/test.md".to_string(),
6228 version: Some("v1.0.0".to_string()),
6229 branch: None,
6230 rev: None,
6231 command: None,
6232 args: None,
6233 target: None,
6234 filename: None,
6235 dependencies: None,
6236 tool: Some("claude-code".to_string()),
6237 flatten: None,
6238 install: None,
6239 }));
6240
6241 let new_branch = ResourceDependency::Detailed(Box::new(DetailedDependency {
6242 source: Some("test".to_string()),
6243 path: "agents/test.md".to_string(),
6244 version: None,
6245 branch: Some("main".to_string()),
6246 rev: None,
6247 command: None,
6248 args: None,
6249 target: None,
6250 filename: None,
6251 dependencies: None,
6252 tool: Some("claude-code".to_string()),
6253 flatten: None,
6254 install: None,
6255 }));
6256
6257 let result = resolver
6258 .resolve_version_conflict("test-agent", &existing_semver, &new_branch, "app1")
6259 .await;
6260 assert!(result.is_ok(), "Should succeed with semver preference");
6261 let resolved = result.unwrap();
6262 assert_eq!(
6263 resolved.get_version(),
6264 Some("v1.0.0"),
6265 "Should prefer semver version over branch"
6266 );
6267
6268 // Test reverse: git branch vs semver version
6269 let result2 = resolver
6270 .resolve_version_conflict("test-agent", &new_branch, &existing_semver, "app2")
6271 .await;
6272 assert!(result2.is_ok(), "Should succeed with semver preference");
6273 let resolved2 = result2.unwrap();
6274 assert_eq!(
6275 resolved2.get_version(),
6276 Some("v1.0.0"),
6277 "Should prefer semver version over branch (reversed order)"
6278 );
6279 }
6280
6281 #[tokio::test]
6282 async fn test_resolve_version_conflict_semver_comparison() {
6283 let manifest = Manifest::new();
6284 let temp_dir = TempDir::new().unwrap();
6285 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
6286 let resolver = DependencyResolver::with_cache(manifest, cache);
6287
6288 // Test: higher semver version wins
6289 let existing_v1 = ResourceDependency::Detailed(Box::new(DetailedDependency {
6290 source: Some("test".to_string()),
6291 path: "agents/test.md".to_string(),
6292 version: Some("v1.5.0".to_string()),
6293 branch: None,
6294 rev: None,
6295 command: None,
6296 args: None,
6297 target: None,
6298 filename: None,
6299 dependencies: None,
6300 tool: Some("claude-code".to_string()),
6301 flatten: None,
6302 install: None,
6303 }));
6304
6305 let new_v2 = ResourceDependency::Detailed(Box::new(DetailedDependency {
6306 source: Some("test".to_string()),
6307 path: "agents/test.md".to_string(),
6308 version: Some("v2.0.0".to_string()),
6309 branch: None,
6310 rev: None,
6311 command: None,
6312 args: None,
6313 target: None,
6314 filename: None,
6315 dependencies: None,
6316 tool: Some("claude-code".to_string()),
6317 flatten: None,
6318 install: None,
6319 }));
6320
6321 let result =
6322 resolver.resolve_version_conflict("test-agent", &existing_v1, &new_v2, "app1").await;
6323 assert!(result.is_ok(), "Should succeed with higher version");
6324 let resolved = result.unwrap();
6325 assert_eq!(resolved.get_version(), Some("v2.0.0"), "Should use higher semver version");
6326
6327 // Test reverse order
6328 let result2 =
6329 resolver.resolve_version_conflict("test-agent", &new_v2, &existing_v1, "app2").await;
6330 assert!(result2.is_ok(), "Should succeed with higher version");
6331 let resolved2 = result2.unwrap();
6332 assert_eq!(
6333 resolved2.get_version(),
6334 Some("v2.0.0"),
6335 "Should use higher semver version (reversed order)"
6336 );
6337 }
6338
6339 #[tokio::test]
6340 async fn test_resolve_version_conflict_git_refs() {
6341 let manifest = Manifest::new();
6342 let temp_dir = TempDir::new().unwrap();
6343 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
6344 let resolver = DependencyResolver::with_cache(manifest, cache);
6345
6346 // Test: git refs use alphabetical ordering
6347 let existing_main = ResourceDependency::Detailed(Box::new(DetailedDependency {
6348 source: Some("test".to_string()),
6349 path: "agents/test.md".to_string(),
6350 version: None,
6351 branch: Some("main".to_string()),
6352 rev: None,
6353 command: None,
6354 args: None,
6355 target: None,
6356 filename: None,
6357 dependencies: None,
6358 tool: Some("claude-code".to_string()),
6359 flatten: None,
6360 install: None,
6361 }));
6362
6363 let new_develop = ResourceDependency::Detailed(Box::new(DetailedDependency {
6364 source: Some("test".to_string()),
6365 path: "agents/test.md".to_string(),
6366 version: None,
6367 branch: Some("develop".to_string()),
6368 rev: None,
6369 command: None,
6370 args: None,
6371 target: None,
6372 filename: None,
6373 dependencies: None,
6374 tool: Some("claude-code".to_string()),
6375 flatten: None,
6376 install: None,
6377 }));
6378
6379 let result = resolver
6380 .resolve_version_conflict("test-agent", &existing_main, &new_develop, "app1")
6381 .await;
6382 assert!(result.is_ok(), "Should succeed with alphabetical ordering");
6383 let resolved = result.unwrap();
6384 assert_eq!(
6385 resolved.get_version(),
6386 Some("develop"),
6387 "Should use alphabetically first git ref"
6388 );
6389 }
6390
6391 #[tokio::test]
6392 async fn test_resolve_version_conflict_head_vs_specific() {
6393 let manifest = Manifest::new();
6394 let temp_dir = TempDir::new().unwrap();
6395 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
6396 let resolver = DependencyResolver::with_cache(manifest, cache);
6397
6398 // Test: specific version preferred over HEAD (None)
6399 let existing_head = ResourceDependency::Simple("agents/test.md".to_string());
6400
6401 let new_specific = ResourceDependency::Detailed(Box::new(DetailedDependency {
6402 source: Some("test".to_string()),
6403 path: "agents/test.md".to_string(),
6404 version: Some("v1.0.0".to_string()),
6405 branch: None,
6406 rev: None,
6407 command: None,
6408 args: None,
6409 target: None,
6410 filename: None,
6411 dependencies: None,
6412 tool: Some("claude-code".to_string()),
6413 flatten: None,
6414 install: None,
6415 }));
6416
6417 let result = resolver
6418 .resolve_version_conflict("test-agent", &existing_head, &new_specific, "app1")
6419 .await;
6420 assert!(result.is_ok(), "Should succeed with specific version");
6421 let resolved = result.unwrap();
6422 assert_eq!(
6423 resolved.get_version(),
6424 Some("v1.0.0"),
6425 "Should prefer specific version over HEAD"
6426 );
6427 }
6428
6429 #[test]
6430 fn test_generate_dependency_name_manifest_relative() {
6431 let manifest = Manifest::new();
6432 let temp_dir = TempDir::new().unwrap();
6433 let cache = Cache::with_dir(temp_dir.path().to_path_buf()).unwrap();
6434 let resolver = DependencyResolver::with_cache(manifest, cache);
6435
6436 // Regular relative paths - strip resource type directory
6437 assert_eq!(resolver.generate_dependency_name("agents/helper.md"), "helper");
6438 assert_eq!(resolver.generate_dependency_name("snippets/utils.md"), "utils");
6439
6440 // Cross-directory paths with ../ - keep all components to avoid collisions
6441 assert_eq!(resolver.generate_dependency_name("../shared/utils.md"), "../shared/utils");
6442 assert_eq!(resolver.generate_dependency_name("../other/utils.md"), "../other/utils");
6443
6444 // Ensure different cross-directory paths get different names
6445 let name1 = resolver.generate_dependency_name("../shared/foo.md");
6446 let name2 = resolver.generate_dependency_name("../other/foo.md");
6447 assert_ne!(name1, name2, "Different parent directories should produce different names");
6448 }
6449}