Skip to main content

harn_cli/package/
registry.rs

1use super::errors::PackageError;
2use super::*;
3
/// Metadata persisted alongside a cached git checkout (`CACHE_METADATA_FILE`),
/// written by `write_cache_metadata` and validated by `read_cache_metadata`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct PackageCacheMetadata {
    // Schema version; must equal CACHE_METADATA_VERSION when read back.
    version: u32,
    // Git source URL the checkout was fetched from.
    source: String,
    // Commit the cache directory was materialized at.
    commit: String,
    // Content hash of the cached tree (see `compute_content_hash`).
    content_hash: String,
    // Wall-clock creation time, milliseconds since the Unix epoch.
    cached_at_unix_ms: u128,
}
12
/// Top-level shape of a package registry index document (TOML).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct PackageRegistryIndex {
    // Index schema version; must equal REGISTRY_INDEX_VERSION (checked in
    // `parse_package_registry_index`).
    version: u32,
    // Serialized as repeated `[[package]]` tables; defaults to empty.
    #[serde(default, rename = "package")]
    packages: Vec<RegistryPackage>,
}
19
/// One `[[package]]` entry in the registry index.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct RegistryPackage {
    // Registry package name, e.g. `@scope/name` or `name`
    // (validated by `is_valid_registry_package_name`).
    name: String,
    #[serde(default)]
    description: Option<String>,
    // Canonical repository URL; validated via `normalize_git_url`.
    repository: String,
    #[serde(default)]
    license: Option<String>,
    // Supported harn version/range; aliases accept older field spellings.
    #[serde(default, alias = "harn_version", alias = "harn_version_range")]
    harn: Option<String>,
    // Symbols the package exports; also searched by `registry_package_matches`.
    #[serde(default)]
    exports: Vec<String>,
    #[serde(default, alias = "connector-contract")]
    connector_contract: Option<String>,
    #[serde(default)]
    docs_url: Option<String>,
    #[serde(default)]
    checksum: Option<String>,
    #[serde(default)]
    provenance: Option<String>,
    // Serialized as repeated `[[package.version]]` tables.
    #[serde(default, rename = "version")]
    versions: Vec<RegistryPackageVersion>,
}
43
/// One published version of a registry package.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct RegistryPackageVersion {
    // Version label; must be non-empty and unique per package.
    version: String,
    // Git source for this version; validated via `normalize_git_url`.
    git: String,
    // At least one of `rev`/`branch` is required (enforced during validation).
    #[serde(default)]
    rev: Option<String>,
    #[serde(default)]
    branch: Option<String>,
    // Optional package name override used when deriving the dependency alias.
    #[serde(default)]
    package: Option<String>,
    #[serde(default)]
    checksum: Option<String>,
    #[serde(default)]
    provenance: Option<String>,
    // Yanked versions are skipped by `latest_registry_version` and rejected
    // when explicitly requested as a dependency.
    #[serde(default)]
    yanked: bool,
}
61
/// Lookup result for a registry query: the package plus the version that was
/// selected (explicitly requested, or latest non-yanked; `None` if none).
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
pub(crate) struct RegistryPackageInfo {
    package: RegistryPackage,
    selected_version: Option<RegistryPackageVersion>,
}
67
68pub(crate) fn manifest_has_git_dependencies(manifest: &Manifest) -> bool {
69    manifest
70        .dependencies
71        .values()
72        .any(|dependency| dependency.git_url().is_some())
73}
74
75pub(crate) fn ensure_git_available() -> Result<(), PackageError> {
76    process::Command::new("git")
77        .arg("--version")
78        .env_remove("GIT_DIR")
79        .env_remove("GIT_WORK_TREE")
80        .env_remove("GIT_INDEX_FILE")
81        .output()
82        .map(|_| ())
83        .map_err(|_| {
84            PackageError::Registry(
85                "git is required for git dependencies but was not found in PATH".to_string(),
86            )
87        })
88}
89
90pub(crate) fn cache_root() -> Result<PathBuf, PackageError> {
91    PackageWorkspace::from_current_dir()?.cache_root()
92}
93
94pub(crate) fn sha256_hex(bytes: impl AsRef<[u8]>) -> String {
95    hex_bytes(Sha256::digest(bytes.as_ref()))
96}
97
/// Encodes `bytes` as a lowercase hexadecimal string (two digits per byte).
pub(crate) fn hex_bytes(bytes: impl AsRef<[u8]>) -> String {
    bytes
        .as_ref()
        .iter()
        .flat_map(|&byte| {
            // Both nibbles are < 16, so from_digit cannot fail.
            let high = char::from_digit(u32::from(byte >> 4), 16).expect("nibble < 16");
            let low = char::from_digit(u32::from(byte & 0x0f), 16).expect("nibble < 16");
            [high, low]
        })
        .collect()
}
108
109pub(crate) fn git_cache_dir_in(
110    workspace: &PackageWorkspace,
111    source: &str,
112    commit: &str,
113) -> Result<PathBuf, PackageError> {
114    Ok(workspace
115        .cache_root()?
116        .join("git")
117        .join(sha256_hex(source))
118        .join(commit))
119}
120
121pub(crate) fn git_cache_lock_path_in(
122    workspace: &PackageWorkspace,
123    source: &str,
124    commit: &str,
125) -> Result<PathBuf, PackageError> {
126    Ok(workspace
127        .cache_root()?
128        .join("locks")
129        .join(format!("{}-{commit}.lock", sha256_hex(source))))
130}
131
/// Creates (if needed) and exclusively locks the lock file guarding the
/// cache entry for (`source`, `commit`).
///
/// The returned `File` holds the lock; dropping it releases the lock.
/// NOTE(review): assumes `lock_exclusive` blocks until the lock is
/// available — confirm against the file-locking crate in use.
pub(crate) fn acquire_git_cache_lock_in(
    workspace: &PackageWorkspace,
    source: &str,
    commit: &str,
) -> Result<File, PackageError> {
    let path = git_cache_lock_path_in(workspace, source, commit)?;
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)
            .map_err(|error| format!("failed to create {}: {error}", parent.display()))?;
    }
    // Truncating create is fine: only the lock matters, not the contents.
    let file = File::create(&path)
        .map_err(|error| format!("failed to open {}: {error}", path.display()))?;
    file.lock_exclusive()
        .map_err(|error| format!("failed to lock {}: {error}", path.display()))?;
    Ok(file)
}
148
149pub(crate) fn read_cached_content_hash(dir: &Path) -> Result<Option<String>, PackageError> {
150    let path = dir.join(CONTENT_HASH_FILE);
151    match fs::read_to_string(&path) {
152        Ok(value) => Ok(Some(value.trim().to_string())),
153        Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(None),
154        Err(error) => Err(format!("failed to read {}: {error}", path.display()).into()),
155    }
156}
157
158pub(crate) fn write_cached_content_hash(dir: &Path, hash: &str) -> Result<(), PackageError> {
159    let path = dir.join(CONTENT_HASH_FILE);
160    harn_vm::atomic_io::atomic_write(&path, format!("{hash}\n").as_bytes()).map_err(|error| {
161        PackageError::Registry(format!("failed to write {}: {error}", path.display()))
162    })
163}
164
/// Reads and validates the cache metadata file inside `dir`.
///
/// Returns `Ok(None)` when the file does not exist; errors on read/parse
/// failure or on a metadata schema-version mismatch.
pub(crate) fn read_cache_metadata(
    dir: &Path,
) -> Result<Option<PackageCacheMetadata>, PackageError> {
    let path = dir.join(CACHE_METADATA_FILE);
    let content = match fs::read_to_string(&path) {
        Ok(content) => content,
        // Missing metadata is not an error: the cache entry predates it or
        // was never finalized.
        Err(error) if error.kind() == std::io::ErrorKind::NotFound => return Ok(None),
        Err(error) => return Err(format!("failed to read {}: {error}", path.display()).into()),
    };
    let metadata = toml::from_str::<PackageCacheMetadata>(&content)
        .map_err(|error| format!("failed to parse {}: {error}", path.display()))?;
    // Reject metadata written by an incompatible schema version.
    if metadata.version != CACHE_METADATA_VERSION {
        return Err(format!(
            "unsupported {} version {} (expected {})",
            path.display(),
            metadata.version,
            CACHE_METADATA_VERSION
        )
        .into());
    }
    Ok(Some(metadata))
}
187
/// Serializes and atomically writes cache metadata for the checkout in `dir`.
///
/// Records the source URL, commit, content hash, and the current wall-clock
/// time (ms since the Unix epoch) under the current metadata schema version.
pub(crate) fn write_cache_metadata(
    dir: &Path,
    source: &str,
    commit: &str,
    content_hash: &str,
) -> Result<(), PackageError> {
    // Fails only if the system clock reads before the Unix epoch.
    let cached_at_unix_ms = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map_err(|error| format!("system clock error: {error}"))?
        .as_millis();
    let metadata = PackageCacheMetadata {
        version: CACHE_METADATA_VERSION,
        source: source.to_string(),
        commit: commit.to_string(),
        content_hash: content_hash.to_string(),
        cached_at_unix_ms,
    };
    let body = toml::to_string_pretty(&metadata)
        .map_err(|error| format!("failed to encode cache metadata: {error}"))?;
    let path = dir.join(CACHE_METADATA_FILE);
    // Atomic write so a concurrent reader never sees a half-written file.
    harn_vm::atomic_io::atomic_write(&path, body.as_bytes()).map_err(|error| {
        PackageError::Registry(format!("failed to write {}: {error}", path.display()))
    })
}
212
/// Renders `path` with `/` separators regardless of platform so derived
/// values (e.g. content hashes) are stable across operating systems.
pub(crate) fn normalized_relative_path(path: &Path) -> String {
    let mut rendered = String::new();
    for (index, component) in path.components().enumerate() {
        if index > 0 {
            rendered.push('/');
        }
        rendered.push_str(&component.as_os_str().to_string_lossy());
    }
    rendered
}
219
/// Recursively gathers, into `out`, the root-relative paths of all regular
/// files under `cursor` that participate in content hashing.
///
/// Skips `.git`, `.gitignore`, and the cache marker/metadata files so the
/// hash covers only package content. Entries that are neither directories
/// nor regular files (e.g. symlinks) are ignored.
pub(crate) fn collect_hashable_files(
    root: &Path,
    cursor: &Path,
    out: &mut Vec<PathBuf>,
) -> Result<(), PackageError> {
    for entry in fs::read_dir(cursor)
        .map_err(|error| format!("failed to read {}: {error}", cursor.display()))?
    {
        let entry =
            entry.map_err(|error| format!("failed to read {} entry: {error}", cursor.display()))?;
        let path = entry.path();
        let file_type = entry
            .file_type()
            .map_err(|error| format!("failed to stat {}: {error}", path.display()))?;
        let name = entry.file_name();
        // Exclude VCS files and our own bookkeeping files from the hash.
        if name == OsStr::new(".git")
            || name == OsStr::new(".gitignore")
            || name == OsStr::new(CONTENT_HASH_FILE)
            || name == OsStr::new(CACHE_METADATA_FILE)
        {
            continue;
        }
        if file_type.is_dir() {
            collect_hashable_files(root, &path, out)?;
        } else if file_type.is_file() {
            // Store paths relative to `root` so the result is
            // location-independent.
            let relative = path
                .strip_prefix(root)
                .map_err(|error| format!("failed to relativize {}: {error}", path.display()))?;
            out.push(relative.to_path_buf());
        }
    }
    Ok(())
}
253
/// Computes a deterministic `sha256:<hex>` hash of the package tree at `dir`.
///
/// Files are enumerated via `collect_hashable_files`, sorted by relative
/// path, then folded into a single SHA-256 as
/// `normalized_path || 0x00 || hex(sha256(contents))` per file.
/// Do not change this layout: it would invalidate every existing cache hash.
pub(crate) fn compute_content_hash(dir: &Path) -> Result<String, PackageError> {
    let mut files = Vec::new();
    collect_hashable_files(dir, dir, &mut files)?;
    // Sort so the hash is independent of directory iteration order.
    files.sort();
    let mut hasher = Sha256::new();
    for relative in files {
        let normalized = normalized_relative_path(&relative);
        let contents = fs::read(dir.join(&relative)).map_err(|error| {
            format!("failed to read {}: {error}", dir.join(&relative).display())
        })?;
        hasher.update(normalized.as_bytes());
        // NUL separator keeps path and content bytes unambiguous.
        hasher.update([0]);
        hasher.update(sha256_hex(contents).as_bytes());
    }
    Ok(format!("sha256:{}", hex_bytes(hasher.finalize())))
}
270
/// Recomputes the content hash of `dir` and errors unless it equals
/// `expected`; on success, refreshes the on-disk hash marker if stale.
pub(crate) fn verify_content_hash_or_compute(
    dir: &Path,
    expected: &str,
) -> Result<(), PackageError> {
    let actual = compute_content_hash(dir)?;
    if actual != expected {
        return Err(format!(
            "content hash mismatch for {}: expected {}, got {}",
            dir.display(),
            expected,
            actual
        )
        .into());
    }
    // Only rewrite the marker when missing or out of date, avoiding
    // needless writes on the happy path.
    if read_cached_content_hash(dir)?.as_deref() != Some(expected) {
        write_cached_content_hash(dir, expected)?;
    }
    Ok(())
}
290
/// Recursively copies `src` into `dst`, creating `dst` if needed.
///
/// Skips `.git` and the cache marker/metadata files; entries that are
/// neither directories nor regular files (e.g. symlinks) are ignored.
pub(crate) fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<(), PackageError> {
    fs::create_dir_all(dst)
        .map_err(|error| format!("failed to create {}: {error}", dst.display()))?;
    for entry in
        fs::read_dir(src).map_err(|error| format!("failed to read {}: {error}", src.display()))?
    {
        let entry =
            entry.map_err(|error| format!("failed to read {} entry: {error}", src.display()))?;
        let ty = entry
            .file_type()
            .map_err(|error| format!("failed to stat {}: {error}", entry.path().display()))?;
        let name = entry.file_name();
        // Never copy VCS data or our own bookkeeping files into the target.
        if name == OsStr::new(".git")
            || name == OsStr::new(CONTENT_HASH_FILE)
            || name == OsStr::new(CACHE_METADATA_FILE)
        {
            continue;
        }
        let dest_path = dst.join(entry.file_name());
        if ty.is_dir() {
            copy_dir_recursive(&entry.path(), &dest_path)?;
        } else if ty.is_file() {
            if let Some(parent) = dest_path.parent() {
                fs::create_dir_all(parent)
                    .map_err(|error| format!("failed to create {}: {error}", parent.display()))?;
            }
            fs::copy(entry.path(), &dest_path).map_err(|error| {
                format!(
                    "failed to copy {} to {}: {error}",
                    entry.path().display(),
                    dest_path.display()
                )
            })?;
        }
    }
    Ok(())
}
328
329pub(crate) fn remove_materialized_package(
330    packages_dir: &Path,
331    alias: &str,
332) -> Result<(), PackageError> {
333    remove_materialized_path(&packages_dir.join(alias))?;
334    remove_materialized_path(&packages_dir.join(format!("{alias}.harn")))?;
335    Ok(())
336}
337
/// Removes whatever occupies `path` — symlink/reparse point, file, or
/// directory — treating a missing path as success.
///
/// Uses `symlink_metadata` (not `metadata`) so a link is detected and
/// removed itself rather than following it; the link-like check must stay
/// first, before the file/dir arms.
fn remove_materialized_path(path: &Path) -> Result<(), PackageError> {
    match fs::symlink_metadata(path) {
        Ok(metadata) if is_link_like(&metadata) => remove_link_like_path(path)
            .map_err(|error| format!("failed to remove {}: {error}", path.display()).into()),
        Ok(metadata) if metadata.is_file() => fs::remove_file(path)
            .map_err(|error| format!("failed to remove {}: {error}", path.display()).into()),
        Ok(metadata) if metadata.is_dir() => fs::remove_dir_all(path)
            .map_err(|error| format!("failed to remove {}: {error}", path.display()).into()),
        // Some other entry type (e.g. fifo/socket): leave it alone.
        Ok(_) => Ok(()),
        Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(()),
        Err(error) => Err(format!("failed to stat {}: {error}", path.display()).into()),
    }
}
351
/// True when `metadata` describes a symlink, or on Windows any reparse
/// point (junctions included).
fn is_link_like(metadata: &fs::Metadata) -> bool {
    metadata.file_type().is_symlink() || is_windows_reparse_point(metadata)
}
355
/// Windows: detects reparse points (junctions, mount points) that
/// `is_symlink` alone would miss.
#[cfg(windows)]
fn is_windows_reparse_point(metadata: &fs::Metadata) -> bool {
    use std::os::windows::fs::MetadataExt;

    // FILE_ATTRIBUTE_REPARSE_POINT from the Win32 file attribute flags.
    const FILE_ATTRIBUTE_REPARSE_POINT: u32 = 0x400;
    metadata.file_attributes() & FILE_ATTRIBUTE_REPARSE_POINT != 0
}
363
/// Non-Windows stub: reparse points do not exist, so never link-like here.
#[cfg(not(windows))]
fn is_windows_reparse_point(_metadata: &fs::Metadata) -> bool {
    false
}
368
/// Removes a link-like entry without knowing whether the OS exposes it as a
/// file or a directory entry; tries file removal first, then directory
/// removal, and reports the original file-removal error if both fail.
fn remove_link_like_path(path: &Path) -> std::io::Result<()> {
    let Err(file_error) = fs::remove_file(path) else {
        return Ok(());
    };
    fs::remove_dir(path).map_err(|_| file_error)
}
378
/// Unix: creates a symlink at `dest` pointing to `source`.
#[cfg(unix)]
pub(crate) fn symlink_path_dependency(source: &Path, dest: &Path) -> Result<(), PackageError> {
    std::os::unix::fs::symlink(source, dest).map_err(|error| {
        PackageError::Registry(format!(
            "failed to symlink {} to {}: {error}",
            source.display(),
            dest.display()
        ))
    })
}
389
/// Windows: creates a symlink at `dest` pointing to `source`, choosing the
/// directory or file symlink API based on what `source` currently is.
/// Callers fall back to copying when this fails (e.g. missing privilege).
#[cfg(windows)]
pub(crate) fn symlink_path_dependency(source: &Path, dest: &Path) -> Result<(), PackageError> {
    if source.is_dir() {
        std::os::windows::fs::symlink_dir(source, dest)
    } else {
        std::os::windows::fs::symlink_file(source, dest)
    }
    .map_err(|error| {
        PackageError::Registry(format!(
            "failed to symlink {} to {}: {error}",
            source.display(),
            dest.display()
        ))
    })
}
405
/// Fallback for platforms without symlink support: always errors, which
/// makes `materialize_path_dependency` fall back to copying.
#[cfg(not(any(unix, windows)))]
pub(crate) fn symlink_path_dependency(_source: &Path, _dest: &Path) -> Result<(), PackageError> {
    Err("symlinks are not supported on this platform"
        .to_string()
        .into())
}
412
/// Places a path dependency under `dest_root` as `alias` (for a directory
/// source) or `alias.harn` (for a single-file source), preferring a symlink
/// and falling back to a full copy when symlinking fails.
pub(crate) fn materialize_path_dependency(
    source: &Path,
    dest_root: &Path,
    alias: &str,
) -> Result<(), PackageError> {
    // Clear both possible previous forms so a stale directory cannot shadow
    // a file materialization or vice versa.
    remove_materialized_package(dest_root, alias)?;
    if source.is_dir() {
        let dest = dest_root.join(alias);
        match symlink_path_dependency(source, &dest) {
            Ok(()) => Ok(()),
            // Symlink unavailable (platform/permissions): copy instead.
            Err(_) => copy_dir_recursive(source, &dest),
        }
    } else {
        let dest = dest_root.join(format!("{alias}.harn"));
        if let Some(parent) = dest.parent() {
            fs::create_dir_all(parent)
                .map_err(|error| format!("failed to create {}: {error}", parent.display()))?;
        }
        match symlink_path_dependency(source, &dest) {
            Ok(()) => Ok(()),
            Err(_) => {
                fs::copy(source, &dest).map_err(|error| {
                    format!(
                        "failed to copy {} to {}: {error}",
                        source.display(),
                        dest.display()
                    )
                })?;
                Ok(())
            }
        }
    }
}
446
447pub(crate) fn materialized_hash_matches(dir: &Path, expected: &str) -> bool {
448    verify_content_hash_or_compute(dir, expected).is_ok()
449}
450
/// Resolves a `path = "..."` dependency string to a canonical absolute path.
///
/// Relative paths are interpreted against `manifest_dir`. When the path
/// does not exist and has no extension, a `.harn` sibling is tried before
/// giving up.
pub(crate) fn resolve_path_dependency_source(
    manifest_dir: &Path,
    raw: &str,
) -> Result<PathBuf, PackageError> {
    let source = {
        let candidate = PathBuf::from(raw);
        if candidate.is_absolute() {
            candidate
        } else {
            manifest_dir.join(candidate)
        }
    };
    if source.exists() {
        return source.canonicalize().map_err(|error| {
            PackageError::Registry(format!(
                "failed to canonicalize {}: {error}",
                source.display()
            ))
        });
    }
    // Allow shorthand like `path = "lib/foo"` for `lib/foo.harn`.
    if source.extension().is_none() {
        let with_ext = source.with_extension("harn");
        if with_ext.exists() {
            return with_ext.canonicalize().map_err(|error| {
                PackageError::Registry(format!(
                    "failed to canonicalize {}: {error}",
                    with_ext.display()
                ))
            });
        }
    }
    Err(format!("package source not found: {}", source.display()).into())
}
484
485pub(crate) fn path_source_uri(path: &Path) -> Result<String, PackageError> {
486    let url = Url::from_file_path(path)
487        .map_err(|_| format!("failed to convert {} to file:// URL", path.display()))?;
488    Ok(format!("path+{}", url))
489}
490
491pub(crate) fn path_from_source_uri(source: &str) -> Result<PathBuf, PackageError> {
492    let raw = source
493        .strip_prefix("path+")
494        .ok_or_else(|| format!("invalid path source: {source}"))?;
495    if let Ok(url) = Url::parse(raw) {
496        return url
497            .to_file_path()
498            .map_err(|_| PackageError::Registry(format!("invalid file:// path source: {source}")));
499    }
500    Ok(PathBuf::from(raw))
501}
502
503pub(crate) fn registry_file_url_or_path(raw: &str) -> Result<Option<PathBuf>, PackageError> {
504    if let Ok(url) = Url::parse(raw) {
505        if url.scheme() == "file" {
506            return url.to_file_path().map(Some).map_err(|_| {
507                PackageError::Registry(format!("invalid file:// registry URL: {raw}"))
508            });
509        }
510        return Ok(None);
511    }
512    Ok(Some(PathBuf::from(raw)))
513}
514
/// Fetches the raw registry index text from `source`.
///
/// Local paths and `file://` URLs are read from disk; `http`/`https` URLs
/// are fetched with a 20-second timeout. Any other scheme is rejected.
pub(crate) fn read_registry_source(source: &str) -> Result<String, PackageError> {
    if let Some(path) = registry_file_url_or_path(source)? {
        return fs::read_to_string(&path).map_err(|error| {
            PackageError::Registry(format!(
                "failed to read package registry {}: {error}",
                path.display()
            ))
        });
    }

    let url = Url::parse(source)
        .map_err(|error| format!("invalid package registry URL {source:?}: {error}"))?;
    match url.scheme() {
        "http" | "https" => {}
        other => return Err(format!("unsupported package registry URL scheme: {other}").into()),
    }
    let response = reqwest::blocking::Client::builder()
        // Bound the fetch so a hung registry does not hang the CLI.
        .timeout(Duration::from_secs(20))
        .build()
        .map_err(|error| format!("failed to build package registry client: {error}"))?
        .get(url)
        .send()
        .map_err(|error| format!("failed to fetch package registry {source}: {error}"))?;
    let status = response.status();
    if !status.is_success() {
        return Err(format!("GET {source} returned HTTP {status}").into());
    }
    response.text().map_err(|error| {
        PackageError::Registry(format!("failed to read package registry response: {error}"))
    })
}
546
547pub(crate) fn resolve_configured_registry_source(
548    explicit: Option<&str>,
549) -> Result<String, PackageError> {
550    PackageWorkspace::from_current_dir()?.resolve_registry_source(explicit)
551}
552
/// Validates one name segment: first char ASCII alphanumeric, the rest
/// ASCII alphanumeric or `-`/`_`/`.`; the empty string is invalid.
pub(crate) fn is_valid_registry_segment(segment: &str) -> bool {
    let mut chars = segment.chars();
    match chars.next() {
        Some(first) if first.is_ascii_alphanumeric() => {
            chars.all(|ch| ch.is_ascii_alphanumeric() || ch == '-' || ch == '_' || ch == '.')
        }
        _ => false,
    }
}
561
562pub(crate) fn is_valid_registry_package_name(name: &str) -> bool {
563    let trimmed = name.trim();
564    if trimmed != name || trimmed.is_empty() || trimmed.contains("://") || trimmed.ends_with('/') {
565        return false;
566    }
567    if let Some(scoped) = trimmed.strip_prefix('@') {
568        let Some((scope, package)) = scoped.split_once('/') else {
569            return false;
570        };
571        return !package.contains('/')
572            && is_valid_registry_segment(scope)
573            && is_valid_registry_segment(package);
574    }
575    !trimmed.contains('/') && is_valid_registry_segment(trimmed)
576}
577
/// Splits a CLI spec like `name@1.2.3` or `@scope/name@1.2.3` into
/// `(name, Some(version))`, or `(name, None)` when no version is attached.
/// Returns `None` when the spec is not a valid package name either way.
pub(crate) fn parse_registry_package_spec(spec: &str) -> Option<(&str, Option<&str>)> {
    let trimmed = spec.trim();
    if !trimmed.starts_with('@') {
        // Unscoped: the last `@` (if any) separates name from version.
        if let Some((name, version)) = trimmed.rsplit_once('@') {
            if is_valid_registry_package_name(name) && !version.trim().is_empty() {
                return Some((name, Some(version)));
            }
        }
        if is_valid_registry_package_name(trimmed) {
            return Some((trimmed, None));
        }
        return None;
    }

    // Scoped (`@scope/...`): the leading `@` belongs to the name, so only a
    // *later* `@` can start a version (`name != trimmed` guards the case
    // where rsplit matched the leading `@` and left an empty name prefix).
    if let Some((name, version)) = trimmed.rsplit_once('@') {
        if !name.is_empty()
            && name != trimmed
            && is_valid_registry_package_name(name)
            && !version.trim().is_empty()
        {
            return Some((name, Some(version)));
        }
    }
    if is_valid_registry_package_name(trimmed) {
        return Some((trimmed, None));
    }
    None
}
606
/// Parses registry index TOML from `content`, checks the schema version,
/// and runs full validation (which also sorts packages by name).
/// `source` is used only for error messages.
pub(crate) fn parse_package_registry_index(
    source: &str,
    content: &str,
) -> Result<PackageRegistryIndex, PackageError> {
    let mut index = toml::from_str::<PackageRegistryIndex>(content)
        .map_err(|error| format!("failed to parse package registry {source}: {error}"))?;
    if index.version != REGISTRY_INDEX_VERSION {
        return Err(format!(
            "unsupported package registry {source} version {} (expected {})",
            index.version, REGISTRY_INDEX_VERSION
        )
        .into());
    }
    validate_package_registry_index(source, &mut index)?;
    Ok(index)
}
623
/// Validates every package and version entry in `index`, then sorts the
/// packages by name for deterministic output.
///
/// Enforced invariants: valid unique package names, parseable repository
/// and per-version git URLs, non-empty unique version labels, and at least
/// one of `rev`/`branch` per version. `source` is used only in messages.
pub(crate) fn validate_package_registry_index(
    source: &str,
    index: &mut PackageRegistryIndex,
) -> Result<(), PackageError> {
    let mut names = HashSet::new();
    for package in &mut index.packages {
        if !is_valid_registry_package_name(&package.name) {
            return Err(format!(
                "package registry {source} has invalid package name '{}'",
                package.name
            )
            .into());
        }
        // `insert` returns false on duplicates.
        if !names.insert(package.name.clone()) {
            return Err(format!(
                "package registry {source} declares '{}' more than once",
                package.name
            )
            .into());
        }
        // Normalization is discarded here; we only care that it succeeds.
        normalize_git_url(&package.repository).map_err(|error| {
            format!(
                "package registry {source} has invalid repository for '{}': {error}",
                package.name
            )
        })?;
        let mut versions = HashSet::new();
        for version in &package.versions {
            if version.version.trim().is_empty() {
                return Err(format!(
                    "package registry {source} has empty version for '{}'",
                    package.name
                )
                .into());
            }
            if !versions.insert(version.version.clone()) {
                return Err(format!(
                    "package registry {source} declares '{}@{}' more than once",
                    package.name, version.version
                )
                .into());
            }
            // A version must be pinnable: either an exact rev or a branch.
            if version.rev.is_none() && version.branch.is_none() {
                return Err(format!(
                    "package registry {source} entry '{}@{}' must specify rev or branch",
                    package.name, version.version
                )
                .into());
            }
            normalize_git_url(&version.git).map_err(|error| {
                format!(
                    "package registry {source} has invalid git source for '{}@{}': {error}",
                    package.name, version.version
                )
            })?;
        }
    }
    // Stable name ordering for display and reproducible serialization.
    index
        .packages
        .sort_by(|left, right| left.name.cmp(&right.name));
    Ok(())
}
686
687pub(crate) fn load_package_registry_in(
688    workspace: &PackageWorkspace,
689    explicit: Option<&str>,
690) -> Result<(String, PackageRegistryIndex), PackageError> {
691    let source = workspace.resolve_registry_source(explicit)?;
692    let content = read_registry_source(&source)?;
693    let index = parse_package_registry_index(&source, &content)?;
694    Ok((source, index))
695}
696
697pub(crate) fn registry_package_matches(package: &RegistryPackage, query: &str) -> bool {
698    if query.trim().is_empty() {
699        return true;
700    }
701    let query = query.to_ascii_lowercase();
702    package.name.to_ascii_lowercase().contains(&query)
703        || package
704            .description
705            .as_deref()
706            .is_some_and(|value| value.to_ascii_lowercase().contains(&query))
707        || package.repository.to_ascii_lowercase().contains(&query)
708        || package
709            .exports
710            .iter()
711            .any(|export| export.to_ascii_lowercase().contains(&query))
712}
713
714pub(crate) fn latest_registry_version(
715    package: &RegistryPackage,
716) -> Option<&RegistryPackageVersion> {
717    package
718        .versions
719        .iter()
720        .rev()
721        .find(|version| !version.yanked)
722}
723
/// Looks up `name` in `index` and selects either the explicitly requested
/// `version` (error if absent) or, with `version == None`, the latest
/// non-yanked one (`selected_version` is `None` if there is none).
pub(crate) fn find_registry_package_version(
    index: &PackageRegistryIndex,
    name: &str,
    version: Option<&str>,
) -> Result<RegistryPackageInfo, PackageError> {
    let package = index
        .packages
        .iter()
        .find(|package| package.name == name)
        .ok_or_else(|| format!("package registry does not contain {name}"))?;
    let selected_version = match version {
        // Explicit request: the exact version must exist (yanked or not).
        Some(version) => Some(
            package
                .versions
                .iter()
                .find(|entry| entry.version == version)
                .ok_or_else(|| format!("package registry does not contain {name}@{version}"))?
                .clone(),
        ),
        None => latest_registry_version(package).cloned(),
    };
    Ok(RegistryPackageInfo {
        package: package.clone(),
        selected_version,
    })
}
750
751pub(crate) fn search_package_registry_impl(
752    query: Option<&str>,
753    registry: Option<&str>,
754) -> Result<Vec<RegistryPackage>, PackageError> {
755    search_package_registry_in(&PackageWorkspace::from_current_dir()?, query, registry)
756}
757
758pub(crate) fn search_package_registry_in(
759    workspace: &PackageWorkspace,
760    query: Option<&str>,
761    registry: Option<&str>,
762) -> Result<Vec<RegistryPackage>, PackageError> {
763    let (_, index) = load_package_registry_in(workspace, registry)?;
764    Ok(index
765        .packages
766        .into_iter()
767        .filter(|package| registry_package_matches(package, query.unwrap_or("")))
768        .collect())
769}
770
771pub(crate) fn package_registry_info_impl(
772    spec: &str,
773    registry: Option<&str>,
774) -> Result<RegistryPackageInfo, PackageError> {
775    package_registry_info_in(&PackageWorkspace::from_current_dir()?, spec, registry)
776}
777
778pub(crate) fn package_registry_info_in(
779    workspace: &PackageWorkspace,
780    spec: &str,
781    registry: Option<&str>,
782) -> Result<RegistryPackageInfo, PackageError> {
783    let Some((name, version)) = parse_registry_package_spec(spec) else {
784        return Err(format!(
785            "invalid registry package name '{spec}'; use names like @burin/notion-sdk or acme-lib"
786        )
787        .into());
788    };
789    let (_, index) = load_package_registry_in(workspace, registry)?;
790    find_registry_package_version(&index, name, version)
791}
792
/// Converts a registry spec (which must include a version) into a manifest
/// `(alias, Dependency)` pair pinned to the registry entry's git source.
///
/// The alias defaults to the entry's package name (or the repo name derived
/// from its git URL); `package` is set on the dependency only when the
/// alias differs from the package name. Yanked versions are rejected.
pub(crate) fn registry_dependency_from_spec_in(
    workspace: &PackageWorkspace,
    spec: &str,
    alias: Option<&str>,
    registry: Option<&str>,
) -> Result<(String, Dependency), PackageError> {
    let Some((name, Some(version))) = parse_registry_package_spec(spec) else {
        return Err(format!(
            "registry dependency '{spec}' must include a version, for example {spec}@1.2.3"
        )
        .into());
    };
    let info = package_registry_info_in(workspace, &format!("{name}@{version}"), registry)?;
    let selected = info
        .selected_version
        .ok_or_else(|| format!("package registry does not contain {name}@{version}"))?;
    if selected.yanked {
        return Err(format!("{name}@{version} is yanked in the package registry").into());
    }
    let git = normalize_git_url(&selected.git)?;
    // Prefer the registry-declared package name; otherwise derive it from
    // the last path segment of the git URL.
    let package_name = selected
        .package
        .clone()
        .map(Ok)
        .unwrap_or_else(|| derive_repo_name_from_source(&git))?;
    let alias = alias.unwrap_or(package_name.as_str()).to_string();
    Ok((
        alias.clone(),
        Dependency::Table(DepTable {
            git: Some(git),
            tag: None,
            rev: selected.rev,
            branch: selected.branch,
            path: None,
            // Only record `package` when the alias renames it.
            package: (alias != package_name).then_some(package_name),
        }),
    ))
}
831
/// Heuristic for shorthand git URLs like `github.com/owner/repo`: no
/// explicit scheme, not scp-style (`git@...`), contains a path separator,
/// and the first segment looks like a hostname (contains a dot).
pub(crate) fn is_probable_shorthand_git_url(raw: &str) -> bool {
    if raw.contains("://") || raw.starts_with("git@") || !raw.contains('/') {
        return false;
    }
    match raw.split('/').next() {
        Some(host) => host.contains('.'),
        None => false,
    }
}
841
/// Canonicalizes a git source string into a normalized URL.
///
/// Handling, in order: an existing local path becomes a canonical `file://`
/// URL; scp-style `git@host:path` becomes `ssh://git@host/path`; shorthand
/// host/path strings gain an `https://` scheme; finally the URL is parsed,
/// trailing slashes are stripped, and a `.git` suffix is dropped for
/// non-`file` schemes.
pub(crate) fn normalize_git_url(raw: &str) -> Result<String, PackageError> {
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        return Err("git URL cannot be empty".to_string().into());
    }

    // Local repository path: normalize through the real filesystem path.
    let candidate_path = PathBuf::from(trimmed);
    if candidate_path.exists() {
        let canonical = candidate_path
            .canonicalize()
            .map_err(|error| format!("failed to canonicalize {}: {error}", trimmed))?;
        let url = Url::from_file_path(canonical)
            .map_err(|_| format!("failed to convert {} to file:// URL", trimmed))?;
        return Ok(url.to_string().trim_end_matches('/').to_string());
    }

    // scp-style syntax is not a valid URL; rewrite it as ssh://.
    if let Some(rest) = trimmed.strip_prefix("git@") {
        if let Some((host, path)) = rest.split_once(':') {
            return Ok(format!(
                "ssh://git@{}/{}",
                host,
                path.trim_start_matches('/').trim_end_matches('/')
            ));
        }
    }

    let with_scheme = if is_probable_shorthand_git_url(trimmed) {
        format!("https://{trimmed}")
    } else {
        trimmed.to_string()
    };
    let parsed =
        Url::parse(&with_scheme).map_err(|error| format!("invalid git URL {trimmed}: {error}"))?;
    let mut normalized = parsed.to_string();
    while normalized.ends_with('/') {
        normalized.pop();
    }
    // Strip a `.git` suffix so equivalent remotes compare equal; local
    // file URLs keep it because the directory really is named that.
    if parsed.scheme() != "file" && normalized.ends_with(".git") {
        normalized.truncate(normalized.len() - 4);
    }
    Ok(normalized)
}
884
885pub(crate) fn derive_repo_name_from_source(source: &str) -> Result<String, PackageError> {
886    let url = Url::parse(source).map_err(|error| format!("invalid git URL {source}: {error}"))?;
887    let segment = url
888        .path_segments()
889        .and_then(|mut segments| segments.rfind(|segment| !segment.is_empty()))
890        .ok_or_else(|| format!("failed to derive package name from {source}"))?;
891    Ok(segment.trim_end_matches(".git").to_string())
892}
893
/// Splits a positional spec of the form `source@ref` into `(source, Some(ref))`.
///
/// The trailing component counts as a ref only when it is non-empty and free
/// of `/` and `:` (so scp-style `git@host:path` specs and URL paths stay
/// intact) and the remaining source does not end in a bare scheme (`...://`).
/// Anything else is returned unchanged with no ref.
pub(crate) fn parse_positional_git_spec(spec: &str) -> (&str, Option<&str>) {
    match spec.rsplit_once('@') {
        Some((source, candidate_ref))
            if !candidate_ref.is_empty()
                && !candidate_ref.contains('/')
                && !candidate_ref.contains(':')
                && !source.ends_with("://") =>
        {
            (source, Some(candidate_ref))
        }
        _ => (spec, None),
    }
}
906
/// Interprets `spec` as a local filesystem path, returning it when it exists.
///
/// Returns `None` for empty/whitespace specs, anything with an explicit
/// scheme (`://`), or scp-style `git@` specs — those are remote sources, not
/// paths. When the spec has no extension, a sibling `<spec>.harn` file is
/// also tried so single-file packages can be named without their suffix.
pub(crate) fn existing_local_path_spec(spec: &str) -> Option<PathBuf> {
    if spec.trim().is_empty() || spec.contains("://") || spec.starts_with("git@") {
        return None;
    }
    let candidate = PathBuf::from(spec);
    if candidate.exists() {
        return Some(candidate);
    }
    if candidate.extension().is_none() {
        // Allow referring to a single-file package without its ".harn" suffix.
        let with_ext = candidate.with_extension("harn");
        if with_ext.exists() {
            return Some(with_ext);
        }
    }
    // The previous trailing `if is_probable_shorthand_git_url(spec)` check was
    // dead code: both that branch and the fallthrough returned `None`.
    None
}
926
927pub(crate) fn package_manifest_name(path: &Path) -> Option<String> {
928    let manifest_path = if path.is_dir() {
929        path.join(MANIFEST)
930    } else {
931        path.parent()?.join(MANIFEST)
932    };
933    let manifest = read_manifest_from_path(&manifest_path).ok()?;
934    manifest
935        .package
936        .and_then(|pkg| pkg.name)
937        .map(|name| name.trim().to_string())
938        .filter(|name| !name.is_empty())
939}
940
941pub(crate) fn derive_package_alias_from_path(path: &Path) -> Result<String, PackageError> {
942    if let Some(name) = package_manifest_name(path) {
943        return Ok(name);
944    }
945    let fallback = if path.is_dir() {
946        path.file_name()
947    } else {
948        path.file_stem()
949    };
950    fallback
951        .and_then(|name| name.to_str())
952        .map(str::trim)
953        .filter(|name| !name.is_empty())
954        .map(str::to_string)
955        .ok_or_else(|| {
956            PackageError::Registry(format!(
957                "failed to derive package alias from {}",
958                path.display()
959            ))
960        })
961}
962
/// Returns `true` when `value` is a full 40-character hex git object id.
pub(crate) fn is_full_git_sha(value: &str) -> bool {
    value.len() == 40 && value.chars().all(|ch| ch.is_ascii_hexdigit())
}
966
967pub(crate) fn git_output<I, S>(
968    args: I,
969    cwd: Option<&Path>,
970) -> Result<std::process::Output, PackageError>
971where
972    I: IntoIterator<Item = S>,
973    S: AsRef<OsStr>,
974{
975    let mut command = process::Command::new("git");
976    command.args(args);
977    if let Some(dir) = cwd {
978        command.current_dir(dir);
979    }
980    command
981        .env_remove("GIT_DIR")
982        .env_remove("GIT_WORK_TREE")
983        .env_remove("GIT_INDEX_FILE")
984        .output()
985        .map_err(|error| PackageError::Registry(format!("failed to run git: {error}")))
986}
987
988pub(crate) fn resolve_git_commit(
989    url: &str,
990    rev: Option<&str>,
991    branch: Option<&str>,
992) -> Result<String, PackageError> {
993    let requested = branch.or(rev).unwrap_or("HEAD");
994    if branch.is_none() && is_full_git_sha(requested) {
995        return Ok(requested.to_string());
996    }
997
998    let refs = if let Some(branch) = branch {
999        vec![format!("refs/heads/{branch}")]
1000    } else if requested == "HEAD" {
1001        vec!["HEAD".to_string()]
1002    } else {
1003        vec![
1004            requested.to_string(),
1005            format!("refs/tags/{requested}^{{}}"),
1006            format!("refs/tags/{requested}"),
1007            format!("refs/heads/{requested}"),
1008        ]
1009    };
1010
1011    let output = git_output(
1012        std::iter::once("ls-remote".to_string())
1013            .chain(std::iter::once(url.to_string()))
1014            .chain(refs.clone()),
1015        None,
1016    )?;
1017    if !output.status.success() {
1018        return Err(format!(
1019            "failed to resolve git ref from {url}: {}",
1020            String::from_utf8_lossy(&output.stderr).trim()
1021        )
1022        .into());
1023    }
1024    let stdout = String::from_utf8_lossy(&output.stdout);
1025    let commit = stdout
1026        .lines()
1027        .filter_map(|line| line.split_whitespace().next())
1028        .find(|value| is_full_git_sha(value))
1029        .ok_or_else(|| format!("could not resolve {requested} from {url}"))?;
1030    Ok(commit.to_string())
1031}
1032
/// Materializes the tree of `commit` from `url` into `dest`, without a `.git`
/// directory.
///
/// `dest` is recreated from scratch. Fast path: init an empty repo, add the
/// remote, and shallow-fetch just the wanted commit. Servers that refuse
/// fetching a raw SHA fall back to a full clone in a sibling directory with a
/// checkout of the commit there, then the result is moved into place. The
/// `.git` directory is removed at the end so the destination holds plain
/// files only.
pub(crate) fn clone_git_commit_to(
    url: &str,
    commit: &str,
    dest: &Path,
) -> Result<(), PackageError> {
    // Start from an empty destination so stale files never survive.
    if dest.exists() {
        fs::remove_dir_all(dest)
            .map_err(|error| format!("failed to reset {}: {error}", dest.display()))?;
    }
    fs::create_dir_all(dest)
        .map_err(|error| format!("failed to create {}: {error}", dest.display()))?;

    let init = git_output(["init", "--quiet"], Some(dest))?;
    if !init.status.success() {
        return Err(format!(
            "failed to initialize git repo in {}: {}",
            dest.display(),
            String::from_utf8_lossy(&init.stderr).trim()
        )
        .into());
    }

    let remote = git_output(["remote", "add", "origin", url], Some(dest))?;
    if !remote.status.success() {
        return Err(format!(
            "failed to add git remote {url}: {}",
            String::from_utf8_lossy(&remote.stderr).trim()
        )
        .into());
    }

    // Shallow-fetch the single commit; not every server allows fetching by SHA.
    let fetch = git_output(["fetch", "--depth", "1", "origin", commit], Some(dest))?;
    if !fetch.status.success() {
        // Fallback: full clone next to `dest`, then check out the commit there.
        let fallback_dir = dest.with_extension("full-clone");
        if fallback_dir.exists() {
            fs::remove_dir_all(&fallback_dir)
                .map_err(|error| format!("failed to remove {}: {error}", fallback_dir.display()))?;
        }
        let clone = git_output(
            ["clone", url, fallback_dir.to_string_lossy().as_ref()],
            None,
        )?;
        if !clone.status.success() {
            // Report the original fetch failure; it is the more telling error.
            return Err(format!(
                "failed to fetch {commit} from {url}: {}",
                String::from_utf8_lossy(&fetch.stderr).trim()
            )
            .into());
        }
        let checkout = git_output(["checkout", commit], Some(&fallback_dir))?;
        if !checkout.status.success() {
            return Err(format!(
                "failed to checkout {commit} in {}: {}",
                fallback_dir.display(),
                String::from_utf8_lossy(&checkout.stderr).trim()
            )
            .into());
        }
        // Replace the (still empty) destination with the checked-out clone.
        fs::remove_dir_all(dest)
            .map_err(|error| format!("failed to remove {}: {error}", dest.display()))?;
        fs::rename(&fallback_dir, dest).map_err(|error| {
            format!(
                "failed to move {} to {}: {error}",
                fallback_dir.display(),
                dest.display()
            )
        })?;
    } else {
        let checkout = git_output(["checkout", "--detach", "FETCH_HEAD"], Some(dest))?;
        if !checkout.status.success() {
            return Err(format!(
                "failed to checkout FETCH_HEAD in {}: {}",
                dest.display(),
                String::from_utf8_lossy(&checkout.stderr).trim()
            )
            .into());
        }
    }

    // Strip git metadata so the result is just the tree contents.
    let git_dir = dest.join(".git");
    if git_dir.exists() {
        fs::remove_dir_all(&git_dir)
            .map_err(|error| format!("failed to remove {}: {error}", git_dir.display()))?;
    }
    Ok(())
}
1119
1120pub(crate) fn unique_temp_dir(base: &Path, label: &str) -> Result<PathBuf, PackageError> {
1121    for _ in 0..16 {
1122        let suffix = uuid::Uuid::now_v7();
1123        let candidate = base.join(format!("{label}-{suffix}"));
1124        if !candidate.exists() {
1125            return Ok(candidate);
1126        }
1127    }
1128    Err(format!(
1129        "failed to allocate a unique temporary directory under {}",
1130        base.display()
1131    )
1132    .into())
1133}
1134
/// Ensures the cache holds `source` at `commit` and returns its content hash.
///
/// Holds the per-entry cache lock for the whole operation. An existing entry
/// is re-verified against `expected_hash` (or freshly hashed when none is
/// given); `refetch` discards any existing entry first. A missing entry is an
/// error in `offline` mode; otherwise the commit is cloned into a temporary
/// sibling directory, hashed, verified, and renamed into place.
pub(crate) fn ensure_git_cache_populated_in(
    workspace: &PackageWorkspace,
    url: &str,
    source: &str,
    commit: &str,
    expected_hash: Option<&str>,
    refetch: bool,
    offline: bool,
) -> Result<String, PackageError> {
    let cache_dir = git_cache_dir_in(workspace, source, commit)?;
    // Serialize all work on this entry for the rest of the function.
    let _lock = acquire_git_cache_lock_in(workspace, source, commit)?;
    if refetch && cache_dir.exists() {
        fs::remove_dir_all(&cache_dir)
            .map_err(|error| format!("failed to remove {}: {error}", cache_dir.display()))?;
    }
    if cache_dir.exists() {
        // Cache hit: re-verify (or recompute) the hash and refresh metadata.
        if let Some(expected) = expected_hash {
            verify_content_hash_or_compute(&cache_dir, expected)?;
            write_cache_metadata(&cache_dir, source, commit, expected)?;
            return Ok(expected.to_string());
        }
        let hash = compute_content_hash(&cache_dir)?;
        write_cached_content_hash(&cache_dir, &hash)?;
        write_cache_metadata(&cache_dir, source, commit, &hash)?;
        return Ok(hash);
    }

    if offline {
        return Err(format!(
            "package cache entry for {source} at {commit} is missing; cannot fetch in offline mode"
        )
        .into());
    }

    let parent = cache_dir
        .parent()
        .ok_or_else(|| format!("invalid cache path {}", cache_dir.display()))?;
    fs::create_dir_all(parent)
        .map_err(|error| format!("failed to create {}: {error}", parent.display()))?;
    // Populate a temp dir first so a failed clone never leaves a partial entry.
    let temp_dir = unique_temp_dir(parent, "tmp")?;
    let populated = (|| -> Result<String, PackageError> {
        clone_git_commit_to(url, commit, &temp_dir)?;
        let hash = compute_content_hash(&temp_dir)?;
        if let Some(expected) = expected_hash {
            if hash != expected {
                return Err(format!(
                    "content hash mismatch for {} at {}: expected {}, got {}",
                    source, commit, expected, hash
                )
                .into());
            }
        }
        write_cached_content_hash(&temp_dir, &hash)?;
        write_cache_metadata(&temp_dir, source, commit, &hash)?;
        // The rename is the commit point that makes the entry visible.
        fs::rename(&temp_dir, &cache_dir).map_err(|error| {
            format!(
                "failed to move {} to {}: {error}",
                temp_dir.display(),
                cache_dir.display()
            )
        })?;
        Ok(hash)
    })();
    let hash = match populated {
        Ok(hash) => hash,
        Err(error) => {
            // Best-effort cleanup of the partially-populated temp dir.
            let _ = fs::remove_dir_all(&temp_dir);
            return Err(error);
        }
    };
    Ok(hash)
}
1207
/// One cached git checkout discovered under the cache root
/// (`<root>/<source_hash>/<commit>`).
#[derive(Debug, Clone)]
pub(crate) struct PackageCacheEntry {
    /// Absolute path of the cached checkout directory.
    path: PathBuf,
    /// First-level directory name; presumably a hash of the source URL
    /// (see `git_cache_dir_in` — TODO confirm).
    source_hash: String,
    /// Second-level directory name: the commit the checkout was taken at.
    commit: String,
    /// Sidecar metadata, when present and parseable.
    metadata: Option<PackageCacheMetadata>,
}
1215
1216pub(crate) fn git_cache_root_in(workspace: &PackageWorkspace) -> Result<PathBuf, PackageError> {
1217    Ok(workspace.cache_root()?.join("git"))
1218}
1219
/// Discovers cached git entries for the workspace of the current directory;
/// see [`discover_git_cache_entries_in`].
pub(crate) fn discover_git_cache_entries() -> Result<Vec<PackageCacheEntry>, PackageError> {
    discover_git_cache_entries_in(&PackageWorkspace::from_current_dir()?)
}
1223
/// Walks the two-level cache layout (`<root>/<source_hash>/<commit>`) and
/// returns one entry per cached checkout, sorted by source hash then commit.
///
/// A missing cache root yields an empty list. Non-directory children,
/// in-progress `tmp-*` directories, and `*.full-clone` fallback directories
/// are skipped.
pub(crate) fn discover_git_cache_entries_in(
    workspace: &PackageWorkspace,
) -> Result<Vec<PackageCacheEntry>, PackageError> {
    let root = git_cache_root_in(workspace)?;
    let mut entries = Vec::new();
    let source_dirs = match fs::read_dir(&root) {
        Ok(source_dirs) => source_dirs,
        // No cache root yet is a normal state, not an error.
        Err(error) if error.kind() == std::io::ErrorKind::NotFound => return Ok(entries),
        Err(error) => return Err(format!("failed to read {}: {error}", root.display()).into()),
    };
    for source_dir in source_dirs {
        let source_dir = source_dir
            .map_err(|error| format!("failed to read {} entry: {error}", root.display()))?;
        let source_type = source_dir
            .file_type()
            .map_err(|error| format!("failed to stat {}: {error}", source_dir.path().display()))?;
        if !source_type.is_dir() {
            continue;
        }
        let source_hash = source_dir.file_name().to_string_lossy().to_string();
        let commit_dirs = fs::read_dir(source_dir.path())
            .map_err(|error| format!("failed to read {}: {error}", source_dir.path().display()))?;
        for commit_dir in commit_dirs {
            let commit_dir = commit_dir.map_err(|error| {
                format!(
                    "failed to read {} entry: {error}",
                    source_dir.path().display()
                )
            })?;
            let commit_type = commit_dir.file_type().map_err(|error| {
                format!("failed to stat {}: {error}", commit_dir.path().display())
            })?;
            if !commit_type.is_dir() {
                continue;
            }
            let commit = commit_dir.file_name().to_string_lossy().to_string();
            // Skip population scratch space and full-clone fallback dirs.
            if commit.starts_with("tmp-") || commit.ends_with(".full-clone") {
                continue;
            }
            let metadata = read_cache_metadata(&commit_dir.path())?;
            entries.push(PackageCacheEntry {
                path: commit_dir.path(),
                source_hash: source_hash.clone(),
                commit,
                metadata,
            });
        }
    }
    // Deterministic order for display and comparison.
    entries.sort_by(|left, right| {
        left.source_hash
            .cmp(&right.source_hash)
            .then_with(|| left.commit.cmp(&right.commit))
    });
    Ok(entries)
}
1279
1280pub(crate) fn locked_git_cache_paths_in(
1281    workspace: &PackageWorkspace,
1282    lock: &LockFile,
1283) -> Result<HashSet<PathBuf>, PackageError> {
1284    let mut keep = HashSet::new();
1285    for entry in &lock.packages {
1286        validate_package_alias(&entry.name)?;
1287        if !entry.source.starts_with("git+") {
1288            continue;
1289        }
1290        let commit = entry
1291            .commit
1292            .as_deref()
1293            .ok_or_else(|| format!("missing locked commit for {}", entry.name))?;
1294        keep.insert(git_cache_dir_in(workspace, &entry.source, commit)?);
1295    }
1296    Ok(keep)
1297}
1298
/// Verifies the cache state backing one lock entry.
///
/// Returns `Ok(false)` for non-git sources (after checking that a `path+`
/// source still exists on disk). For `git+` sources the cached checkout must
/// exist, match the locked content hash, and carry sidecar metadata
/// consistent with the lock entry; missing metadata is (re)written rather
/// than treated as an error. Returns `Ok(true)` when a git entry verified.
pub(crate) fn verify_lock_entry_cache_in(
    workspace: &PackageWorkspace,
    entry: &LockEntry,
) -> Result<bool, PackageError> {
    validate_package_alias(&entry.name)?;
    if !entry.source.starts_with("git+") {
        // Path dependencies are not cached, but their source must still exist.
        if entry.source.starts_with("path+") {
            let path = path_from_source_uri(&entry.source)?;
            if !path.exists() {
                return Err(format!(
                    "path dependency {} source is missing: {}",
                    entry.name,
                    path.display()
                )
                .into());
            }
        }
        return Ok(false);
    }
    let commit = entry
        .commit
        .as_deref()
        .ok_or_else(|| format!("missing locked commit for {}", entry.name))?;
    let expected_hash = entry
        .content_hash
        .as_deref()
        .ok_or_else(|| format!("missing content hash for {}", entry.name))?;
    let cache_dir = git_cache_dir_in(workspace, &entry.source, commit)?;
    if !cache_dir.is_dir() {
        return Err(format!(
            "package cache entry for {} is missing: {}",
            entry.name,
            cache_dir.display()
        )
        .into());
    }
    verify_content_hash_or_compute(&cache_dir, expected_hash)?;
    // The sidecar metadata must agree with the lock entry on all three fields.
    match read_cache_metadata(&cache_dir)? {
        Some(metadata)
            if metadata.source == entry.source
                && metadata.commit == commit
                && metadata.content_hash == expected_hash => {}
        Some(metadata) => {
            return Err(format!(
                "package cache metadata mismatch for {}: expected {} {} {}, got {} {} {}",
                entry.name,
                entry.source,
                commit,
                expected_hash,
                metadata.source,
                metadata.commit,
                metadata.content_hash
            )
            .into());
        }
        // Absent metadata is repaired in place, not rejected.
        None => write_cache_metadata(&cache_dir, &entry.source, commit, expected_hash)?,
    }
    Ok(true)
}
1358
/// Verifies the copy of a lock entry materialized under the packages dir.
///
/// `path+` sources only need the alias to exist there (as a directory or a
/// `<name>.harn` file). `git+` sources must exist as a directory matching the
/// locked content hash. Returns `Ok(false)` for sources that are neither
/// `path+` nor `git+`.
pub(crate) fn verify_materialized_lock_entry(
    ctx: &ManifestContext,
    entry: &LockEntry,
) -> Result<bool, PackageError> {
    validate_package_alias(&entry.name)?;
    let packages_dir = ctx.packages_dir();
    if entry.source.starts_with("path+") {
        // Path dependencies may be materialized as a directory or single file.
        let dir = packages_dir.join(&entry.name);
        let file = packages_dir.join(format!("{}.harn", entry.name));
        if !dir.exists() && !file.exists() {
            return Err(format!(
                "materialized path dependency {} is missing under {}",
                entry.name,
                packages_dir.display()
            )
            .into());
        }
        return Ok(true);
    }
    if !entry.source.starts_with("git+") {
        return Ok(false);
    }
    let expected_hash = entry
        .content_hash
        .as_deref()
        .ok_or_else(|| format!("missing content hash for {}", entry.name))?;
    let dest_dir = packages_dir.join(&entry.name);
    if !dest_dir.is_dir() {
        return Err(format!(
            "materialized package {} is missing: {}",
            entry.name,
            dest_dir.display()
        )
        .into());
    }
    verify_content_hash_or_compute(&dest_dir, expected_hash)?;
    Ok(true)
}
1397
/// Verifies the package cache for the workspace of the current directory; see
/// [`verify_package_cache_in`].
pub(crate) fn verify_package_cache_impl(materialized: bool) -> Result<usize, PackageError> {
    verify_package_cache_in(&PackageWorkspace::from_current_dir()?, materialized)
}
1401
/// Verifies every lock entry's cache (and, with `materialized`, its copy
/// under the packages dir), returning the number of successful checks.
///
/// Requires the lock file to exist and match the manifest.
/// NOTE(review): with `materialized` set, one git entry can count twice —
/// once for the cache and once for the materialized copy; confirm intended.
pub(crate) fn verify_package_cache_in(
    workspace: &PackageWorkspace,
    materialized: bool,
) -> Result<usize, PackageError> {
    let ctx = workspace.load_manifest_context()?;
    let lock = LockFile::load(&ctx.lock_path())?
        .ok_or_else(|| format!("{} is missing", ctx.lock_path().display()))?;
    validate_lock_matches_manifest(&ctx, &lock)?;
    let mut verified = 0usize;
    for entry in &lock.packages {
        if verify_lock_entry_cache_in(workspace, entry)? {
            verified += 1;
        }
        if materialized && verify_materialized_lock_entry(&ctx, entry)? {
            verified += 1;
        }
    }
    Ok(verified)
}
1421
/// Cleans the package cache for the workspace of the current directory; see
/// [`clean_package_cache_in`].
pub(crate) fn clean_package_cache_impl(all: bool) -> Result<usize, PackageError> {
    clean_package_cache_in(&PackageWorkspace::from_current_dir()?, all)
}
1425
/// Removes cached git entries, returning how many were deleted.
///
/// With `all`, the whole `git` and `locks` cache subtrees are removed.
/// Otherwise only entries not referenced by the lock file are removed, and
/// per-source parent directories left empty are cleaned up afterwards.
pub(crate) fn clean_package_cache_in(
    workspace: &PackageWorkspace,
    all: bool,
) -> Result<usize, PackageError> {
    let entries = discover_git_cache_entries_in(workspace)?;
    // NOTE(review): this early return means `all` leaves a lone `locks`
    // directory untouched when no git entries exist — confirm acceptable.
    if entries.is_empty() {
        return Ok(0);
    }
    if all {
        let root = workspace.cache_root()?;
        for child in ["git", "locks"] {
            let path = root.join(child);
            if path.exists() {
                fs::remove_dir_all(&path)
                    .map_err(|error| format!("failed to remove {}: {error}", path.display()))?;
            }
        }
        return Ok(entries.len());
    }

    // Selective clean requires a lock file to know which entries to keep.
    let ctx = workspace.load_manifest_context()?;
    let lock = LockFile::load(&ctx.lock_path())?.ok_or_else(|| {
        format!(
            "{} is missing; pass --all to clean every cache entry",
            LOCK_FILE
        )
    })?;
    validate_lock_matches_manifest(&ctx, &lock)?;
    let keep = locked_git_cache_paths_in(workspace, &lock)?;
    let mut removed = 0usize;
    for entry in entries {
        if keep.contains(&entry.path) {
            continue;
        }
        fs::remove_dir_all(&entry.path)
            .map_err(|error| format!("failed to remove {}: {error}", entry.path.display()))?;
        removed += 1;
        // Remove the per-source parent directory once it becomes empty.
        if let Some(parent) = entry.path.parent() {
            let is_empty = fs::read_dir(parent)
                .map(|mut children| children.next().is_none())
                .unwrap_or(false);
            if is_empty {
                fs::remove_dir(parent)
                    .map_err(|error| format!("failed to remove {}: {error}", parent.display()))?;
            }
        }
    }
    Ok(removed)
}
1475
1476pub fn list_package_cache() {
1477    let result = (|| -> Result<(PathBuf, Vec<PackageCacheEntry>), PackageError> {
1478        Ok((cache_root()?, discover_git_cache_entries()?))
1479    })();
1480
1481    match result {
1482        Ok((root, entries)) => {
1483            println!("Cache root: {}", root.display());
1484            if entries.is_empty() {
1485                println!("No cached git packages.");
1486                return;
1487            }
1488            println!("commit\tcontent_hash\tsource\tpath");
1489            for entry in entries {
1490                let (source, content_hash) = entry
1491                    .metadata
1492                    .as_ref()
1493                    .map(|metadata| (metadata.source.as_str(), metadata.content_hash.as_str()))
1494                    .unwrap_or(("(unknown)", "(unknown)"));
1495                println!(
1496                    "{}\t{}\t{}\t{}",
1497                    entry.commit,
1498                    content_hash,
1499                    source,
1500                    entry.path.display()
1501                );
1502            }
1503        }
1504        Err(error) => {
1505            eprintln!("error: {error}");
1506            process::exit(1);
1507        }
1508    }
1509}
1510
1511pub fn clean_package_cache(all: bool) {
1512    match clean_package_cache_impl(all) {
1513        Ok(removed) => println!("Removed {removed} cached package entries."),
1514        Err(error) => {
1515            eprintln!("error: {error}");
1516            process::exit(1);
1517        }
1518    }
1519}
1520
1521pub fn verify_package_cache(materialized: bool) {
1522    match verify_package_cache_impl(materialized) {
1523        Ok(verified) => println!("Verified {verified} package cache entries."),
1524        Err(error) => {
1525            eprintln!("error: {error}");
1526            process::exit(1);
1527        }
1528    }
1529}
1530
/// CLI entry: searches the registry index and prints matches, as pretty JSON
/// with `json` or as a tab-separated table otherwise. Exits the process with
/// status 1 on error.
pub fn search_package_registry(query: Option<&str>, registry: Option<&str>, json: bool) {
    match search_package_registry_impl(query, registry) {
        Ok(packages) if json => {
            println!(
                "{}",
                serde_json::to_string_pretty(&packages)
                    .unwrap_or_else(|error| format!(r#"{{"error":"{error}"}}"#))
            );
        }
        Ok(packages) => {
            if packages.is_empty() {
                println!("No packages found.");
                return;
            }
            println!("name\tlatest\tharn\tcontract\tdescription");
            for package in packages {
                // "-" marks fields the index does not provide for a package.
                let latest = latest_registry_version(&package)
                    .map(|version| version.version.as_str())
                    .unwrap_or("-");
                println!(
                    "{}\t{}\t{}\t{}\t{}",
                    package.name,
                    latest,
                    package.harn.as_deref().unwrap_or("-"),
                    package.connector_contract.as_deref().unwrap_or("-"),
                    package.description.as_deref().unwrap_or("")
                );
            }
        }
        Err(error) => {
            eprintln!("error: {error}");
            process::exit(1);
        }
    }
}
1566
/// CLI entry: resolves `spec` against the registry and prints package details
/// (as pretty JSON with `json`, otherwise as key-value lines). Optional
/// fields are only printed when present. Exits the process with status 1 on
/// error.
pub fn show_package_registry_info(spec: &str, registry: Option<&str>, json: bool) {
    match package_registry_info_impl(spec, registry) {
        Ok(info) if json => {
            println!(
                "{}",
                serde_json::to_string_pretty(&info)
                    .unwrap_or_else(|error| format!(r#"{{"error":"{error}"}}"#))
            );
        }
        Ok(info) => {
            let package = info.package;
            println!("{}", package.name);
            if let Some(description) = package.description.as_deref() {
                println!("description: {description}");
            }
            println!("repository: {}", package.repository);
            if let Some(license) = package.license.as_deref() {
                println!("license: {license}");
            }
            if let Some(harn) = package.harn.as_deref() {
                println!("harn: {harn}");
            }
            if let Some(contract) = package.connector_contract.as_deref() {
                println!("connector_contract: {contract}");
            }
            if let Some(docs) = package.docs_url.as_deref() {
                println!("docs: {docs}");
            }
            if let Some(checksum) = package.checksum.as_deref() {
                println!("checksum: {checksum}");
            }
            if let Some(provenance) = package.provenance.as_deref() {
                println!("provenance: {provenance}");
            }
            if !package.exports.is_empty() {
                println!("exports: {}", package.exports.join(", "));
            }
            // Details of the version selected by the spec, when one matched.
            if let Some(version) = info.selected_version {
                println!("selected: {}", version.version);
                println!("git: {}", version.git);
                if let Some(rev) = version.rev.as_deref() {
                    println!("rev: {rev}");
                }
                if let Some(branch) = version.branch.as_deref() {
                    println!("branch: {branch}");
                }
                if let Some(package_name) = version.package.as_deref() {
                    println!("package: {package_name}");
                }
            }
            // Full version list, with yanked releases annotated.
            if !package.versions.is_empty() {
                let versions = package
                    .versions
                    .iter()
                    .map(|version| {
                        if version.yanked {
                            format!("{} (yanked)", version.version)
                        } else {
                            version.version.clone()
                        }
                    })
                    .collect::<Vec<_>>()
                    .join(", ");
                println!("versions: {versions}");
            }
        }
        Err(error) => {
            eprintln!("error: {error}");
            process::exit(1);
        }
    }
}
1639
1640#[cfg(test)]
1641mod tests {
1642    use super::*;
1643    use crate::package::test_support::*;
1644
    // Mutating .git contents, .gitignore, and the stale hash marker file must
    // not change the computed content hash.
    #[test]
    fn compute_content_hash_ignores_git_and_hash_marker() {
        let tmp = tempfile::tempdir().unwrap();
        let root = tmp.path();
        fs::create_dir_all(root.join(".git")).unwrap();
        fs::write(root.join(".git/HEAD"), "ref: refs/heads/main\n").unwrap();
        fs::write(root.join(".gitignore"), "ignored\n").unwrap();
        fs::write(root.join(CONTENT_HASH_FILE), "stale\n").unwrap();
        fs::write(
            root.join("lib.harn"),
            "pub fn value() -> number { return 1 }\n",
        )
        .unwrap();
        let first = compute_content_hash(root).unwrap();
        // Touch only the files the hash is supposed to ignore.
        fs::write(root.join(".git/HEAD"), "changed\n").unwrap();
        fs::write(root.join(".gitignore"), "changed\n").unwrap();
        fs::write(root.join(CONTENT_HASH_FILE), "changed\n").unwrap();
        let second = compute_content_hash(root).unwrap();
        assert_eq!(first, second);
    }
1665
    // Removing a materialized package that is a directory symlink must unlink
    // the link itself while leaving the symlink target intact.
    #[cfg(unix)]
    #[test]
    fn remove_materialized_package_unlinks_directory_symlink_without_touching_source() {
        let tmp = tempfile::tempdir().unwrap();
        let source = tmp.path().join("source");
        let packages = tmp.path().join(".harn/packages");
        fs::create_dir_all(&source).unwrap();
        fs::create_dir_all(&packages).unwrap();
        fs::write(
            source.join("lib.harn"),
            "pub fn value() -> number { return 1 }\n",
        )
        .unwrap();

        // Materialize the package as a symlink pointing at the source dir.
        let materialized = packages.join("acme");
        std::os::unix::fs::symlink(&source, &materialized).unwrap();

        remove_materialized_package(&packages, "acme").unwrap();

        assert!(!materialized.exists());
        assert!(source.join("lib.harn").is_file());
    }
1688
    // After installing, overwriting a cached file must make verification fail
    // with a content-hash mismatch, even though the stale hash marker file in
    // the cache still holds the original value.
    #[test]
    fn package_cache_verify_detects_tampering_even_with_stale_marker() {
        let (_repo_tmp, repo, _branch) = create_git_package_repo();
        let project_tmp = tempfile::tempdir().unwrap();
        let root = project_tmp.path();
        let workspace = TestWorkspace::new(root);
        fs::create_dir_all(root.join(".git")).unwrap();
        let git = normalize_git_url(repo.to_string_lossy().as_ref()).unwrap();
        fs::write(
            root.join(MANIFEST),
            format!(
                r#"
    [package]
    name = "workspace"
    version = "0.1.0"

    [dependencies]
    acme-lib = {{ git = "{git}", rev = "v1.0.0" }}
    "#
            ),
        )
        .unwrap();

        install_packages_in(workspace.env(), false, None, false).unwrap();
        let lock = LockFile::load(&root.join(LOCK_FILE)).unwrap().unwrap();
        let entry = lock.find("acme-lib").unwrap();
        let cache_dir = git_cache_dir_in(
            workspace.env(),
            &entry.source,
            entry.commit.as_deref().unwrap(),
        )
        .unwrap();
        // Tamper with the cached checkout behind the cache's back.
        fs::write(
            cache_dir.join("lib.harn"),
            "pub fn value() { return \"pwned\" }\n",
        )
        .unwrap();

        let error = verify_package_cache_in(workspace.env(), false).unwrap_err();
        assert!(error.to_string().contains("content hash mismatch"));
    }
1730
1731    #[test]
1732    fn package_cache_clean_all_removes_cached_git_entries() {
1733        let (_repo_tmp, repo, _branch) = create_git_package_repo();
1734        let project_tmp = tempfile::tempdir().unwrap();
1735        let root = project_tmp.path();
1736        let workspace = TestWorkspace::new(root);
1737        fs::create_dir_all(root.join(".git")).unwrap();
1738        let git = normalize_git_url(repo.to_string_lossy().as_ref()).unwrap();
1739        fs::write(
1740            root.join(MANIFEST),
1741            format!(
1742                r#"
1743    [package]
1744    name = "workspace"
1745    version = "0.1.0"
1746
1747    [dependencies]
1748    acme-lib = {{ git = "{git}", rev = "v1.0.0" }}
1749    "#
1750            ),
1751        )
1752        .unwrap();
1753
1754        install_packages_in(workspace.env(), false, None, false).unwrap();
1755        assert_eq!(
1756            discover_git_cache_entries_in(workspace.env())
1757                .unwrap()
1758                .len(),
1759            1
1760        );
1761
1762        let removed = clean_package_cache_in(workspace.env(), true).unwrap();
1763        assert_eq!(removed, 1);
1764        assert!(discover_git_cache_entries_in(workspace.env())
1765            .unwrap()
1766            .is_empty());
1767    }
1768
1769    #[test]
1770    fn registry_index_search_and_info_use_local_file_without_network() {
1771        let (_repo_tmp, repo, _branch) = create_git_package_repo();
1772        let project_tmp = tempfile::tempdir().unwrap();
1773        let root = project_tmp.path();
1774        let workspace = TestWorkspace::new(root);
1775        let registry_path = root.join("index.toml");
1776        let git = normalize_git_url(repo.to_string_lossy().as_ref()).unwrap();
1777        write_package_registry_index(&registry_path, "@burin/acme-lib", &git, "acme-lib");
1778        fs::create_dir_all(root.join(".git")).unwrap();
1779        fs::write(
1780            root.join(MANIFEST),
1781            r#"
1782    [package]
1783    name = "workspace"
1784    version = "0.1.0"
1785    "#,
1786        )
1787        .unwrap();
1788
1789        let matches = search_package_registry_in(
1790            workspace.env(),
1791            Some("acme"),
1792            Some(registry_path.to_string_lossy().as_ref()),
1793        )
1794        .unwrap();
1795        assert_eq!(matches.len(), 1);
1796        assert_eq!(matches[0].name, "@burin/acme-lib");
1797        assert_eq!(
1798            matches[0].harn.as_deref(),
1799            Some(crate::package::current_harn_range_example().as_str())
1800        );
1801        assert_eq!(matches[0].connector_contract.as_deref(), Some("v1"));
1802        assert_eq!(matches[0].exports, vec!["lib"]);
1803
1804        let info = package_registry_info_in(
1805            workspace.env(),
1806            "@burin/acme-lib@1.0.0",
1807            Some(registry_path.to_string_lossy().as_ref()),
1808        )
1809        .unwrap();
1810        assert_eq!(info.package.license.as_deref(), Some("MIT OR Apache-2.0"));
1811        assert_eq!(
1812            info.selected_version
1813                .as_ref()
1814                .map(|version| version.git.as_str()),
1815            Some(git.as_str())
1816        );
1817    }
1818
1819    #[test]
1820    fn add_registry_dependency_writes_existing_git_dependency_shape() {
1821        let (_repo_tmp, repo, _branch) = create_git_package_repo();
1822        let project_tmp = tempfile::tempdir().unwrap();
1823        let root = project_tmp.path();
1824        let registry_path = root.join("index.toml");
1825        let workspace =
1826            TestWorkspace::new(root).with_registry_source(registry_path.display().to_string());
1827        let git = normalize_git_url(repo.to_string_lossy().as_ref()).unwrap();
1828        write_package_registry_index(&registry_path, "@burin/acme-lib", &git, "acme-lib");
1829        fs::create_dir_all(root.join(".git")).unwrap();
1830        fs::write(
1831            root.join(MANIFEST),
1832            r#"
1833    [package]
1834    name = "workspace"
1835    version = "0.1.0"
1836    "#,
1837        )
1838        .unwrap();
1839
1840        add_package_to(
1841            workspace.env(),
1842            "@burin/acme-lib@1.0.0",
1843            None,
1844            None,
1845            None,
1846            None,
1847            None,
1848            None,
1849            None,
1850        )
1851        .unwrap();
1852
1853        let manifest = fs::read_to_string(root.join(MANIFEST)).unwrap();
1854        assert!(
1855            manifest.contains(&format!(
1856                "acme-lib = {{ git = \"{git}\", rev = \"v1.0.0\" }}"
1857            )),
1858            "registry install should write the same dependency line as a direct git add: {manifest}"
1859        );
1860        let lock = LockFile::load(&root.join(LOCK_FILE)).unwrap().unwrap();
1861        let entry = lock.find("acme-lib").unwrap();
1862        assert_eq!(entry.source, format!("git+{git}"));
1863        assert!(root
1864            .join(PKG_DIR)
1865            .join("acme-lib")
1866            .join("lib.harn")
1867            .is_file());
1868    }
1869
1870    #[test]
1871    fn registry_index_rejects_invalid_names_and_duplicate_versions() {
1872        let content = r#"
1873    version = 1
1874
1875    [[package]]
1876    name = "@bad/"
1877    repository = "https://github.com/acme/acme-lib"
1878
1879    [[package.version]]
1880    version = "1.0.0"
1881    git = "https://github.com/acme/acme-lib"
1882    rev = "v1.0.0"
1883    "#;
1884        let error = parse_package_registry_index("fixture", content).unwrap_err();
1885        assert!(error.to_string().contains("invalid package name"));
1886
1887        let content = r#"
1888    version = 1
1889
1890    [[package]]
1891    name = "@burin/acme-lib"
1892    repository = "https://github.com/acme/acme-lib"
1893
1894    [[package.version]]
1895    version = "1.0.0"
1896    git = "https://github.com/acme/acme-lib"
1897    rev = "v1.0.0"
1898
1899    [[package.version]]
1900    version = "1.0.0"
1901    git = "https://github.com/acme/acme-lib"
1902    rev = "v1.0.0"
1903    "#;
1904        let error = parse_package_registry_index("fixture", content).unwrap_err();
1905        assert!(error.to_string().contains("more than once"));
1906    }
1907}