1use crate::agent_cx::AgentCx;
10use crate::config::Config;
11use crate::error::{Error, Result};
12use crate::extension_index::ExtensionIndexStore;
13use crate::extensions::{CompatibilityScanner, load_extension_manifest};
14use asupersync::channel::oneshot;
15use serde::{Deserialize, Serialize};
16use serde_json::Value;
17use sha2::{Digest, Sha256};
18use std::ffi::OsStr;
19use std::fmt::Write as _;
20use std::fs;
21use std::io::Write as _;
22use std::path::{Path, PathBuf};
23use std::process::{Command, Stdio};
24use std::thread;
25use tracing::{info, warn};
26
/// Where a package is installed and where its declaration is persisted.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PackageScope {
    /// Installed globally for the current user (global settings / npm -g).
    User,
    /// Installed under the project directory of the manager's cwd.
    Project,
    /// Installed into a temporary directory; never persisted to settings,
    /// lockfiles, or the trust-audit log (scope-path helpers return `None`).
    Temporary,
}
33
/// A package declaration read from a settings file, tagged with the scope
/// of the settings file it came from.
#[derive(Debug, Clone)]
pub struct PackageEntry {
    /// Scope of the settings file that declared this package.
    pub scope: PackageScope,
    /// Raw source string as written in settings (npm/git/local syntax).
    pub source: String,
    /// Optional per-resource-kind allow-list applied when resolving.
    pub filter: Option<PackageFilter>,
}
40
/// Optional allow-lists restricting which resources a package contributes.
/// `None` for a field means "no restriction" for that resource kind.
#[derive(Debug, Clone, Default)]
pub struct PackageFilter {
    /// Extensions to include, if restricted.
    pub extensions: Option<Vec<String>>,
    /// Skills to include, if restricted.
    pub skills: Option<Vec<String>>,
    /// Prompts to include, if restricted.
    pub prompts: Option<Vec<String>>,
    /// Themes to include, if restricted.
    pub themes: Option<Vec<String>>,
}
52
/// Provenance attached to each resolved resource path.
#[derive(Debug, Clone)]
pub struct PathMetadata {
    /// Source string the resource came from ("local" for top-level entries).
    pub source: String,
    /// Scope of the declaring settings file.
    pub scope: PackageScope,
    /// Whether the resource came from a package or a top-level entry.
    pub origin: ResourceOrigin,
    /// Install/base directory the resource was found under, when known.
    pub base_dir: Option<PathBuf>,
}
60
/// Distinguishes resources contributed by an installed package from those
/// declared directly at the top level of a settings file.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ResourceOrigin {
    /// Contributed by an installed package.
    Package,
    /// Declared directly in settings (top-level entry).
    TopLevel,
}
66
/// A single resolved resource: its on-disk path, whether it is enabled,
/// and where it came from.
#[derive(Debug, Clone)]
pub struct ResolvedResource {
    /// Filesystem path of the resource.
    pub path: PathBuf,
    /// Whether the resource is enabled.
    pub enabled: bool,
    /// Provenance of the resource.
    pub metadata: PathMetadata,
}
73
/// The full set of resolved resources, grouped by resource kind.
#[derive(Debug, Clone, Default)]
pub struct ResolvedPaths {
    /// Resolved extension resources.
    pub extensions: Vec<ResolvedResource>,
    /// Resolved skill resources.
    pub skills: Vec<ResolvedResource>,
    /// Resolved prompt resources.
    pub prompts: Vec<ResolvedResource>,
    /// Resolved theme resources.
    pub themes: Vec<ResolvedResource>,
}
81
/// The settings files and base directories resolution reads from.
/// Usually built via [`ResolveRoots::from_env`]; passing it explicitly
/// allows tests/callers to redirect resolution.
#[derive(Debug, Clone)]
pub struct ResolveRoots {
    /// Path of the user-scope settings file.
    pub global_settings_path: PathBuf,
    /// Path of the project-scope settings file.
    pub project_settings_path: PathBuf,
    /// Base directory for user-scope top-level resources.
    pub global_base_dir: PathBuf,
    /// Base directory for project-scope top-level resources.
    pub project_base_dir: PathBuf,
}
93
94impl ResolveRoots {
95 #[must_use]
97 pub fn from_env(cwd: &Path) -> Self {
98 Self {
99 global_settings_path: global_settings_path(),
100 project_settings_path: project_settings_path(cwd),
101 global_base_dir: Config::global_dir(),
102 project_base_dir: cwd.join(Config::project_dir()),
103 }
104 }
105}
106
/// Installs, removes, updates, and resolves packages declared in settings.
/// Cheap to clone; blocking work is run on spawned threads by the async API.
#[derive(Debug, Clone)]
pub struct PackageManager {
    // Working directory all project-scoped paths are derived from.
    cwd: PathBuf,
}
111
/// Which operation triggered lockfile verification; recorded in the
/// trust-audit event's `action` field.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PackageLockAction {
    /// A fresh install of the source.
    Install,
    /// An update of an already-declared source.
    Update,
}
117
/// On-disk lockfile (`packages.lock.json`) recording trusted provenance
/// for every installed package in a scope.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct PackageLockfile {
    /// Schema identifier; see [`PACKAGE_LOCK_SCHEMA`].
    pub schema: String,
    /// One entry per tracked package identity.
    #[serde(default)]
    pub entries: Vec<PackageLockEntry>,
}
124
125impl Default for PackageLockfile {
126 fn default() -> Self {
127 Self {
128 schema: PACKAGE_LOCK_SCHEMA.to_string(),
129 entries: Vec::new(),
130 }
131 }
132}
133
/// A single lockfile record: the package's identity, how it was requested,
/// what it resolved to, and a content digest of the installed tree.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct PackageLockEntry {
    /// Stable identity string (e.g. "npm:<name>", "git:<repo>", "local:<path>").
    pub identity: String,
    /// Raw source string as declared.
    pub source: String,
    /// Which kind of source this entry describes.
    pub source_kind: PackageSourceKind,
    /// Kind-specific resolved provenance (versions, commits, paths).
    pub resolved: PackageResolvedProvenance,
    /// SHA-256 digest of the installed package contents.
    pub digest_sha256: String,
    /// Trust verdict recorded for this entry.
    pub trust_state: PackageEntryTrustState,
}
143
/// The kind of package source, serialized in snake_case for the lockfile.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum PackageSourceKind {
    /// An npm registry package.
    Npm,
    /// A git repository.
    Git,
    /// A local filesystem path.
    Local,
}
151
/// Kind-specific resolved provenance, serialized with an internal `kind` tag.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum PackageResolvedProvenance {
    /// npm provenance: what was requested vs. what got installed.
    Npm {
        /// Package name.
        name: String,
        /// Full spec string as requested (may include a version suffix).
        requested_spec: String,
        /// Version portion of the spec, when one was given.
        requested_version: Option<String>,
        /// Version read from the installed package.json.
        installed_version: String,
        /// Whether the source pinned an exact version.
        pinned: bool,
    },
    /// git provenance: repo coordinates and the commit HEAD resolved to.
    Git {
        /// Repository identifier.
        repo: String,
        /// Host component (e.g. used in install-path layout).
        host: String,
        /// Repository path component.
        path: String,
        /// Ref requested in the source, when one was given.
        requested_ref: Option<String>,
        /// Commit hash of HEAD in the installed checkout.
        resolved_commit: String,
        /// `remote.origin.url` of the checkout, when configured.
        origin_url: Option<String>,
        /// Whether the source pinned a specific ref.
        pinned: bool,
    },
    /// Local-path provenance: the canonicalized path when resolvable.
    Local {
        /// Canonicalized (best-effort) filesystem path.
        resolved_path: String,
    },
}
175
/// Trust verdict recorded per lockfile entry.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum PackageEntryTrustState {
    /// Provenance verified; package accepted.
    Trusted,
    /// Provenance verification failed; package rejected.
    Rejected,
}
182
/// One record appended to the trust-audit JSONL log
/// (`package-trust-audit.jsonl`) for each lock-state transition.
#[derive(Debug, Clone, Serialize)]
pub struct PackageTrustAuditEvent {
    /// Schema identifier; see [`PACKAGE_TRUST_AUDIT_SCHEMA`].
    pub schema: &'static str,
    /// RFC 3339 timestamp (millisecond precision, UTC).
    pub timestamp: String,
    /// Triggering action: "install", "update", or "remove".
    pub action: String,
    /// Scope label the event applies to.
    pub scope: String,
    /// Raw source string.
    pub source: String,
    /// Package identity string.
    pub identity: String,
    /// Trust state before the transition (e.g. "untracked").
    pub from_state: String,
    /// Trust state after the transition (e.g. "rejected", "removed").
    pub to_state: String,
    /// Machine-readable reasons for the transition.
    pub reason_codes: Vec<String>,
    /// Suggested remediation, when the transition was a rejection.
    pub remediation: Option<String>,
    /// Serialized lock entry involved in the transition.
    pub details: serde_json::Value,
}
197
/// Successful outcome of evaluating a lock transition: the state change
/// and the reasons, used to populate the audit event.
#[derive(Debug, Clone)]
pub struct LockTransitionPlan {
    /// Machine-readable reasons for allowing the transition.
    pub reason_codes: Vec<String>,
    /// State label before the transition.
    pub from_state: String,
    /// State label after the transition.
    pub to_state: String,
}
204
/// Failed outcome of evaluating a lock transition: a stable code, a
/// human-readable reason, and how to remediate.
#[derive(Debug, Clone)]
pub struct PackageLockMismatch {
    /// Stable machine-readable mismatch code.
    pub code: &'static str,
    /// Human-readable description of the mismatch.
    pub reason: String,
    /// Suggested remediation steps.
    pub remediation: String,
}
211
/// Schema identifier written into `packages.lock.json`.
pub const PACKAGE_LOCK_SCHEMA: &str = "pi.package_lock.v1";
/// Schema identifier stamped on each trust-audit JSONL event.
pub const PACKAGE_TRUST_AUDIT_SCHEMA: &str = "pi.package_trust_audit.v1";
214
215impl PackageManager {
    /// Creates a manager rooted at `cwd`; all project-scoped paths are
    /// derived from it.
    pub const fn new(cwd: PathBuf) -> Self {
        Self { cwd }
    }
219
220 pub fn resolve_install_source_alias(&self, source: &str) -> String {
224 let source = source.trim();
225 resolve_install_source_alias(source, &self.cwd).unwrap_or_else(|| source.to_string())
226 }
227
228 pub fn package_identity(&self, source: &str) -> String {
235 match parse_source(source, &self.cwd) {
236 ParsedSource::Npm { name, .. } => format!("npm:{name}"),
237 ParsedSource::Git { repo, .. } => format!("git:{repo}"),
238 ParsedSource::Local { path } => format!("local:{}", path.display()),
239 }
240 }
241
    /// Installs `source` at `scope`, then verifies and records its lockfile
    /// entry.
    ///
    /// The blocking work runs on a dedicated OS thread; the result is sent
    /// back over a oneshot channel.
    ///
    /// # Errors
    /// Returns the underlying install/lock error, or a tool error if the
    /// worker thread's result channel is cancelled.
    pub async fn install(&self, source: &str, scope: PackageScope) -> Result<()> {
        let this = self.clone();
        let source = source.to_string();
        let (tx, rx) = oneshot::channel();

        thread::spawn(move || {
            let res = this.install_sync(&source, scope);
            // NOTE(review): a fresh AgentCx per thread appears to be the
            // convention for sending on this channel — confirm against
            // asupersync's oneshot contract.
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), res);
        });

        let cx = AgentCx::for_request();
        rx.recv(cx.cx())
            .await
            .map_err(|_| Error::tool("package_manager", "Install task cancelled"))?
    }
258
259 fn install_sync(&self, source: &str, scope: PackageScope) -> Result<()> {
260 let parsed = parse_source(source, &self.cwd);
261 match parsed {
262 ParsedSource::Npm { spec, .. } => self.install_npm(&spec, scope),
263 ParsedSource::Git {
264 repo,
265 host,
266 path,
267 r#ref,
268 ..
269 } => self.install_git(&repo, &host, &path, r#ref.as_deref(), scope),
270 ParsedSource::Local { path } => {
271 if path.exists() {
272 Ok(())
273 } else {
274 Err(Error::config(format!(
275 "Local package path does not exist: {}",
276 path.display()
277 )))
278 }
279 }
280 }?;
281
282 self.verify_and_record_lock(source, scope, PackageLockAction::Install)
283 }
284
    /// Removes `source` from `scope` and deletes its lockfile entry.
    ///
    /// The blocking work runs on a dedicated OS thread; the result is sent
    /// back over a oneshot channel.
    ///
    /// # Errors
    /// Returns the underlying removal/lock error, or a tool error if the
    /// worker thread's result channel is cancelled.
    pub async fn remove(&self, source: &str, scope: PackageScope) -> Result<()> {
        let this = self.clone();
        let source = source.to_string();
        let (tx, rx) = oneshot::channel();

        thread::spawn(move || {
            let res = this.remove_sync(&source, scope);
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), res);
        });

        let cx = AgentCx::for_request();
        rx.recv(cx.cx())
            .await
            .map_err(|_| Error::tool("package_manager", "Remove task cancelled"))?
    }
301
302 fn remove_sync(&self, source: &str, scope: PackageScope) -> Result<()> {
303 let parsed = parse_source(source, &self.cwd);
304 match parsed {
305 ParsedSource::Npm { name, .. } => self.uninstall_npm(&name, scope),
306 ParsedSource::Git { host, path, .. } => self.remove_git(&host, &path, scope),
307 ParsedSource::Local { .. } => Ok(()),
308 }?;
309
310 self.remove_lock_entry(source, scope)
311 }
312
    /// Updates `source` at `scope` (skipping pinned sources) and refreshes
    /// its lockfile entry.
    ///
    /// The blocking work runs on a dedicated OS thread; the result is sent
    /// back over a oneshot channel.
    ///
    /// # Errors
    /// Returns the underlying update/lock error, or a tool error if the
    /// worker thread's result channel is cancelled.
    pub async fn update_source(&self, source: &str, scope: PackageScope) -> Result<()> {
        let this = self.clone();
        let source = source.to_string();
        let (tx, rx) = oneshot::channel();

        thread::spawn(move || {
            let res = this.update_source_sync(&source, scope);
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), res);
        });

        let cx = AgentCx::for_request();
        rx.recv(cx.cx())
            .await
            .map_err(|_| Error::tool("package_manager", "Update task cancelled"))?
    }
329
330 fn update_source_sync(&self, source: &str, scope: PackageScope) -> Result<()> {
331 let parsed = parse_source(source, &self.cwd);
332 match parsed {
333 ParsedSource::Npm { spec, pinned, .. } => {
334 if !pinned {
335 self.install_npm(&spec, scope)?;
336 }
337 }
338 ParsedSource::Git {
339 repo,
340 host,
341 path,
342 pinned,
343 ..
344 } => {
345 if !pinned {
346 self.update_git(&repo, &host, &path, scope)?;
347 }
348 }
349 ParsedSource::Local { .. } => {}
350 }
351
352 self.verify_and_record_lock(source, scope, PackageLockAction::Update)
353 }
354
    /// Returns the expected install path for `source` at `scope`, without
    /// checking whether it exists on disk.
    ///
    /// The blocking lookup runs on a dedicated OS thread; the result is
    /// sent back over a oneshot channel.
    ///
    /// # Errors
    /// Returns the underlying lookup error (e.g. `npm root -g` failure), or
    /// a tool error if the worker thread's result channel is cancelled.
    pub async fn installed_path(
        &self,
        source: &str,
        scope: PackageScope,
    ) -> Result<Option<PathBuf>> {
        let this = self.clone();
        let source = source.to_string();
        let (tx, rx) = oneshot::channel();

        thread::spawn(move || {
            let res = this.installed_path_sync(&source, scope);
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), res);
        });

        let cx = AgentCx::for_request();
        rx.recv(cx.cx())
            .await
            .map_err(|_| Error::tool("package_manager", "Installed path lookup cancelled"))?
    }
375
    /// Blocking variant of [`Self::installed_path`] for callers already on
    /// a worker thread.
    pub fn installed_path_blocking(
        &self,
        source: &str,
        scope: PackageScope,
    ) -> Result<Option<PathBuf>> {
        self.installed_path_sync(source, scope)
    }
384
385 fn installed_path_sync(&self, source: &str, scope: PackageScope) -> Result<Option<PathBuf>> {
386 let parsed = parse_source(source, &self.cwd);
387 Ok(match parsed {
388 ParsedSource::Npm { name, .. } => self.npm_install_path(&name, scope)?,
389 ParsedSource::Git { host, path, .. } => {
390 Some(self.git_install_path(&host, &path, scope))
391 }
392 ParsedSource::Local { path } => Some(path),
393 })
394 }
395
    /// Lists all packages declared in the global and project settings files,
    /// global entries first.
    ///
    /// The blocking settings read runs on a dedicated OS thread; the result
    /// is sent back over a oneshot channel.
    ///
    /// # Errors
    /// Returns the underlying settings-read error, or a tool error if the
    /// worker thread's result channel is cancelled.
    pub async fn list_packages(&self) -> Result<Vec<PackageEntry>> {
        let this = self.clone();
        let (tx, rx) = oneshot::channel();

        thread::spawn(move || {
            let res = this.list_packages_sync();
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), res);
        });

        let cx = AgentCx::for_request();
        rx.recv(cx.cx())
            .await
            .map_err(|_| Error::tool("package_manager", "List packages task cancelled"))?
    }
411
    /// Blocking variant of [`Self::list_packages`] for callers already on
    /// a worker thread.
    pub fn list_packages_blocking(&self) -> Result<Vec<PackageEntry>> {
        self.list_packages_sync()
    }
416
417 fn list_packages_sync(&self) -> Result<Vec<PackageEntry>> {
418 let global = list_packages_in_settings(&global_settings_path())?
419 .into_iter()
420 .map(|mut p| {
421 p.scope = PackageScope::User;
422 p
423 });
424 let project = list_packages_in_settings(&project_settings_path(&self.cwd))?
425 .into_iter()
426 .map(|mut p| {
427 p.scope = PackageScope::Project;
428 p
429 });
430 Ok(global.chain(project).collect())
431 }
432
    /// Blocking resolution of package-provided resources only (no top-level
    /// entries), using roots derived from the manager's cwd. Returns
    /// `Ok(None)` when a declared package is not installed yet.
    pub fn resolve_package_resources_blocking(&self) -> Result<Option<ResolvedPaths>> {
        let roots = ResolveRoots::from_env(&self.cwd);
        self.resolve_package_resources_with_roots_blocking(&roots)
    }
443
    /// Blocking resolution of resources contributed by declared packages.
    ///
    /// Reads both settings snapshots, merges and dedupes their package
    /// lists (global/user first, then project), and collects each package's
    /// resources through the accumulator.
    ///
    /// Returns `Ok(None)` as soon as any npm/git package's install path is
    /// missing on disk — callers can treat that as "install required".
    fn resolve_package_resources_with_roots_blocking(
        &self,
        roots: &ResolveRoots,
    ) -> Result<Option<ResolvedPaths>> {
        let global = read_settings_snapshot(&roots.global_settings_path)?;
        let project = read_settings_snapshot(&roots.project_settings_path)?;

        // User-scope packages are appended before project-scope ones, so
        // dedupe keeps that precedence ordering.
        let mut all_packages: Vec<ScopedPackage> = Vec::new();
        all_packages.extend(global.packages.iter().cloned().map(|pkg| ScopedPackage {
            pkg,
            scope: PackageScope::User,
        }));
        all_packages.extend(project.packages.iter().cloned().map(|pkg| ScopedPackage {
            pkg,
            scope: PackageScope::Project,
        }));
        let package_sources = self.dedupe_packages(all_packages);

        let mut accumulator = ResourceAccumulator::new();

        for entry in package_sources {
            let source_str = entry.pkg.source.trim();
            // Blank source strings in settings are silently skipped.
            if source_str.is_empty() {
                continue;
            }

            let parsed = parse_source(source_str, &self.cwd);
            let mut metadata = PathMetadata {
                source: source_str.to_string(),
                scope: entry.scope,
                origin: ResourceOrigin::Package,
                base_dir: None,
            };

            match parsed {
                ParsedSource::Local { path } => {
                    Self::resolve_local_extension_source(
                        &path,
                        &mut accumulator,
                        entry.pkg.filter.as_ref(),
                        &mut metadata,
                    );
                }
                ParsedSource::Npm { name, .. } => {
                    // Fall back to cwd/node_modules/<name> when no scoped
                    // install path is available.
                    let installed_path = self
                        .npm_install_path(&name, entry.scope)?
                        .unwrap_or_else(|| self.cwd.join("node_modules").join(&name));

                    if !installed_path.exists() {
                        // Not installed yet: signal "install required".
                        return Ok(None);
                    }

                    metadata.base_dir = Some(installed_path.clone());
                    Self::collect_package_resources(
                        &installed_path,
                        &mut accumulator,
                        entry.pkg.filter.as_ref(),
                        &metadata,
                    );
                }
                ParsedSource::Git { host, path, .. } => {
                    let installed_path = self.git_install_path(&host, &path, entry.scope);
                    if !installed_path.exists() {
                        // Not installed yet: signal "install required".
                        return Ok(None);
                    }

                    metadata.base_dir = Some(installed_path.clone());
                    Self::collect_package_resources(
                        &installed_path,
                        &mut accumulator,
                        entry.pkg.filter.as_ref(),
                        &metadata,
                    );
                }
            }
        }

        Ok(Some(accumulator.into_resolved_paths()))
    }
523
524 pub async fn ensure_packages_installed(&self) -> Result<Vec<PackageEntry>> {
527 let packages = self.list_packages().await?;
533 let mut installed = Vec::new();
534
535 for entry in packages {
536 if let Ok(Some(path)) = self.installed_path(&entry.source, entry.scope).await {
538 if path.exists() {
539 continue;
540 }
541 }
542
543 if self.install(&entry.source, entry.scope).await.is_ok() {
545 installed.push(entry);
546 }
547 }
548
549 Ok(installed)
550 }
551
    /// Resolves all package-provided and top-level resources using roots
    /// derived from the manager's cwd. See [`Self::resolve_with_roots`].
    pub async fn resolve(&self) -> Result<ResolvedPaths> {
        let roots = ResolveRoots::from_env(&self.cwd);
        self.resolve_with_roots(&roots).await
    }
562
563 pub async fn resolve_with_roots(&self, roots: &ResolveRoots) -> Result<ResolvedPaths> {
564 let this_for_setup = self.clone();
565 let roots_for_setup = roots.clone();
566 let (tx, rx) = oneshot::channel();
567
568 thread::spawn(move || {
570 let res: Result<(SettingsSnapshot, SettingsSnapshot, Vec<ScopedPackage>)> = (|| {
571 let global = read_settings_snapshot(&roots_for_setup.global_settings_path)?;
572 let project = read_settings_snapshot(&roots_for_setup.project_settings_path)?;
573
574 let mut all_packages: Vec<ScopedPackage> = Vec::new();
576 all_packages.extend(global.packages.iter().cloned().map(|pkg| ScopedPackage {
577 pkg,
578 scope: PackageScope::User,
579 }));
580 all_packages.extend(project.packages.iter().cloned().map(|pkg| ScopedPackage {
581 pkg,
582 scope: PackageScope::Project,
583 }));
584 let package_sources = this_for_setup.dedupe_packages(all_packages);
585 Ok((global, project, package_sources))
586 })(
587 );
588 let cx = AgentCx::for_request();
589 let _ = tx.send(cx.cx(), res);
590 });
591
592 let cx = AgentCx::for_request();
593 let (global, project, package_sources) = rx
594 .recv(cx.cx())
595 .await
596 .map_err(|_| Error::tool("package_manager", "Resolve setup task cancelled"))??;
597
598 let mut accumulator = ResourceAccumulator::new();
599
600 Box::pin(self.resolve_package_sources(&package_sources, &mut accumulator)).await?;
602
603 let this = self.clone();
605 let roots = roots.clone();
606 let (tx, rx) = oneshot::channel();
607 let accumulator = std::sync::Mutex::new(accumulator);
608
609 thread::spawn(move || {
610 let mut accumulator = accumulator.lock().unwrap();
611
612 for resource_type in ResourceType::all() {
614 let target = accumulator.target_mut(resource_type);
615 Self::resolve_local_entries(
616 global.entries_for(resource_type),
617 resource_type,
618 target,
619 &PathMetadata {
620 source: "local".to_string(),
621 scope: PackageScope::User,
622 origin: ResourceOrigin::TopLevel,
623 base_dir: Some(roots.global_base_dir.clone()),
624 },
625 &roots.global_base_dir,
626 );
627
628 Self::resolve_local_entries(
629 project.entries_for(resource_type),
630 resource_type,
631 target,
632 &PathMetadata {
633 source: "local".to_string(),
634 scope: PackageScope::Project,
635 origin: ResourceOrigin::TopLevel,
636 base_dir: Some(roots.project_base_dir.clone()),
637 },
638 &roots.project_base_dir,
639 );
640 }
641
642 this.add_auto_discovered_resources(
644 &mut accumulator,
645 &global,
646 &project,
647 &roots.global_base_dir,
648 &roots.project_base_dir,
649 );
650
651 let resolved = accumulator.clone().into_resolved_paths();
652 drop(accumulator);
653 maybe_emit_compat_ledgers(&resolved.extensions);
654 let cx = AgentCx::for_request();
655 let _ = tx.send(cx.cx(), Ok(resolved));
656 });
657
658 let cx = AgentCx::for_request();
659 rx.recv(cx.cx())
660 .await
661 .map_err(|_| Error::tool("package_manager", "Resolve processing task cancelled"))?
662 }
663
    /// Resolves resources for an ad-hoc list of extension sources (no
    /// settings involved, no filters), choosing the scope from `options`:
    /// temporary > local/project > user.
    ///
    /// # Errors
    /// Propagates package-resolution errors, or a tool error if the
    /// finishing worker thread's result channel is cancelled.
    pub async fn resolve_extension_sources(
        &self,
        sources: &[String],
        options: ResolveExtensionSourcesOptions,
    ) -> Result<ResolvedPaths> {
        let scope = if options.temporary {
            PackageScope::Temporary
        } else if options.local {
            PackageScope::Project
        } else {
            PackageScope::User
        };

        let mut accumulator = ResourceAccumulator::new();
        let package_sources = sources
            .iter()
            .map(|source| ScopedPackage {
                pkg: PackageSpec {
                    source: source.clone(),
                    filter: None,
                },
                scope,
            })
            .collect::<Vec<_>>();

        Box::pin(self.resolve_package_sources(&package_sources, &mut accumulator)).await?;

        let (tx, rx) = oneshot::channel();
        let accumulator = std::sync::Mutex::new(accumulator);

        // Finish on a worker thread: snapshot the accumulator (tolerating a
        // poisoned mutex) and emit compatibility ledgers for extensions.
        thread::spawn(move || {
            let resolved = {
                let accumulator = accumulator
                    .lock()
                    .unwrap_or_else(std::sync::PoisonError::into_inner);
                accumulator.clone().into_resolved_paths()
            };
            maybe_emit_compat_ledgers(&resolved.extensions);
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), Ok(resolved));
        });

        let cx = AgentCx::for_request();
        rx.recv(cx.cx())
            .await
            .map_err(|_| Error::tool("package_manager", "Resolve extensions task cancelled"))?
    }
714
    /// Persists `source` into the settings file for `scope`.
    ///
    /// The blocking settings update runs on a dedicated OS thread; the
    /// result is sent back over a oneshot channel.
    ///
    /// # Errors
    /// Returns a config error for [`PackageScope::Temporary`], the
    /// underlying settings-update error, or a tool error if the worker
    /// thread's result channel is cancelled.
    pub async fn add_package_source(&self, source: &str, scope: PackageScope) -> Result<()> {
        let this = self.clone();
        let source = source.to_string();
        let (tx, rx) = oneshot::channel();

        thread::spawn(move || {
            let res = this.add_package_source_sync(&source, scope);
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), res);
        });

        let cx = AgentCx::for_request();
        rx.recv(cx.cx())
            .await
            .map_err(|_| Error::tool("package_manager", "Add source task cancelled"))?
    }
731
732 fn add_package_source_sync(&self, source: &str, scope: PackageScope) -> Result<()> {
733 let path = match scope {
734 PackageScope::User => global_settings_path(),
735 PackageScope::Project => project_settings_path(&self.cwd),
736 PackageScope::Temporary => {
737 return Err(Error::config(
738 "Temporary packages cannot be persisted to settings".to_string(),
739 ));
740 }
741 };
742 update_package_sources(&path, source, UpdateAction::Add)
743 }
744
    /// Removes `source` from the settings file for `scope`.
    ///
    /// The blocking settings update runs on a dedicated OS thread; the
    /// result is sent back over a oneshot channel.
    ///
    /// # Errors
    /// Returns a config error for [`PackageScope::Temporary`], the
    /// underlying settings-update error, or a tool error if the worker
    /// thread's result channel is cancelled.
    pub async fn remove_package_source(&self, source: &str, scope: PackageScope) -> Result<()> {
        let this = self.clone();
        let source = source.to_string();
        let (tx, rx) = oneshot::channel();

        thread::spawn(move || {
            let res = this.remove_package_source_sync(&source, scope);
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), res);
        });

        let cx = AgentCx::for_request();
        rx.recv(cx.cx())
            .await
            .map_err(|_| Error::tool("package_manager", "Remove source task cancelled"))?
    }
761
762 fn remove_package_source_sync(&self, source: &str, scope: PackageScope) -> Result<()> {
763 let path = match scope {
764 PackageScope::User => global_settings_path(),
765 PackageScope::Project => project_settings_path(&self.cwd),
766 PackageScope::Temporary => {
767 return Err(Error::config(
768 "Temporary packages cannot be persisted to settings".to_string(),
769 ));
770 }
771 };
772 update_package_sources(&path, source, UpdateAction::Remove)
773 }
774
775 fn lockfile_path_for_scope(&self, scope: PackageScope) -> Option<PathBuf> {
776 match scope {
777 PackageScope::User => Some(Config::global_dir().join("packages.lock.json")),
778 PackageScope::Project => Some(
779 self.cwd
780 .join(Config::project_dir())
781 .join("packages.lock.json"),
782 ),
783 PackageScope::Temporary => None,
784 }
785 }
786
787 fn trust_audit_path_for_scope(&self, scope: PackageScope) -> Option<PathBuf> {
788 match scope {
789 PackageScope::User => Some(Config::global_dir().join("package-trust-audit.jsonl")),
790 PackageScope::Project => Some(
791 self.cwd
792 .join(Config::project_dir())
793 .join("package-trust-audit.jsonl"),
794 ),
795 PackageScope::Temporary => None,
796 }
797 }
798
    /// Builds a fresh lock entry for `source`, evaluates it against any
    /// existing entry, and either records it (trusted) or rejects it.
    ///
    /// Both outcomes append a trust-audit event; only the trusted outcome
    /// rewrites the lockfile. Temporary scope is a no-op (no lockfile).
    ///
    /// # Errors
    /// Returns lockfile I/O errors, audit-append errors, or a verification
    /// error when the transition is rejected.
    fn verify_and_record_lock(
        &self,
        source: &str,
        scope: PackageScope,
        action: PackageLockAction,
    ) -> Result<()> {
        let Some(lockfile_path) = self.lockfile_path_for_scope(scope) else {
            return Ok(());
        };

        // Candidate entry is built from the *installed* artifact on disk.
        let candidate = self.build_lock_entry(source, scope)?;
        let mut lockfile = read_package_lockfile(&lockfile_path)?;
        let existing_idx = lockfile
            .entries
            .iter()
            .position(|entry| entry.identity == candidate.identity);
        let existing = existing_idx.and_then(|idx| lockfile.entries.get(idx).cloned());

        match evaluate_lock_transition(existing.as_ref(), &candidate, action) {
            Ok(transition) => {
                // Accepted: replace or append the entry, keep the file
                // sorted, and persist atomically before auditing.
                if let Some(idx) = existing_idx {
                    lockfile.entries[idx] = candidate.clone();
                } else {
                    lockfile.entries.push(candidate.clone());
                }
                sort_lock_entries(&mut lockfile.entries);
                write_package_lockfile_atomic(&lockfile_path, &lockfile)?;

                let event = PackageTrustAuditEvent {
                    schema: PACKAGE_TRUST_AUDIT_SCHEMA,
                    timestamp: chrono::Utc::now()
                        .to_rfc3339_opts(chrono::SecondsFormat::Millis, true),
                    action: match action {
                        PackageLockAction::Install => "install",
                        PackageLockAction::Update => "update",
                    }
                    .to_string(),
                    scope: scope_label(scope).to_string(),
                    source: source.to_string(),
                    identity: candidate.identity.clone(),
                    from_state: transition.from_state,
                    to_state: transition.to_state,
                    reason_codes: transition.reason_codes,
                    remediation: None,
                    details: serde_json::to_value(&candidate)
                        .unwrap_or_else(|_| serde_json::json!({})),
                };
                self.append_trust_audit_event(scope, &event)?;
                Ok(())
            }
            Err(mismatch) => {
                // Rejected: the lockfile is left untouched; only the audit
                // log records the rejection (with remediation guidance).
                let from_state = existing.as_ref().map_or_else(
                    || "untracked".to_string(),
                    |entry| trust_state_label(entry.trust_state).to_string(),
                );
                let event = PackageTrustAuditEvent {
                    schema: PACKAGE_TRUST_AUDIT_SCHEMA,
                    timestamp: chrono::Utc::now()
                        .to_rfc3339_opts(chrono::SecondsFormat::Millis, true),
                    action: match action {
                        PackageLockAction::Install => "install",
                        PackageLockAction::Update => "update",
                    }
                    .to_string(),
                    scope: scope_label(scope).to_string(),
                    source: source.to_string(),
                    identity: candidate.identity.clone(),
                    from_state,
                    to_state: "rejected".to_string(),
                    reason_codes: vec![mismatch.code.to_string()],
                    remediation: Some(mismatch.remediation.clone()),
                    details: serde_json::to_value(&candidate)
                        .unwrap_or_else(|_| serde_json::json!({})),
                };
                self.append_trust_audit_event(scope, &event)?;

                Err(verification_error(
                    mismatch.code,
                    &mismatch.reason,
                    &mismatch.remediation,
                ))
            }
        }
    }
883
    /// Deletes the lock entry for `source` (if present), rewrites the
    /// lockfile atomically, and appends a "remove" audit event.
    /// No-op for temporary scope or when the entry does not exist.
    ///
    /// # Errors
    /// Returns lockfile I/O or audit-append errors.
    fn remove_lock_entry(&self, source: &str, scope: PackageScope) -> Result<()> {
        let Some(lockfile_path) = self.lockfile_path_for_scope(scope) else {
            return Ok(());
        };

        let identity = self.package_identity(source);
        let mut lockfile = read_package_lockfile(&lockfile_path)?;
        let Some(idx) = lockfile
            .entries
            .iter()
            .position(|entry| entry.identity == identity)
        else {
            // Nothing tracked for this identity — nothing to remove.
            return Ok(());
        };

        let removed = lockfile.entries.remove(idx);
        sort_lock_entries(&mut lockfile.entries);
        write_package_lockfile_atomic(&lockfile_path, &lockfile)?;

        let event = PackageTrustAuditEvent {
            schema: PACKAGE_TRUST_AUDIT_SCHEMA,
            timestamp: chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Millis, true),
            action: "remove".to_string(),
            scope: scope_label(scope).to_string(),
            source: source.to_string(),
            identity,
            from_state: trust_state_label(removed.trust_state).to_string(),
            to_state: "removed".to_string(),
            reason_codes: vec!["removed".to_string()],
            remediation: None,
            details: serde_json::to_value(&removed).unwrap_or_else(|_| serde_json::json!({})),
        };
        self.append_trust_audit_event(scope, &event)?;
        Ok(())
    }
919
920 fn append_trust_audit_event(
921 &self,
922 scope: PackageScope,
923 event: &PackageTrustAuditEvent,
924 ) -> Result<()> {
925 let Some(path) = self.trust_audit_path_for_scope(scope) else {
926 return Ok(());
927 };
928 if let Some(parent) = path.parent() {
929 fs::create_dir_all(parent)?;
930 }
931
932 let payload = serde_json::to_string(event)?;
933 let mut file = fs::OpenOptions::new()
934 .create(true)
935 .append(true)
936 .open(path)?;
937 writeln!(file, "{payload}")?;
938 Ok(())
939 }
940
    /// Builds a trusted lock entry by inspecting the *installed* artifact:
    /// npm (package.json version + digest), git (HEAD commit, ref check,
    /// origin URL + digest), or local (canonicalized path + digest).
    ///
    /// # Errors
    /// Returns tool/config errors when the install path is missing, and
    /// verification errors when pinned npm versions or git refs disagree
    /// with what is actually installed.
    #[allow(clippy::too_many_lines)]
    fn build_lock_entry(&self, source: &str, scope: PackageScope) -> Result<PackageLockEntry> {
        let parsed = parse_source(source, &self.cwd);
        match parsed {
            ParsedSource::Npm { spec, name, pinned } => {
                let installed_path = self.npm_install_path(&name, scope)?.ok_or_else(|| {
                    Error::tool(
                        "package_manager",
                        "npm lock verification requires a concrete install path",
                    )
                })?;
                if !installed_path.exists() {
                    return Err(Error::tool(
                        "package_manager",
                        format!(
                            "Installed npm package path is missing for lock verification: {}",
                            installed_path.display()
                        ),
                    ));
                }

                // Installed version comes from the on-disk package.json.
                let (_, requested_version) = parse_npm_spec(&spec);
                let installed_version = read_installed_npm_version(&installed_path).ok_or_else(|| {
                    verification_error(
                        "npm_version_missing",
                        &format!(
                            "Missing package.json version for installed npm package at {}",
                            installed_path.display()
                        ),
                        "Reinstall the package (`pi remove <source>` then `pi install <source>`) and retry.",
                    )
                })?;

                // Only exact version specs are enforced; ranges are not.
                if let Some(expected) = requested_version
                    .as_deref()
                    .filter(|value| is_exact_npm_version(value))
                {
                    if expected != installed_version {
                        return Err(verification_error(
                            "npm_version_mismatch",
                            &format!(
                                "Pinned npm version mismatch for {name}: expected {expected}, got {installed_version}",
                            ),
                            "Pin the intended version explicitly and reinstall to refresh trusted provenance.",
                        ));
                    }
                }

                let digest_sha256 = digest_package_path(&installed_path)?;
                Ok(PackageLockEntry {
                    identity: self.package_identity(source),
                    source: source.to_string(),
                    source_kind: PackageSourceKind::Npm,
                    resolved: PackageResolvedProvenance::Npm {
                        name,
                        requested_spec: spec,
                        requested_version,
                        installed_version,
                        pinned,
                    },
                    digest_sha256,
                    trust_state: PackageEntryTrustState::Trusted,
                })
            }
            ParsedSource::Git {
                repo,
                host,
                path,
                r#ref,
                pinned,
            } => {
                let installed_path = self.git_install_path(&host, &path, scope);
                if !installed_path.exists() {
                    return Err(Error::tool(
                        "package_manager",
                        format!(
                            "Installed git package path is missing for lock verification: {}",
                            installed_path.display()
                        ),
                    ));
                }

                // When a ref was requested, HEAD must resolve to the same
                // commit as that ref.
                let resolved_commit =
                    run_command_capture("git", ["rev-parse", "HEAD"], Some(&installed_path))?;
                if let Some(expected_ref) = r#ref.as_ref() {
                    let expected_commit = run_command_capture(
                        "git",
                        ["rev-parse", expected_ref.as_str()],
                        Some(&installed_path),
                    )?;
                    if expected_commit != resolved_commit {
                        return Err(verification_error(
                            "git_ref_mismatch",
                            &format!(
                                "Pinned git ref mismatch for {repo}: ref {expected_ref} resolved to {expected_commit}, but HEAD is {resolved_commit}",
                            ),
                            "Fetch/reset the repo and reinstall from the intended pinned ref.",
                        ));
                    }
                }

                // origin URL is best-effort provenance; absence is fine.
                let origin_url = run_command_capture(
                    "git",
                    ["config", "--get", "remote.origin.url"],
                    Some(&installed_path),
                )
                .ok()
                .filter(|value| !value.trim().is_empty());
                let digest_sha256 = digest_package_path(&installed_path)?;

                Ok(PackageLockEntry {
                    identity: self.package_identity(source),
                    source: source.to_string(),
                    source_kind: PackageSourceKind::Git,
                    resolved: PackageResolvedProvenance::Git {
                        repo,
                        host,
                        path,
                        requested_ref: r#ref,
                        resolved_commit,
                        origin_url,
                        pinned,
                    },
                    digest_sha256,
                    trust_state: PackageEntryTrustState::Trusted,
                })
            }
            ParsedSource::Local { path } => {
                if !path.exists() {
                    return Err(Error::config(format!(
                        "Local package path does not exist: {}",
                        path.display()
                    )));
                }

                let digest_sha256 = digest_package_path(&path)?;
                // Canonicalization is best-effort; fall back to the raw path.
                let resolved_path = path
                    .canonicalize()
                    .unwrap_or(path)
                    .to_string_lossy()
                    .to_string();
                Ok(PackageLockEntry {
                    identity: self.package_identity(source),
                    source: source.to_string(),
                    source_kind: PackageSourceKind::Local,
                    resolved: PackageResolvedProvenance::Local { resolved_path },
                    digest_sha256,
                    trust_state: PackageEntryTrustState::Trusted,
                })
            }
        }
    }
1093
1094 fn project_npm_root(&self) -> PathBuf {
1095 self.cwd.join(Config::project_dir()).join("npm")
1096 }
1097
1098 fn project_git_root(&self) -> PathBuf {
1099 self.cwd.join(Config::project_dir()).join("git")
1100 }
1101
1102 #[allow(clippy::unused_self)]
1103 fn global_git_root(&self) -> PathBuf {
1104 Config::global_dir().join("git")
1105 }
1106
1107 #[allow(clippy::unused_self)]
1108 fn global_npm_root(&self) -> Result<PathBuf> {
1109 let output = Command::new("npm")
1110 .args(["root", "-g"])
1111 .stdin(Stdio::null())
1112 .stdout(Stdio::piped())
1113 .stderr(Stdio::piped())
1114 .output()
1115 .map_err(|e| Error::tool("npm", format!("Failed to spawn npm: {e}")))?;
1116
1117 if !output.status.success() {
1118 let stdout = String::from_utf8_lossy(&output.stdout);
1119 let stderr = String::from_utf8_lossy(&output.stderr);
1120 let mut msg = String::from("npm root -g failed");
1121 if let Some(code) = output.status.code() {
1122 let _ = write!(msg, " (exit {code})");
1123 }
1124 if !stdout.trim().is_empty() {
1125 let _ = write!(msg, "\nstdout:\n{stdout}");
1126 }
1127 if !stderr.trim().is_empty() {
1128 let _ = write!(msg, "\nstderr:\n{stderr}");
1129 }
1130 return Err(Error::tool("npm", msg));
1131 }
1132
1133 let root = String::from_utf8_lossy(&output.stdout).trim().to_string();
1134 if root.is_empty() {
1135 return Err(Error::tool("npm", "npm root -g returned empty output"));
1136 }
1137
1138 Ok(PathBuf::from(root))
1139 }
1140
1141 fn npm_install_path(&self, name: &str, scope: PackageScope) -> Result<Option<PathBuf>> {
1142 Ok(match scope {
1143 PackageScope::Temporary => {
1144 Some(temporary_dir("npm", None).join("node_modules").join(name))
1145 }
1146 PackageScope::Project => Some(self.project_npm_root().join("node_modules").join(name)),
1147 PackageScope::User => Some(self.global_npm_root()?.join(name)),
1148 })
1149 }
1150
1151 fn git_root(&self, scope: PackageScope) -> Option<PathBuf> {
1152 match scope {
1153 PackageScope::Temporary => None,
1154 PackageScope::User => Some(self.global_git_root()),
1155 PackageScope::Project => Some(self.project_git_root()),
1156 }
1157 }
1158
1159 fn git_install_path(&self, host: &str, repo_path: &str, scope: PackageScope) -> PathBuf {
1160 match scope {
1161 PackageScope::Temporary => temporary_dir(&format!("git-{host}"), Some(repo_path)),
1162 PackageScope::User => self.global_git_root().join(host).join(repo_path),
1163 PackageScope::Project => self.project_git_root().join(host).join(repo_path),
1164 }
1165 }
1166
1167 fn install_npm(&self, spec: &str, scope: PackageScope) -> Result<()> {
1168 let (name, _) = parse_npm_spec(spec);
1169 match scope {
1170 PackageScope::User => run_command("npm", ["install", "-g", spec], None)?,
1171 PackageScope::Project | PackageScope::Temporary => {
1172 let install_root = match scope {
1173 PackageScope::Project => self.project_npm_root(),
1174 PackageScope::Temporary => temporary_dir("npm", None),
1175 PackageScope::User => unreachable!("handled above"),
1176 };
1177 ensure_npm_project(&install_root)?;
1178 run_command(
1179 "npm",
1180 [
1181 "install",
1182 "--prefix",
1183 install_root.to_string_lossy().as_ref(),
1184 "--",
1185 spec,
1186 ],
1187 None,
1188 )?;
1189 }
1190 }
1191
1192 if let Some(installed) = self.npm_install_path(&name, scope)? {
1194 if !installed.exists() {
1195 return Err(Error::tool(
1196 "npm",
1197 format!(
1198 "npm install succeeded but '{}' is missing",
1199 installed.display()
1200 ),
1201 ));
1202 }
1203 }
1204
1205 Ok(())
1206 }
1207
1208 fn uninstall_npm(&self, name: &str, scope: PackageScope) -> Result<()> {
1209 if scope == PackageScope::User {
1210 run_command("npm", ["uninstall", "-g", "--", name], None)?;
1211 return Ok(());
1212 }
1213
1214 let install_root = match scope {
1215 PackageScope::Project => self.project_npm_root(),
1216 PackageScope::Temporary => temporary_dir("npm", None),
1217 PackageScope::User => unreachable!("handled above"),
1218 };
1219 if !install_root.exists() {
1220 return Ok(());
1221 }
1222 run_command(
1223 "npm",
1224 [
1225 "uninstall",
1226 "--prefix",
1227 install_root.to_string_lossy().as_ref(),
1228 "--",
1229 name,
1230 ],
1231 None,
1232 )?;
1233 Ok(())
1234 }
1235
    /// Clone a git package into its scope-specific install path.
    ///
    /// No-op if the target directory already exists (updating is handled by
    /// `update_git`). After cloning, optionally checks out `ref`, and runs
    /// `npm install` when the checkout looks like an npm package.
    fn install_git(
        &self,
        repo: &str,
        host: &str,
        repo_path: &str,
        r#ref: Option<&str>,
        scope: PackageScope,
    ) -> Result<()> {
        let target_dir = self.git_install_path(host, repo_path, scope);
        if target_dir.exists() {
            // Already cloned; a differing `ref` is not reconciled here.
            return Ok(());
        }

        // Keep clones out of version control for non-temporary scopes.
        if let Some(root) = self.git_root(scope) {
            ensure_git_ignore(&root)?;
        }
        if let Some(parent) = target_dir.parent() {
            fs::create_dir_all(parent)?;
        }

        // Accept explicit http(s) URLs and local paths verbatim; otherwise
        // assume a bare `host/path` form and default to https.
        let clone_url = if repo.starts_with("http://") || repo.starts_with("https://") {
            repo.to_string()
        } else if looks_like_local_path(repo) {
            repo.to_string()
        } else {
            format!("https://{repo}")
        };

        run_command(
            "git",
            [
                "clone",
                "--",
                &clone_url,
                target_dir.to_string_lossy().as_ref(),
            ],
            None,
        )?;

        if let Some(r#ref) = r#ref {
            run_command("git", ["checkout", r#ref], Some(&target_dir))?;
        }

        // Install JS dependencies if the repo ships a package.json.
        if target_dir.join("package.json").exists() {
            run_command("npm", ["install"], Some(&target_dir))?;
        }

        Ok(())
    }
1286
    /// Bring a git checkout up to date with its upstream.
    ///
    /// Temporary installs are never updated. A missing checkout falls back
    /// to a fresh install. Otherwise the working tree is force-synced
    /// (fetch, hard reset to `@{upstream}`, clean) — any local changes in
    /// the checkout are discarded by design.
    fn update_git(
        &self,
        repo: &str,
        host: &str,
        repo_path: &str,
        scope: PackageScope,
    ) -> Result<()> {
        if scope == PackageScope::Temporary {
            return Ok(());
        }

        let target_dir = self.git_install_path(host, repo_path, scope);
        if !target_dir.exists() {
            // Never cloned (or was removed): install at default ref instead.
            return self.install_git(repo, host, repo_path, None, scope);
        }

        run_command("git", ["fetch", "--prune", "origin"], Some(&target_dir))?;
        run_command("git", ["reset", "--hard", "@{upstream}"], Some(&target_dir))?;
        run_command("git", ["clean", "-fdx"], Some(&target_dir))?;

        // Refresh JS dependencies after the tree moved.
        if target_dir.join("package.json").exists() {
            run_command("npm", ["install"], Some(&target_dir))?;
        }

        Ok(())
    }
1314
1315 fn remove_git(&self, host: &str, repo_path: &str, scope: PackageScope) -> Result<()> {
1316 let target_dir = self.git_install_path(host, repo_path, scope);
1317 if !target_dir.exists() {
1318 return Ok(());
1319 }
1320
1321 fs::remove_dir_all(&target_dir)?;
1322 if let Some(root) = self.git_root(scope) {
1323 prune_empty_git_parents(&target_dir, &root);
1324 }
1325 Ok(())
1326 }
1327}
1328
/// Flags controlling how extension sources are resolved.
#[derive(Debug, Clone, Default)]
pub struct ResolveExtensionSourcesOptions {
    // NOTE(review): presumably selects project-local (vs user) scope — confirm with callers.
    pub local: bool,
    // NOTE(review): presumably maps to `PackageScope::Temporary` installs — confirm with callers.
    pub temporary: bool,
}
1338
/// A package reference from settings: the raw source string plus an optional
/// per-resource-type filter.
#[derive(Debug, Clone)]
struct PackageSpec {
    // Raw source string (npm spec, git repo, or local path).
    source: String,
    // `None` when the settings entry was a bare string; `Some` when it was an
    // object form (even if every filter field was absent).
    filter: Option<PackageFilter>,
}
1344
/// The subset of a settings JSON file that package resolution consumes.
#[derive(Debug, Clone)]
struct SettingsSnapshot {
    // Entries under the `"packages"` key.
    packages: Vec<PackageSpec>,
    // Top-level `"extensions"` entries (plain paths and/or patterns).
    extensions: Vec<String>,
    // Top-level `"skills"` entries.
    skills: Vec<String>,
    // Top-level `"prompts"` entries.
    prompts: Vec<String>,
    // Top-level `"themes"` entries.
    themes: Vec<String>,
}
1353
1354impl SettingsSnapshot {
1355 fn entries_for(&self, resource_type: ResourceType) -> &[String] {
1356 match resource_type {
1357 ResourceType::Extensions => &self.extensions,
1358 ResourceType::Skills => &self.skills,
1359 ResourceType::Prompts => &self.prompts,
1360 ResourceType::Themes => &self.themes,
1361 }
1362 }
1363}
1364
1365fn read_settings_snapshot(path: &Path) -> Result<SettingsSnapshot> {
1366 let value = read_settings_json(path)?;
1367 let packages_value = value
1368 .get("packages")
1369 .and_then(Value::as_array)
1370 .cloned()
1371 .unwrap_or_default();
1372
1373 let mut packages = Vec::new();
1374 for pkg in &packages_value {
1375 if let Some(spec) = extract_package_spec(pkg) {
1376 packages.push(spec);
1377 }
1378 }
1379
1380 Ok(SettingsSnapshot {
1381 packages,
1382 extensions: extract_string_array(value.get("extensions")),
1383 skills: extract_string_array(value.get("skills")),
1384 prompts: extract_string_array(value.get("prompts")),
1385 themes: extract_string_array(value.get("themes")),
1386 })
1387}
1388
1389fn extract_string_array(value: Option<&Value>) -> Vec<String> {
1390 match value {
1391 Some(Value::String(s)) => vec![s.clone()],
1392 Some(Value::Array(arr)) => arr
1393 .iter()
1394 .filter_map(Value::as_str)
1395 .map(str::to_string)
1396 .collect(),
1397 _ => Vec::new(),
1398 }
1399}
1400
1401fn extract_package_spec(value: &Value) -> Option<PackageSpec> {
1402 if let Some(s) = value.as_str() {
1403 return Some(PackageSpec {
1404 source: s.to_string(),
1405 filter: None,
1406 });
1407 }
1408
1409 let obj = value.as_object()?;
1410 let source = obj.get("source")?.as_str()?.to_string();
1411
1412 let filter = PackageFilter {
1413 extensions: extract_filter_field(obj, "extensions"),
1414 skills: extract_filter_field(obj, "skills"),
1415 prompts: extract_filter_field(obj, "prompts"),
1416 themes: extract_filter_field(obj, "themes"),
1417 };
1418
1419 Some(PackageSpec {
1420 source,
1421 filter: Some(filter),
1422 })
1423}
1424
1425fn extract_filter_field(obj: &serde_json::Map<String, Value>, key: &str) -> Option<Vec<String>> {
1426 if !obj.contains_key(key) {
1427 return None;
1428 }
1429
1430 match obj.get(key) {
1431 Some(Value::String(s)) => Some(vec![s.clone()]),
1432 Some(Value::Array(arr)) => Some(
1433 arr.iter()
1434 .filter_map(Value::as_str)
1435 .map(str::to_string)
1436 .collect(),
1437 ),
1438 _ => Some(Vec::new()),
1439 }
1440}
1441
/// A package spec paired with the scope (user/project/temporary) whose
/// settings file declared it.
#[derive(Debug, Clone)]
struct ScopedPackage {
    pkg: PackageSpec,
    scope: PackageScope,
}
1447
/// The four kinds of resources a package can provide.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ResourceType {
    Extensions,
    Skills,
    Prompts,
    Themes,
}
1455
impl ResourceType {
    /// All resource types, in the order they are processed everywhere.
    const fn all() -> [Self; 4] {
        [Self::Extensions, Self::Skills, Self::Prompts, Self::Themes]
    }

    /// Settings key / conventional directory name for this resource type.
    const fn as_str(self) -> &'static str {
        match self {
            Self::Extensions => "extensions",
            Self::Skills => "skills",
            Self::Prompts => "prompts",
            Self::Themes => "themes",
        }
    }
}
1470
/// Collects path-deduplicated resources of each type while sources resolve.
#[derive(Debug, Default, Clone)]
struct ResourceAccumulator {
    extensions: ResourceList,
    skills: ResourceList,
    prompts: ResourceList,
    themes: ResourceList,
}
1478
1479impl ResourceAccumulator {
1480 fn new() -> Self {
1481 Self::default()
1482 }
1483
1484 #[allow(clippy::missing_const_for_fn)] fn target_mut(&mut self, resource_type: ResourceType) -> &mut ResourceList {
1486 match resource_type {
1487 ResourceType::Extensions => &mut self.extensions,
1488 ResourceType::Skills => &mut self.skills,
1489 ResourceType::Prompts => &mut self.prompts,
1490 ResourceType::Themes => &mut self.themes,
1491 }
1492 }
1493
1494 fn into_resolved_paths(mut self) -> ResolvedPaths {
1495 for items in [
1496 &mut self.extensions.items,
1497 &mut self.skills.items,
1498 &mut self.prompts.items,
1499 &mut self.themes.items,
1500 ] {
1501 items.sort_by(|a, b| a.path.to_string_lossy().cmp(&b.path.to_string_lossy()));
1502 }
1503
1504 ResolvedPaths {
1505 extensions: self.extensions.items,
1506 skills: self.skills.items,
1507 prompts: self.prompts.items,
1508 themes: self.themes.items,
1509 }
1510 }
1511}
1512
/// An insertion-ordered list of resolved resources, deduplicated by the
/// lossy string form of each path.
#[derive(Debug, Default, Clone)]
struct ResourceList {
    // String forms of every path already present in `items`.
    seen: std::collections::HashSet<String>,
    items: Vec<ResolvedResource>,
}
1518
1519impl ResourceList {
1520 fn add(&mut self, path: PathBuf, metadata: &PathMetadata, enabled: bool) {
1521 let key = path.to_string_lossy().to_string();
1522 if !self.seen.insert(key) {
1523 return;
1524 }
1525 self.items.push(ResolvedResource {
1526 path,
1527 enabled,
1528 metadata: metadata.clone(),
1529 });
1530 }
1531}
1532
impl PackageManager {
    /// Collapse duplicate package entries (same identity) into one.
    ///
    /// The first occurrence keeps its position; a later Project-scope entry
    /// replaces an earlier User-scope entry for the same identity, so project
    /// settings take precedence over user settings.
    fn dedupe_packages(&self, packages: Vec<ScopedPackage>) -> Vec<ScopedPackage> {
        let mut seen: std::collections::HashMap<String, usize> = std::collections::HashMap::new();
        let mut out: Vec<ScopedPackage> = Vec::new();

        for entry in packages {
            let identity = self.package_identity(&entry.pkg.source);
            if let Some(&idx) = seen.get(&identity) {
                let existing_scope = out[idx].scope;
                // Project scope overrides user scope for the same package.
                if entry.scope == PackageScope::Project && existing_scope == PackageScope::User {
                    out[idx] = entry;
                }
                continue;
            }

            seen.insert(identity, out.len());
            out.push(entry);
        }

        out
    }

    /// Resolve each package source (local path, npm, or git), installing it
    /// on demand, and add its resources to `accumulator`. Blank sources are
    /// skipped; any install failure aborts the whole pass.
    async fn resolve_package_sources(
        &self,
        sources: &[ScopedPackage],
        accumulator: &mut ResourceAccumulator,
    ) -> Result<()> {
        for entry in sources {
            let source_str = entry.pkg.source.trim();
            if source_str.is_empty() {
                continue;
            }

            let parsed = parse_source(source_str, &self.cwd);
            let mut metadata = PathMetadata {
                source: source_str.to_string(),
                scope: entry.scope,
                origin: ResourceOrigin::Package,
                base_dir: None,
            };

            match parsed {
                ParsedSource::Local { path } => {
                    Self::resolve_local_extension_source(
                        &path,
                        accumulator,
                        entry.pkg.filter.as_ref(),
                        &mut metadata,
                    );
                }
                ParsedSource::Npm { spec, name, pinned } => {
                    // Fall back to a cwd-relative node_modules path when no
                    // installed location is known for this scope.
                    let installed_path = self
                        .installed_path(&format!("npm:{name}"), entry.scope)
                        .await?
                        .unwrap_or_else(|| self.cwd.join("node_modules").join(&name));

                    // Box::pin keeps the nested async call's future sized here.
                    let needs_install = !installed_path.exists()
                        || Box::pin(self.npm_needs_update(&spec, pinned, &installed_path)).await;
                    if needs_install {
                        self.install(source_str, entry.scope).await?;
                    }

                    metadata.base_dir = Some(installed_path.clone());
                    Self::collect_package_resources(
                        &installed_path,
                        accumulator,
                        entry.pkg.filter.as_ref(),
                        &metadata,
                    );
                }
                ParsedSource::Git {
                    repo: _,
                    host,
                    path,
                    r#ref: _,
                    ..
                } => {
                    let installed_path = self.git_install_path(&host, &path, entry.scope);

                    // Git sources are only installed when missing; this path
                    // never updates an existing checkout.
                    if !installed_path.exists() {
                        self.install(source_str, entry.scope).await?;
                    }

                    metadata.base_dir = Some(installed_path.clone());
                    Self::collect_package_resources(
                        &installed_path,
                        accumulator,
                        entry.pkg.filter.as_ref(),
                        &metadata,
                    );
                }
            }
        }

        Ok(())
    }

    /// Decide whether an installed npm package needs a (re)install.
    ///
    /// Unreadable installed version => update. A pinned spec updates only on
    /// version mismatch; an unpinned spec updates when the latest published
    /// version differs (a failed latest-version lookup counts as up to date).
    async fn npm_needs_update(&self, spec: &str, pinned: bool, installed_path: &Path) -> bool {
        let installed_version = read_installed_npm_version(installed_path);
        let Some(installed_version) = installed_version else {
            return true;
        };

        let (_, pinned_version) = parse_npm_spec(spec);
        if pinned {
            return pinned_version.is_some_and(|pv| pv != installed_version);
        }

        Box::pin(get_latest_npm_version(installed_path, spec))
            .await
            .is_ok_and(|latest| latest != installed_version)
    }

    /// Add resources from a local source path.
    ///
    /// A file is registered as a single (supported) extension entrypoint; a
    /// directory is scanned as a package, falling back to registering the
    /// directory itself as one extension when nothing was collected from it.
    /// Missing/unreadable paths and non-file/non-dir entries are ignored.
    fn resolve_local_extension_source(
        resolved: &Path,
        accumulator: &mut ResourceAccumulator,
        filter: Option<&PackageFilter>,
        metadata: &mut PathMetadata,
    ) {
        if !resolved.exists() {
            return;
        }

        let Ok(stats) = fs::metadata(resolved) else {
            return;
        };

        if stats.is_file() {
            if !is_supported_extension_file(resolved) {
                warn!(
                    path = %resolved.display(),
                    "Ignoring unsupported extension source file; use extension.json, JS/TS entrypoints, *.native.json, or *.wasm"
                );
                return;
            }
            metadata.base_dir = resolved.parent().map(Path::to_path_buf);
            accumulator
                .extensions
                .add(resolved.to_path_buf(), metadata, true);
            return;
        }

        if !stats.is_dir() {
            return;
        }

        metadata.base_dir = Some(resolved.to_path_buf());
        let had_any = Self::collect_package_resources(resolved, accumulator, filter, metadata);
        if !had_any {
            // Nothing conventional inside: treat the directory itself as an
            // extension root.
            accumulator
                .extensions
                .add(resolved.to_path_buf(), metadata, true);
        }
    }

    /// Resolve top-level settings entries for one resource type: plain
    /// entries are expanded to files relative to `base_dir`, then pattern
    /// entries decide which of those files are enabled.
    fn resolve_local_entries(
        entries: &[String],
        resource_type: ResourceType,
        target: &mut ResourceList,
        metadata: &PathMetadata,
        base_dir: &Path,
    ) {
        if entries.is_empty() {
            return;
        }

        let (plain, patterns) = split_patterns(entries);
        let resolved_plain = plain
            .iter()
            .map(|p| resolve_path_from_base(p, base_dir))
            .collect::<Vec<_>>();
        let all_files = collect_files_from_paths(&resolved_plain, resource_type);
        let enabled_paths = apply_patterns(&all_files, &patterns, base_dir);

        for f in all_files {
            let enabled = enabled_paths.contains(&f);
            target.add(f, metadata, enabled);
        }
    }

    /// Auto-discover resources under the conventional user and project
    /// directories; the corresponding top-level settings entries act as
    /// enable/disable overrides on what is found.
    #[allow(clippy::unused_self)]
    fn add_auto_discovered_resources(
        &self,
        accumulator: &mut ResourceAccumulator,
        global: &SettingsSnapshot,
        project: &SettingsSnapshot,
        global_base_dir: &Path,
        project_base_dir: &Path,
    ) {
        let user_metadata = PathMetadata {
            source: "auto".to_string(),
            scope: PackageScope::User,
            origin: ResourceOrigin::TopLevel,
            base_dir: Some(global_base_dir.to_path_buf()),
        };
        let project_metadata = PathMetadata {
            source: "auto".to_string(),
            scope: PackageScope::Project,
            origin: ResourceOrigin::TopLevel,
            base_dir: Some(project_base_dir.to_path_buf()),
        };

        let user_dirs = AutoDirs::new(global_base_dir);
        let project_dirs = AutoDirs::new(project_base_dir);

        for resource_type in ResourceType::all() {
            let target = accumulator.target_mut(resource_type);
            // User-scope discovery, gated by global settings overrides.
            let (user_paths, user_overrides) = match resource_type {
                ResourceType::Extensions => (
                    collect_auto_extension_entries(&user_dirs.extensions),
                    &global.extensions,
                ),
                ResourceType::Skills => (
                    collect_auto_skill_entries(&user_dirs.skills),
                    &global.skills,
                ),
                ResourceType::Prompts => (
                    collect_auto_prompt_entries(&user_dirs.prompts),
                    &global.prompts,
                ),
                ResourceType::Themes => (
                    collect_auto_theme_entries(&user_dirs.themes),
                    &global.themes,
                ),
            };
            for path in user_paths {
                let enabled = is_enabled_by_overrides(&path, user_overrides, global_base_dir);
                target.add(path, &user_metadata, enabled);
            }

            // Project-scope discovery, gated by project settings overrides.
            let (project_paths, project_overrides) = match resource_type {
                ResourceType::Extensions => (
                    collect_auto_extension_entries(&project_dirs.extensions),
                    &project.extensions,
                ),
                ResourceType::Skills => (
                    collect_auto_skill_entries(&project_dirs.skills),
                    &project.skills,
                ),
                ResourceType::Prompts => (
                    collect_auto_prompt_entries(&project_dirs.prompts),
                    &project.prompts,
                ),
                ResourceType::Themes => (
                    collect_auto_theme_entries(&project_dirs.themes),
                    &project.themes,
                ),
            };
            for path in project_paths {
                let enabled = is_enabled_by_overrides(&path, project_overrides, project_base_dir);
                target.add(path, &project_metadata, enabled);
            }
        }
    }

    /// Collect a package's resources into `accumulator`.
    ///
    /// Precedence: an explicit user filter wins, then the package's `pi`
    /// manifest, then the conventional per-type directories. Returns `true`
    /// when a filter or manifest drove collection, or when at least one
    /// conventional directory existed.
    fn collect_package_resources(
        package_root: &Path,
        accumulator: &mut ResourceAccumulator,
        filter: Option<&PackageFilter>,
        metadata: &PathMetadata,
    ) -> bool {
        if let Some(filter) = filter {
            for resource_type in ResourceType::all() {
                let target = accumulator.target_mut(resource_type);
                let patterns = match resource_type {
                    ResourceType::Extensions => filter.extensions.as_ref(),
                    ResourceType::Skills => filter.skills.as_ref(),
                    ResourceType::Prompts => filter.prompts.as_ref(),
                    ResourceType::Themes => filter.themes.as_ref(),
                };

                // Per-type: present filter field => pattern-gated collection;
                // absent field => default (manifest/convention) collection.
                if let Some(patterns) = patterns {
                    Self::apply_package_filter(
                        package_root,
                        patterns,
                        resource_type,
                        target,
                        metadata,
                    );
                } else {
                    Self::collect_default_resources(package_root, resource_type, target, metadata);
                }
            }
            return true;
        }

        if let Some(manifest) = read_pi_manifest(package_root) {
            for resource_type in ResourceType::all() {
                let entries = manifest.entries_for(resource_type);
                Self::add_manifest_entries(
                    entries.as_deref(),
                    package_root,
                    resource_type,
                    accumulator.target_mut(resource_type),
                    metadata,
                );
            }
            return true;
        }

        // No filter, no manifest: fall back to conventional directories.
        let mut has_any_dir = false;
        for resource_type in ResourceType::all() {
            let dir = package_root.join(resource_type.as_str());
            if dir.exists() {
                let files = collect_resource_files(&dir, resource_type);
                let target = accumulator.target_mut(resource_type);
                for f in files {
                    target.add(f, metadata, true);
                }
                has_any_dir = true;
            }
        }

        has_any_dir
    }

    /// Collect one resource type without a user filter: prefer non-empty
    /// manifest entries, otherwise fall back to the conventional directory.
    fn collect_default_resources(
        package_root: &Path,
        resource_type: ResourceType,
        target: &mut ResourceList,
        metadata: &PathMetadata,
    ) {
        if let Some(manifest) = read_pi_manifest(package_root) {
            let entries = manifest.entries_for(resource_type);
            if entries.as_ref().is_some_and(|e| !e.is_empty()) {
                Self::add_manifest_entries(
                    entries.as_deref(),
                    package_root,
                    resource_type,
                    target,
                    metadata,
                );
                return;
            }
        }

        let dir = package_root.join(resource_type.as_str());
        if dir.exists() {
            let files = collect_resource_files(&dir, resource_type);
            for f in files {
                target.add(f, metadata, true);
            }
        }
    }

    /// Apply a user-supplied filter to a package's files for one resource
    /// type. Every file is registered; the patterns only decide which are
    /// enabled — an empty pattern list disables them all.
    fn apply_package_filter(
        package_root: &Path,
        user_patterns: &[String],
        resource_type: ResourceType,
        target: &mut ResourceList,
        metadata: &PathMetadata,
    ) {
        let (all_files, _) = Self::collect_manifest_files(package_root, resource_type);

        if user_patterns.is_empty() {
            for f in all_files {
                target.add(f, metadata, false);
            }
            return;
        }

        let enabled_by_user = apply_patterns(&all_files, user_patterns, package_root);
        for f in all_files {
            let enabled = enabled_by_user.contains(&f);
            target.add(f, metadata, enabled);
        }
    }

    /// List the files the package's manifest (or, failing that, the
    /// conventional directory) exposes for one resource type. Returns the
    /// selected files sorted by their lossy string form, plus the same set
    /// for O(1) membership checks.
    fn collect_manifest_files(
        package_root: &Path,
        resource_type: ResourceType,
    ) -> (Vec<PathBuf>, std::collections::HashSet<PathBuf>) {
        if let Some(manifest) = read_pi_manifest(package_root) {
            let entries = manifest.entries_for(resource_type);
            if let Some(entries) = entries {
                if !entries.is_empty() {
                    let all_files =
                        collect_files_from_manifest_entries(&entries, package_root, resource_type);
                    let patterns = entries
                        .iter()
                        .filter(|e| is_pattern(e))
                        .cloned()
                        .collect::<Vec<_>>();
                    // With no manifest patterns, every listed file counts.
                    let enabled_by_manifest = if patterns.is_empty() {
                        all_files
                            .iter()
                            .cloned()
                            .collect::<std::collections::HashSet<_>>()
                    } else {
                        apply_patterns(&all_files, &patterns, package_root)
                    };
                    let mut enabled_vec = enabled_by_manifest.iter().cloned().collect::<Vec<_>>();
                    enabled_vec.sort_by(|a, b| a.to_string_lossy().cmp(&b.to_string_lossy()));
                    return (enabled_vec, enabled_by_manifest);
                }
            }
        }

        let convention_dir = package_root.join(resource_type.as_str());
        if !convention_dir.exists() {
            return (Vec::new(), std::collections::HashSet::new());
        }
        let all_files = collect_resource_files(&convention_dir, resource_type);
        let set = all_files.iter().cloned().collect();
        (all_files, set)
    }

    /// Register manifest-listed files for one resource type. Only files
    /// selected by the manifest's own patterns are added, all enabled.
    fn add_manifest_entries(
        entries: Option<&[String]>,
        root: &Path,
        resource_type: ResourceType,
        target: &mut ResourceList,
        metadata: &PathMetadata,
    ) {
        let Some(entries) = entries else {
            return;
        };
        if entries.is_empty() {
            return;
        }

        let all_files = collect_files_from_manifest_entries(entries, root, resource_type);
        let patterns = entries
            .iter()
            .filter(|e| is_pattern(e))
            .cloned()
            .collect::<Vec<_>>();
        let enabled_paths = apply_patterns(&all_files, &patterns, root);

        for f in all_files {
            if enabled_paths.contains(&f) {
                target.add(f, metadata, true);
            }
        }
    }
}
1971
/// The conventional auto-discovery subdirectories under a settings base dir.
#[derive(Debug, Default)]
struct AutoDirs {
    extensions: PathBuf,
    skills: PathBuf,
    prompts: PathBuf,
    themes: PathBuf,
}
1979
1980impl AutoDirs {
1981 fn new(base_dir: &Path) -> Self {
1982 Self {
1983 extensions: base_dir.join("extensions"),
1984 skills: base_dir.join("skills"),
1985 prompts: base_dir.join("prompts"),
1986 themes: base_dir.join("themes"),
1987 }
1988 }
1989}
1990
/// The `"pi"` section of a package's `package.json`. Each field is `Some`
/// only when the corresponding key is present as an array.
#[derive(Debug, Clone, Default)]
struct PiManifest {
    extensions: Option<Vec<String>>,
    skills: Option<Vec<String>>,
    prompts: Option<Vec<String>>,
    themes: Option<Vec<String>>,
}
1998
1999impl PiManifest {
2000 fn entries_for(&self, resource_type: ResourceType) -> Option<Vec<String>> {
2001 match resource_type {
2002 ResourceType::Extensions => self.extensions.clone(),
2003 ResourceType::Skills => self.skills.clone(),
2004 ResourceType::Prompts => self.prompts.clone(),
2005 ResourceType::Themes => self.themes.clone(),
2006 }
2007 }
2008}
2009
2010fn read_pi_manifest(package_root: &Path) -> Option<PiManifest> {
2011 let package_json = package_root.join("package.json");
2012 if !package_json.exists() {
2013 return None;
2014 }
2015 let raw = fs::read_to_string(package_json).ok()?;
2016 let json: Value = serde_json::from_str(&raw).ok()?;
2017 let pi = json.get("pi")?;
2018 let obj = pi.as_object()?;
2019
2020 Some(PiManifest {
2021 extensions: obj.get("extensions").and_then(Value::as_array).map(|arr| {
2022 arr.iter()
2023 .filter_map(Value::as_str)
2024 .map(str::to_string)
2025 .collect()
2026 }),
2027 skills: obj.get("skills").and_then(Value::as_array).map(|arr| {
2028 arr.iter()
2029 .filter_map(Value::as_str)
2030 .map(str::to_string)
2031 .collect()
2032 }),
2033 prompts: obj.get("prompts").and_then(Value::as_array).map(|arr| {
2034 arr.iter()
2035 .filter_map(Value::as_str)
2036 .map(str::to_string)
2037 .collect()
2038 }),
2039 themes: obj.get("themes").and_then(Value::as_array).map(|arr| {
2040 arr.iter()
2041 .filter_map(Value::as_str)
2042 .map(str::to_string)
2043 .collect()
2044 }),
2045 })
2046}
2047
2048fn temporary_dir(prefix: &str, suffix: Option<&str>) -> PathBuf {
2049 let mut hasher = Sha256::new();
2050 hasher.update(format!("{prefix}-{}", suffix.unwrap_or("")));
2051 let digest = hasher.finalize();
2052 let short = hex_encode(&digest)[..8].to_string();
2053
2054 let mut dir = std::env::temp_dir()
2055 .join("pi-extensions")
2056 .join(prefix)
2057 .join(short);
2058 if let Some(suffix) = suffix {
2059 dir = dir.join(suffix);
2060 }
2061 dir
2062}
2063
/// Lowercase hexadecimal encoding of `bytes`.
fn hex_encode(bytes: &[u8]) -> String {
    bytes.iter().fold(
        String::with_capacity(bytes.len().saturating_mul(2)),
        |mut acc, &b| {
            // `{:02x}` emits exactly two lowercase hex digits per byte.
            let _ = write!(acc, "{b:02x}");
            acc
        },
    )
}
2073
2074fn resolve_path_from_base(input: &str, base_dir: &Path) -> PathBuf {
2075 let trimmed = input.trim();
2076 if trimmed == "~" {
2077 return dirs::home_dir().unwrap_or_else(|| base_dir.to_path_buf());
2078 }
2079 if let Some(rest) = trimmed.strip_prefix("~/") {
2080 return dirs::home_dir()
2081 .unwrap_or_else(|| base_dir.to_path_buf())
2082 .join(rest);
2083 }
2084 if trimmed.starts_with('~') {
2085 return dirs::home_dir()
2086 .unwrap_or_else(|| base_dir.to_path_buf())
2087 .join(trimmed.trim_start_matches('~'));
2088 }
2089
2090 let p = Path::new(trimmed);
2091 if p.is_absolute() {
2092 return p.to_path_buf();
2093 }
2094 base_dir.join(p)
2095}
2096
/// True when `s` is a filter pattern — an override prefix (`!`, `+`, `-`)
/// or a glob metacharacter — rather than a plain path entry.
fn is_pattern(s: &str) -> bool {
    matches!(s.as_bytes().first(), Some(b'!' | b'+' | b'-'))
        || s.contains('*')
        || s.contains('?')
}
2104
2105fn split_patterns(entries: &[String]) -> (Vec<String>, Vec<String>) {
2106 let mut plain = Vec::new();
2107 let mut patterns = Vec::new();
2108 for entry in entries {
2109 if is_pattern(entry) {
2110 patterns.push(entry.clone());
2111 } else {
2112 plain.push(entry.clone());
2113 }
2114 }
2115 (plain, patterns)
2116}
2117
/// Render a path with forward slashes, regardless of platform separators.
fn posix_string(path: &Path) -> String {
    let lossy = path.to_string_lossy();
    lossy.replace('\\', "/")
}
2121
2122fn relative_posix(base: &Path, path: &Path) -> String {
2123 let base_components = base.components().collect::<Vec<_>>();
2124 let path_components = path.components().collect::<Vec<_>>();
2125
2126 let mut i = 0usize;
2127 while i < base_components.len()
2128 && i < path_components.len()
2129 && base_components[i] == path_components[i]
2130 {
2131 i += 1;
2132 }
2133
2134 if i == 0 {
2135 return posix_string(path);
2136 }
2137
2138 let mut rel = PathBuf::new();
2139 for _ in i..base_components.len() {
2140 rel.push("..");
2141 }
2142 for comp in path_components.iter().skip(i) {
2143 rel.push(comp.as_os_str());
2144 }
2145 posix_string(&rel)
2146}
2147
/// Drop a leading `./` or `.\` from an exact (non-glob) pattern so it can
/// be compared against relative path strings directly.
fn normalize_exact_pattern(pattern: &str) -> &str {
    for prefix in ["./", ".\\"] {
        if let Some(rest) = pattern.strip_prefix(prefix) {
            return rest;
        }
    }
    pattern
}
2154
2155fn pattern_matches(pattern: &str, candidate: &str) -> bool {
2156 let normalized_pattern = pattern.replace('\\', "/");
2157 let candidate = candidate.replace('\\', "/");
2158 glob::Pattern::new(&normalized_pattern)
2159 .ok()
2160 .is_some_and(|p| p.matches(&candidate))
2161}
2162
2163fn matches_any_pattern(file_path: &Path, patterns: &[String], base_dir: &Path) -> bool {
2164 let rel = relative_posix(base_dir, file_path);
2165 let name = file_path.file_name().and_then(|n| n.to_str()).unwrap_or("");
2166 let file_str = posix_string(file_path);
2167
2168 let is_skill_file = name == "SKILL.md";
2169 let parent_dir = is_skill_file.then(|| file_path.parent().unwrap_or_else(|| Path::new(".")));
2170 let parent_dir_str = parent_dir.map(posix_string);
2171 let parent_rel = parent_dir.map(|p| relative_posix(base_dir, p));
2172 let parent_name = parent_dir
2173 .and_then(|p| p.file_name())
2174 .and_then(|n| n.to_str());
2175
2176 for pattern in patterns {
2177 if pattern_matches(pattern, &rel)
2178 || pattern_matches(pattern, name)
2179 || pattern_matches(pattern, &file_str)
2180 {
2181 return true;
2182 }
2183 if !is_skill_file {
2184 continue;
2185 }
2186 if parent_rel
2187 .as_ref()
2188 .is_some_and(|s| pattern_matches(pattern, s))
2189 {
2190 return true;
2191 }
2192 if parent_name.is_some_and(|s| pattern_matches(pattern, s)) {
2193 return true;
2194 }
2195 if parent_dir_str
2196 .as_ref()
2197 .is_some_and(|s| pattern_matches(pattern, s))
2198 {
2199 return true;
2200 }
2201 }
2202 false
2203}
2204
2205fn matches_any_exact_pattern(file_path: &Path, patterns: &[String], base_dir: &Path) -> bool {
2206 if patterns.is_empty() {
2207 return false;
2208 }
2209
2210 let rel = relative_posix(base_dir, file_path);
2211 let file_str = posix_string(file_path);
2212
2213 let name = file_path.file_name().and_then(|n| n.to_str()).unwrap_or("");
2214 let is_skill_file = name == "SKILL.md";
2215 let parent_dir = is_skill_file.then(|| file_path.parent().unwrap_or_else(|| Path::new(".")));
2216 let parent_dir_str = parent_dir.map(posix_string);
2217 let parent_rel = parent_dir.map(|p| relative_posix(base_dir, p));
2218
2219 patterns.iter().any(|pattern| {
2220 let normalized = normalize_exact_pattern(pattern);
2221 if normalized == rel || normalized == file_str {
2222 return true;
2223 }
2224 if !is_skill_file {
2225 return false;
2226 }
2227 parent_rel.as_ref().is_some_and(|p| normalized == p)
2228 || parent_dir_str.as_ref().is_some_and(|p| normalized == p)
2229 })
2230}
2231
/// Keep only override-style entries: `!exclude`, `+force-include`, and
/// `-force-exclude` (prefix retained).
fn get_override_patterns(entries: &[String]) -> Vec<String> {
    entries
        .iter()
        .filter(|p| matches!(p.as_bytes().first(), Some(b'!' | b'+' | b'-')))
        .cloned()
        .collect()
}
2239
2240fn is_enabled_by_overrides(path: &Path, patterns: &[String], base_dir: &Path) -> bool {
2241 let overrides = get_override_patterns(patterns);
2242 let excludes = overrides
2243 .iter()
2244 .filter_map(|p| p.strip_prefix('!').map(str::to_string))
2245 .collect::<Vec<_>>();
2246 let force_includes = overrides
2247 .iter()
2248 .filter_map(|p| p.strip_prefix('+').map(str::to_string))
2249 .collect::<Vec<_>>();
2250 let force_excludes = overrides
2251 .iter()
2252 .filter_map(|p| p.strip_prefix('-').map(str::to_string))
2253 .collect::<Vec<_>>();
2254
2255 if !force_excludes.is_empty() && matches_any_exact_pattern(path, &force_excludes, base_dir) {
2257 false
2258 } else if !force_includes.is_empty()
2259 && matches_any_exact_pattern(path, &force_includes, base_dir)
2260 {
2261 true
2262 } else {
2263 excludes.is_empty() || !matches_any_pattern(path, &excludes, base_dir)
2264 }
2265}
2266
2267fn apply_patterns(
2268 all_paths: &[PathBuf],
2269 patterns: &[String],
2270 base_dir: &Path,
2271) -> std::collections::HashSet<PathBuf> {
2272 let mut includes = Vec::new();
2273 let mut excludes = Vec::new();
2274 let mut force_includes = Vec::new();
2275 let mut force_excludes = Vec::new();
2276
2277 for p in patterns {
2278 if let Some(rest) = p.strip_prefix('+') {
2279 force_includes.push(rest.to_string());
2280 } else if let Some(rest) = p.strip_prefix('-') {
2281 force_excludes.push(rest.to_string());
2282 } else if let Some(rest) = p.strip_prefix('!') {
2283 excludes.push(rest.to_string());
2284 } else {
2285 includes.push(p.clone());
2286 }
2287 }
2288
2289 let mut result: Vec<PathBuf> = if includes.is_empty() {
2290 all_paths.to_vec()
2291 } else {
2292 all_paths
2293 .iter()
2294 .filter(|p| matches_any_pattern(p, &includes, base_dir))
2295 .cloned()
2296 .collect()
2297 };
2298
2299 if !excludes.is_empty() {
2300 result.retain(|p| !matches_any_pattern(p, &excludes, base_dir));
2301 }
2302
2303 if !force_includes.is_empty() {
2304 for p in all_paths {
2305 if !result.contains(p) && matches_any_exact_pattern(p, &force_includes, base_dir) {
2306 result.push(p.clone());
2307 }
2308 }
2309 }
2310
2311 if !force_excludes.is_empty() {
2312 result.retain(|p| !matches_any_exact_pattern(p, &force_excludes, base_dir));
2313 }
2314
2315 result.into_iter().collect()
2316}
2317
2318fn collect_resource_files(dir: &Path, resource_type: ResourceType) -> Vec<PathBuf> {
2319 match resource_type {
2320 ResourceType::Skills => collect_skill_entries(dir),
2321 ResourceType::Extensions => collect_auto_extension_entries(dir),
2322 ResourceType::Prompts => collect_files_recursive(dir, "md"),
2323 ResourceType::Themes => collect_files_recursive(dir, "json"),
2324 }
2325}
2326
2327fn collect_files_from_paths(paths: &[PathBuf], resource_type: ResourceType) -> Vec<PathBuf> {
2328 let mut out = Vec::new();
2329 for p in paths {
2330 if !p.exists() {
2331 continue;
2332 }
2333 let Ok(stats) = fs::metadata(p) else {
2334 continue;
2335 };
2336 if stats.is_file() {
2337 if resource_type == ResourceType::Extensions && !is_supported_extension_file(p) {
2338 warn!(
2339 path = %p.display(),
2340 "Ignoring unsupported extension file entry; use extension.json, JS/TS entrypoints, *.native.json, or *.wasm"
2341 );
2342 continue;
2343 }
2344 out.push(p.clone());
2345 } else if stats.is_dir() {
2346 out.extend(collect_resource_files(p, resource_type));
2347 }
2348 }
2349 out
2350}
2351
2352fn collect_files_from_manifest_entries(
2353 entries: &[String],
2354 root: &Path,
2355 resource_type: ResourceType,
2356) -> Vec<PathBuf> {
2357 let plain = entries
2358 .iter()
2359 .filter(|e| !is_pattern(e))
2360 .cloned()
2361 .collect::<Vec<_>>();
2362 let resolved = plain
2363 .iter()
2364 .map(|entry| {
2365 let p = Path::new(entry);
2366 if p.is_absolute() {
2367 p.to_path_buf()
2368 } else {
2369 root.join(entry)
2370 }
2371 })
2372 .collect::<Vec<_>>();
2373
2374 collect_files_from_paths(&resolved, resource_type)
2375}
2376
2377fn collect_files_recursive(dir: &Path, ext: &str) -> Vec<PathBuf> {
2378 if !dir.exists() {
2379 return Vec::new();
2380 }
2381
2382 let mut builder = ignore::WalkBuilder::new(dir);
2383 builder
2384 .hidden(true)
2385 .follow_links(true)
2386 .git_global(false)
2387 .git_exclude(false)
2388 .add_custom_ignore_filename(".fdignore")
2389 .filter_entry(|e| e.file_name() != std::ffi::OsStr::new("node_modules"));
2390
2391 let mut out = Vec::new();
2392 for entry in builder.build().filter_map(std::result::Result::ok) {
2393 let path = entry.path();
2394 if path.is_file()
2395 && path
2396 .extension()
2397 .and_then(|e| e.to_str())
2398 .is_some_and(|e| e.eq_ignore_ascii_case(ext))
2399 {
2400 out.push(path.to_path_buf());
2401 }
2402 }
2403 out
2404}
2405
2406fn collect_skill_entries(dir: &Path) -> Vec<PathBuf> {
2407 if !dir.exists() {
2408 return Vec::new();
2409 }
2410
2411 let mut builder = ignore::WalkBuilder::new(dir);
2412 builder
2413 .hidden(true)
2414 .follow_links(true)
2415 .git_global(false)
2416 .git_exclude(false)
2417 .add_custom_ignore_filename(".fdignore")
2418 .filter_entry(|e| e.file_name() != std::ffi::OsStr::new("node_modules"));
2419
2420 let mut out = Vec::new();
2421 for entry in builder.build().filter_map(std::result::Result::ok) {
2422 let path = entry.path();
2423 if !path.is_file() {
2424 continue;
2425 }
2426 let rel = path.strip_prefix(dir).unwrap_or(path);
2427 let depth = rel.components().count();
2428 let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
2429
2430 if depth == 1 {
2431 if path.extension().and_then(|e| e.to_str()) == Some("md") {
2432 out.push(path.to_path_buf());
2433 }
2434 } else if name == "SKILL.md" {
2435 out.push(path.to_path_buf());
2436 }
2437 }
2438 out
2439}
2440
/// Auto-discovery of skills currently uses the same rules as explicit skill
/// collection; presumably kept as a separate entry point so the two can
/// diverge later — confirm before merging them.
fn collect_auto_skill_entries(dir: &Path) -> Vec<PathBuf> {
    collect_skill_entries(dir)
}
2444
/// Scan the top level of `dir` (non-recursive) for `.md` prompt files,
/// skipping dotfiles and `node_modules`. Results are returned sorted.
/// A missing or unreadable directory yields an empty list.
fn collect_auto_prompt_entries(dir: &Path) -> Vec<PathBuf> {
    let Ok(read) = fs::read_dir(dir) else {
        return Vec::new();
    };

    let mut found: Vec<PathBuf> = Vec::new();
    for entry in read.flatten() {
        let file_name = entry.file_name();
        let file_name = file_name.to_string_lossy();
        if file_name.starts_with('.') || file_name == "node_modules" {
            continue;
        }
        let path = entry.path();
        let Ok(stats) = fs::metadata(&path) else {
            continue;
        };
        // Extension match is deliberately case-sensitive ("md" only).
        if stats.is_file() && path.extension().and_then(|e| e.to_str()) == Some("md") {
            found.push(path);
        }
    }
    found.sort();
    found
}
2470
/// Scan the top level of `dir` (non-recursive) for `.json` theme files,
/// skipping dotfiles and `node_modules`. Results are returned sorted.
/// A missing or unreadable directory yields an empty list.
fn collect_auto_theme_entries(dir: &Path) -> Vec<PathBuf> {
    let Ok(read) = fs::read_dir(dir) else {
        return Vec::new();
    };

    let mut found: Vec<PathBuf> = read
        .flatten()
        .filter_map(|entry| {
            let file_name = entry.file_name();
            let file_name = file_name.to_string_lossy();
            if file_name.starts_with('.') || file_name == "node_modules" {
                return None;
            }
            let path = entry.path();
            let stats = fs::metadata(&path).ok()?;
            // Extension match is deliberately case-sensitive ("json" only).
            let is_json = path.extension().and_then(|e| e.to_str()) == Some("json");
            (stats.is_file() && is_json).then_some(path)
        })
        .collect();
    found.sort();
    found
}
2496
/// Whether `path` names a file the extension loader understands:
/// `extension.json` (any case), `*.native.json`, `*.wasm`, or a JS/TS
/// entrypoint (`ts`, `tsx`, `js`, `mjs`, `cjs`, `mts`, `cts`).
fn is_supported_extension_file(path: &Path) -> bool {
    let Some(name) = path.file_name().and_then(|name| name.to_str()) else {
        return false;
    };

    if name.eq_ignore_ascii_case("extension.json") || name.ends_with(".native.json") {
        return true;
    }

    path.extension()
        .and_then(|ext| ext.to_str())
        .is_some_and(|ext| {
            ["wasm", "ts", "tsx", "js", "mjs", "cjs", "mts", "cts"]
                .iter()
                .any(|candidate| ext.eq_ignore_ascii_case(candidate))
        })
}
2518
/// Determine the concrete extension entrypoint(s) for a directory, in
/// precedence order:
/// 1. a valid `extension.json` manifest (the directory itself is the entry),
/// 2. `package.json` with a `pi.extensions` list of supported files,
/// 3. `index.native.json`,
/// 4. the first conventional `index.*` entrypoint found.
///
/// Returns `None` when the directory holds no recognizable extension.
fn resolve_extension_entries(dir: &Path) -> Option<Vec<PathBuf>> {
    match load_extension_manifest(dir) {
        Ok(Some(_)) => {
            // Manifest present and valid: the directory is the unit of loading.
            return Some(vec![dir.to_path_buf()]);
        }
        Ok(None) => {}
        Err(err) => {
            // An invalid manifest is reported but does not stop the fallbacks.
            warn!(path = %dir.display(), "Invalid extension manifest: {err}");
        }
    }

    let package_json_path = dir.join("package.json");
    if package_json_path.exists() {
        let manifest = read_pi_manifest(dir);
        if let Some(manifest) = manifest {
            if let Some(exts) = manifest.extensions {
                let mut entries = Vec::new();
                for ext_path in exts {
                    let resolved = dir.join(ext_path);
                    if !resolved.exists() {
                        continue;
                    }
                    if resolved.is_file() && !is_supported_extension_file(&resolved) {
                        warn!(
                            path = %resolved.display(),
                            "Ignoring unsupported package.json#pi.extensions entry; use extension.json, JS/TS entrypoints, *.native.json, or *.wasm"
                        );
                        continue;
                    }
                    entries.push(resolved);
                }
                // Only trust the manifest list if it produced something usable;
                // otherwise fall through to the index.* conventions below.
                if !entries.is_empty() {
                    return Some(entries);
                }
            }
        }
    }

    let index_native = dir.join("index.native.json");
    if index_native.exists() {
        return Some(vec![index_native]);
    }

    // Last resort: conventional index entrypoints, first match wins.
    for index_name in [
        "index.ts",
        "index.tsx",
        "index.js",
        "index.mjs",
        "index.cjs",
        "index.mts",
        "index.cts",
    ] {
        let candidate = dir.join(index_name);
        if candidate.exists() {
            return Some(vec![candidate]);
        }
    }

    None
}
2579
/// Discover extension entries under `dir`.
///
/// First resolves `dir` itself as an extension, then walks one level deep
/// (`max_depth(1)`; the `skip(1)` drops the walker's first yield, normally
/// the root itself): supported files are taken directly and subdirectories
/// are resolved as extension packages. Results are sorted and deduplicated.
fn collect_auto_extension_entries(dir: &Path) -> Vec<PathBuf> {
    if !dir.exists() {
        return Vec::new();
    }

    let mut out = Vec::new();
    if let Some(entries) = resolve_extension_entries(dir) {
        out.extend(entries);
    }

    let mut builder = ignore::WalkBuilder::new(dir);
    builder
        .hidden(true)
        .follow_links(true)
        .max_depth(Some(1))
        .git_ignore(false)
        .git_global(false)
        .git_exclude(false)
        .add_custom_ignore_filename(".fdignore")
        .filter_entry(|e| e.file_name() != std::ffi::OsStr::new("node_modules"));

    for entry in builder.build().skip(1).filter_map(std::result::Result::ok) {
        let path = entry.path().to_path_buf();
        // metadata() follows symlinks, consistent with follow_links above.
        let Ok(stats) = fs::metadata(&path) else {
            continue;
        };
        if stats.is_file() {
            if is_supported_extension_file(&path) {
                out.push(path);
            }
            continue;
        }
        if stats.is_dir() {
            if let Some(entries) = resolve_extension_entries(&path) {
                out.extend(entries);
            }
        }
    }
    // dedup() only removes adjacent duplicates, hence the sort first.
    out.sort();
    out.dedup();
    out
}
2622
2623fn read_installed_npm_version(installed_path: &Path) -> Option<String> {
2624 let package_json = installed_path.join("package.json");
2625 let raw = fs::read_to_string(package_json).ok()?;
2626 let json: Value = serde_json::from_str(&raw).ok()?;
2627 json.get("version")
2628 .and_then(Value::as_str)
2629 .map(str::to_string)
2630}
2631
/// Query the npm registry for the latest published version of the package
/// named in `spec` (`installed_path` is only used to label error messages).
///
/// # Errors
/// Returns a tool error when the request fails, the registry answers with a
/// non-2xx status, or the response body lacks a string `version` field.
async fn get_latest_npm_version(installed_path: &Path, spec: &str) -> Result<String> {
    let (name, _) = parse_npm_spec(spec);
    let url = format!("https://registry.npmjs.org/{name}/latest");
    let client = crate::http::client::Client::new();
    // Box::pin keeps the send() future heap-pinned; presumably required by
    // the client's future type — confirm against crate::http::client.
    let response = Box::pin(client.get(&url).send()).await.map_err(|e| {
        Error::tool(
            "npm",
            format!(
                "Failed to fetch npm registry for {}: {e}",
                installed_path.display()
            ),
        )
    })?;

    let status = response.status();
    // Read the body before checking status so error responses can include it.
    let body = response.text().await.map_err(|e| {
        Error::tool(
            "npm",
            format!(
                "Failed to read npm registry response for {}: {e}",
                installed_path.display()
            ),
        )
    })?;

    if !(200..300).contains(&status) {
        return Err(Error::tool(
            "npm",
            format!("npm registry error (HTTP {status}): {body}"),
        ));
    }

    let data: Value = serde_json::from_str(&body).map_err(|e| {
        Error::tool(
            "npm",
            format!(
                "Failed to parse npm registry response for {}: {e}",
                installed_path.display()
            ),
        )
    })?;
    data.get("version")
        .and_then(Value::as_str)
        .map(str::to_string)
        .ok_or_else(|| Error::tool("npm", "Registry response missing version"))
}
2678
/// A package source string after classification by `parse_source`.
#[derive(Debug, Clone)]
enum ParsedSource {
    /// npm registry package; `pinned` when the spec carried an exact version.
    Npm {
        spec: String,
        name: String,
        pinned: bool,
    },
    /// Git repository (remote or local); `pinned` when an `@ref` was given.
    Git {
        repo: String,
        host: String,
        path: String,
        r#ref: Option<String>,
        pinned: bool,
    },
    /// Plain filesystem path.
    Local {
        path: PathBuf,
    },
}
2697
2698fn parse_source(source: &str, cwd: &Path) -> ParsedSource {
2699 let source = source.trim();
2700 if let Some(rest) = source.strip_prefix("npm:") {
2701 let spec = rest.trim().to_string();
2702 let (name, version) = parse_npm_spec(&spec);
2703 return ParsedSource::Npm {
2704 spec,
2705 name,
2706 pinned: version.is_some(),
2707 };
2708 }
2709
2710 if let Some(rest) = source.strip_prefix("git:") {
2711 return parse_git_source(rest.trim(), cwd);
2712 }
2713
2714 if looks_like_git_url(source) || source.starts_with("https://") || source.starts_with("http://")
2715 {
2716 return parse_git_source(source, cwd);
2717 }
2718
2719 if let Some(resolved) = resolve_install_source_alias(source, cwd) {
2720 return parse_source(&resolved, cwd);
2721 }
2722
2723 ParsedSource::Local {
2724 path: resolve_local_path(source, cwd),
2725 }
2726}
2727
/// Try to expand `source` as an alias registered in the extension index.
///
/// Skips anything that already looks like — or resolves to — a local path.
/// Returns `Some(resolved)` only when the index maps the alias to a
/// *different* string; index errors degrade to "no alias" with a debug log.
fn resolve_install_source_alias(source: &str, cwd: &Path) -> Option<String> {
    if source.is_empty() || looks_like_local_path(source) {
        return None;
    }

    // An existing local path wins over an alias of the same name.
    if resolve_local_path(source, cwd).exists() {
        return None;
    }

    match ExtensionIndexStore::default_store().resolve_install_source(source) {
        Ok(Some(resolved)) if resolved != source => Some(resolved),
        Ok(_) => None,
        Err(err) => {
            tracing::debug!(
                "failed to resolve install source alias via extension index (using source as-is): {err}"
            );
            None
        }
    }
}
2749
/// Parse a git source spec of the form `<repo>[@<ref>]`.
///
/// Local repos are keyed by a 16-hex-char SHA-256 prefix of their resolved
/// path (with host `"local"`); remote repos are normalized by stripping the
/// URL scheme and `.git` suffix, with the first path segment as host and
/// the rest as path.
///
/// NOTE(review): the split on '@' takes only the first segment after the
/// repo, so a ref that itself contains '@' would be truncated — confirm
/// this is acceptable for the supported ref formats.
fn parse_git_source(spec: &str, cwd: &Path) -> ParsedSource {
    let mut parts = spec.split('@');
    let repo_raw = parts.next().unwrap_or("").trim();
    let r#ref = parts
        .next()
        .map(|s| s.trim().to_string())
        .filter(|s| !s.is_empty());
    let pinned = r#ref.is_some();

    let (repo, host, path) = if looks_like_local_path(repo_raw) {
        let repo_path = local_path_from_spec(repo_raw, cwd);

        // Derive a stable short key from the resolved path string so
        // distinct local repos get distinct cache locations.
        let mut hasher = Sha256::new();
        hasher.update(repo_path.to_string_lossy().as_bytes());
        let digest = hasher.finalize();
        let key = hex_encode(&digest)[..16].to_string();

        (
            repo_path.to_string_lossy().to_string(),
            "local".to_string(),
            key,
        )
    } else {
        let normalized = repo_raw
            .trim_start_matches("https://")
            .trim_start_matches("http://")
            .trim_end_matches(".git")
            .to_string();

        // Drop empty and dot segments so path traversal can't leak into the key.
        let segments = normalized
            .split('/')
            .filter(|s| !s.is_empty() && *s != "." && *s != "..")
            .collect::<Vec<_>>();

        let host = segments.first().copied().unwrap_or("").to_string();
        let path = if segments.len() >= 2 {
            segments[1..].join("/")
        } else {
            String::new()
        };

        (normalized, host, path)
    };

    ParsedSource::Git {
        repo,
        host,
        path,
        r#ref,
        pinned,
    }
}
2804
/// Whether `source` points at one of the well-known git hosts (optionally
/// behind an `http(s)://` scheme). Requires a `/` after the host name so
/// e.g. `github.community/...` does not match.
fn looks_like_git_url(source: &str) -> bool {
    const HOSTS: [&str; 4] = ["github.com", "gitlab.com", "bitbucket.org", "codeberg.org"];
    let normalized = source
        .trim_start_matches("https://")
        .trim_start_matches("http://");
    // strip_prefix + starts_with('/') avoids allocating a format!("{host}/")
    // string per host on every call.
    HOSTS.iter().any(|host| {
        normalized
            .strip_prefix(host)
            .is_some_and(|rest| rest.starts_with('/'))
    })
}
2814
/// Heuristic: does `spec` look like a filesystem path rather than a remote
/// source? Covers `.`/`..`, `file://` URLs, absolute paths, explicit
/// relative prefixes (both slash styles), and `~` home references.
fn looks_like_local_path(spec: &str) -> bool {
    let spec = spec.trim();
    if spec == "." || spec == ".." {
        return true;
    }
    const LOCAL_PREFIXES: [&str; 7] = ["file://", "/", "./", "../", ".\\", "..\\", "~"];
    LOCAL_PREFIXES.iter().any(|prefix| spec.starts_with(prefix))
}
2827
/// Resolve a local spec to a path, accepting an optional `file://` prefix
/// which is stripped before normal local-path resolution.
fn local_path_from_spec(spec: &str, cwd: &Path) -> PathBuf {
    let spec = spec.trim();
    if let Some(rest) = spec.strip_prefix("file://") {
        return resolve_local_path(rest, cwd);
    }
    resolve_local_path(spec, cwd)
}
2838
/// Expand `~` / `~/...` against the home directory (falling back to `cwd`
/// when no home is known), join relative inputs onto `cwd`, and normalize
/// `.`/`..` segments purely lexically (no filesystem access).
///
/// NOTE(review): the bare `~xyz` branch strips every leading '~' and joins
/// the remainder onto the *current* user's home — it does not resolve other
/// users' home directories; confirm that is intended.
fn resolve_local_path(input: &str, cwd: &Path) -> PathBuf {
    let trimmed = input.trim();
    if trimmed == "~" {
        return normalize_dot_segments(&dirs::home_dir().unwrap_or_else(|| cwd.to_path_buf()));
    }
    if let Some(rest) = trimmed.strip_prefix("~/") {
        return normalize_dot_segments(
            &dirs::home_dir()
                .unwrap_or_else(|| cwd.to_path_buf())
                .join(rest),
        );
    }
    if trimmed.starts_with('~') {
        return normalize_dot_segments(
            &dirs::home_dir()
                .unwrap_or_else(|| cwd.to_path_buf())
                .join(trimmed.trim_start_matches('~')),
        );
    }
    normalize_dot_segments(&cwd.join(trimmed))
}
2860
/// Lexically normalize a path: drop `.` components and resolve `..`
/// against the preceding normal component where possible.
///
/// Leading `..` components are kept for relative paths but dropped for
/// anchored (rooted/prefixed) ones, where the parent of the root is the
/// root itself. The filesystem is never consulted, so symlinks are not
/// honored.
fn normalize_dot_segments(path: &Path) -> PathBuf {
    use std::ffi::{OsStr, OsString};
    use std::path::Component;

    // Anchor (prefix and/or root) accumulates here; normal segments go on a
    // stack so `..` can pop them.
    let mut anchor = PathBuf::new();
    let mut anchored = false;
    let mut stack: Vec<OsString> = Vec::new();

    for component in path.components() {
        match component {
            Component::Prefix(_) | Component::RootDir => {
                anchor.push(component.as_os_str());
                anchored = true;
            }
            Component::CurDir => {}
            Component::ParentDir => {
                let can_pop =
                    matches!(stack.last(), Some(last) if last.as_os_str() != OsStr::new(".."));
                if can_pop {
                    stack.pop();
                } else if !anchored {
                    // Relative path escaping its start: keep the `..`.
                    stack.push(OsString::from(".."));
                }
            }
            Component::Normal(segment) => stack.push(segment.to_os_string()),
        }
    }

    stack.into_iter().fold(anchor, |mut acc, segment| {
        acc.push(segment);
        acc
    })
}
2901
/// Split an npm spec into `(name, version)`.
///
/// Handles scoped names: for `@scope/name@1.2.3` only the *last* `@` after
/// the scope marker separates the version. A trailing `@` with nothing
/// after it yields no version (the `@` stays part of the name).
fn parse_npm_spec(spec: &str) -> (String, Option<String>) {
    let spec = spec.trim();
    if spec.is_empty() {
        return (String::new(), None);
    }

    let separator = if let Some(rest) = spec.strip_prefix('@') {
        rest.rfind('@').map(|idx| idx + 1)
    } else {
        spec.find('@')
    };

    match separator {
        Some(pos) if pos + 1 < spec.len() => {
            let (name, version) = spec.split_at(pos);
            (name.to_string(), Some(version[1..].to_string()))
        }
        _ => (spec.to_string(), None),
    }
}
2919
2920fn ensure_npm_project(root: &Path) -> Result<()> {
2921 fs::create_dir_all(root)?;
2922 ensure_git_ignore(root)?;
2923 let package_json = root.join("package.json");
2924 if !package_json.exists() {
2925 let value = serde_json::json!({ "name": "pi-packages", "private": true });
2926 fs::write(&package_json, serde_json::to_string_pretty(&value)?)?;
2927 }
2928 Ok(())
2929}
2930
2931fn ensure_git_ignore(dir: &Path) -> Result<()> {
2932 fs::create_dir_all(dir)?;
2933 let ignore_path = dir.join(".gitignore");
2934 if !ignore_path.exists() {
2935 fs::write(ignore_path, "*\n!.gitignore\n")?;
2936 }
2937 Ok(())
2938}
2939
/// Walk upward from `target_dir`'s parent, deleting empty directories until
/// reaching `root` (which is never removed), a non-empty directory, or a
/// path outside `root`. Best-effort: any I/O failure simply stops the walk.
fn prune_empty_git_parents(target_dir: &Path, root: &Path) {
    let Ok(root) = root.canonicalize() else {
        return;
    };

    let mut cursor = target_dir.parent().map(Path::to_path_buf);
    while let Some(dir) = cursor {
        // Stop at root itself, anything that escapes root, or anything we
        // can no longer canonicalize.
        let inside_root = match dir.canonicalize() {
            Ok(canon) => canon != root && canon.starts_with(&root),
            Err(_) => false,
        };
        if !inside_root {
            break;
        }
        let is_empty = match fs::read_dir(&dir) {
            Ok(mut entries) => entries.next().is_none(),
            Err(_) => false,
        };
        if !is_empty {
            break;
        }
        let _ = fs::remove_dir(&dir);
        cursor = dir.parent().map(Path::to_path_buf);
    }
}
2961
2962fn run_command<I, S>(program: &str, args: I, cwd: Option<&Path>) -> Result<()>
2963where
2964 I: IntoIterator<Item = S>,
2965 S: AsRef<OsStr>,
2966{
2967 let mut cmd = Command::new(program);
2968 cmd.args(args)
2969 .stdin(Stdio::null())
2970 .stdout(Stdio::piped())
2971 .stderr(Stdio::piped());
2972 if let Some(cwd) = cwd {
2973 cmd.current_dir(cwd);
2974 }
2975
2976 let output = cmd
2977 .output()
2978 .map_err(|e| Error::tool(program, format!("Failed to spawn {program}: {e}")))?;
2979
2980 if !output.status.success() {
2981 let stdout = String::from_utf8_lossy(&output.stdout);
2982 let stderr = String::from_utf8_lossy(&output.stderr);
2983 let mut msg = format!("Command failed: {program}");
2984 if let Some(code) = output.status.code() {
2985 let _ = write!(msg, " (exit {code})");
2986 }
2987 if !stdout.trim().is_empty() {
2988 let _ = write!(msg, "\nstdout:\n{stdout}");
2989 }
2990 if !stderr.trim().is_empty() {
2991 let _ = write!(msg, "\nstderr:\n{stderr}");
2992 }
2993 return Err(Error::tool(program, msg));
2994 }
2995
2996 Ok(())
2997}
2998
2999fn run_command_capture<I, S>(program: &str, args: I, cwd: Option<&Path>) -> Result<String>
3000where
3001 I: IntoIterator<Item = S>,
3002 S: AsRef<OsStr>,
3003{
3004 let mut cmd = Command::new(program);
3005 cmd.args(args)
3006 .stdin(Stdio::null())
3007 .stdout(Stdio::piped())
3008 .stderr(Stdio::piped());
3009 if let Some(cwd) = cwd {
3010 cmd.current_dir(cwd);
3011 }
3012
3013 let output = cmd
3014 .output()
3015 .map_err(|e| Error::tool(program, format!("Failed to spawn {program}: {e}")))?;
3016
3017 if !output.status.success() {
3018 let stdout = String::from_utf8_lossy(&output.stdout);
3019 let stderr = String::from_utf8_lossy(&output.stderr);
3020 let mut msg = format!("Command failed: {program}");
3021 if let Some(code) = output.status.code() {
3022 let _ = write!(msg, " (exit {code})");
3023 }
3024 if !stdout.trim().is_empty() {
3025 let _ = write!(msg, "\nstdout:\n{stdout}");
3026 }
3027 if !stderr.trim().is_empty() {
3028 let _ = write!(msg, "\nstderr:\n{stderr}");
3029 }
3030 return Err(Error::tool(program, msg));
3031 }
3032
3033 Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
3034}
3035
/// Stable lowercase label for a package scope (used in logs and messages).
const fn scope_label(scope: PackageScope) -> &'static str {
    match scope {
        PackageScope::User => "user",
        PackageScope::Project => "project",
        PackageScope::Temporary => "temporary",
    }
}
3043
/// Stable lowercase label for a lock-entry trust state.
const fn trust_state_label(state: PackageEntryTrustState) -> &'static str {
    match state {
        PackageEntryTrustState::Trusted => "trusted",
        PackageEntryTrustState::Rejected => "rejected",
    }
}
3050
/// Build a package-manager tool error for a lock/provenance verification
/// failure, embedding a machine-readable `code` and a remediation hint.
fn verification_error(code: &str, reason: &str, remediation: &str) -> Error {
    Error::tool(
        "package_manager",
        format!(
            "Package lock/provenance verification failed [{code}]: {reason}\nRemediation: {remediation}"
        ),
    )
}
3059
/// Decide whether `candidate` may replace `existing` in the package lock.
///
/// A previously-unseen package is trusted immediately ("first_seen").
/// Otherwise, unless `action` permits mutation (see
/// `allow_lock_entry_update`), any change in source identity, resolved
/// provenance, or content digest is rejected with a coded mismatch carrying
/// remediation steps. On success the returned plan records which fields
/// changed ("provenance_changed"/"digest_changed", or "verified" when
/// nothing did) and the trusted end state.
///
/// # Errors
/// Returns `PackageLockMismatch` with code `provenance_mismatch` or
/// `digest_mismatch` when an immutable field changed.
pub fn evaluate_lock_transition(
    existing: Option<&PackageLockEntry>,
    candidate: &PackageLockEntry,
    action: PackageLockAction,
) -> std::result::Result<LockTransitionPlan, PackageLockMismatch> {
    let Some(existing) = existing else {
        // First time we see this package: trust it and record why.
        return Ok(LockTransitionPlan {
            reason_codes: vec!["first_seen".to_string()],
            from_state: "untracked".to_string(),
            to_state: "trusted".to_string(),
        });
    };

    // Updates to unpinned sources may legitimately move provenance/digest.
    let allow_mutation = allow_lock_entry_update(candidate, action);

    if (existing.source_kind != candidate.source_kind || existing.source != candidate.source)
        && !allow_mutation
    {
        return Err(PackageLockMismatch {
            code: "provenance_mismatch",
            reason: format!(
                "source identity changed for {}: previous='{}' ({:?}), current='{}' ({:?})",
                candidate.identity,
                existing.source,
                existing.source_kind,
                candidate.source,
                candidate.source_kind
            ),
            remediation: format!(
                "Review the source change, then run `pi remove {}` and `pi install {}` to re-establish trust.",
                candidate.source, candidate.source
            ),
        });
    }

    if existing.resolved != candidate.resolved && !allow_mutation {
        return Err(PackageLockMismatch {
            code: "provenance_mismatch",
            reason: format!(
                "resolved provenance changed for {} while source is immutable in this operation",
                candidate.identity
            ),
            remediation: format!(
                "Use `pi update {}` for unpinned sources, or reinstall after intentional provenance changes.",
                candidate.source
            ),
        });
    }

    if existing.digest_sha256 != candidate.digest_sha256 && !allow_mutation {
        return Err(PackageLockMismatch {
            code: "digest_mismatch",
            reason: format!(
                "digest changed for {}: expected {}, got {}",
                candidate.identity, existing.digest_sha256, candidate.digest_sha256
            ),
            remediation: format!(
                "Inspect upstream changes. If expected, run `pi remove {}` then `pi install {}` to trust the new digest.",
                candidate.source, candidate.source
            ),
        });
    }

    // Mutation was allowed (or nothing changed): record what moved.
    let mut reason_codes = Vec::new();
    if existing.resolved != candidate.resolved {
        reason_codes.push("provenance_changed".to_string());
    }
    if existing.digest_sha256 != candidate.digest_sha256 {
        reason_codes.push("digest_changed".to_string());
    }
    if reason_codes.is_empty() {
        reason_codes.push("verified".to_string());
    }

    Ok(LockTransitionPlan {
        reason_codes,
        from_state: trust_state_label(existing.trust_state).to_string(),
        to_state: "trusted".to_string(),
    })
}
3140
/// Whether a lock entry may be mutated by the given action: installs never
/// mutate an existing entry; updates may, but only for *unpinned* npm/git
/// sources. Local sources are never mutated this way.
const fn allow_lock_entry_update(candidate: &PackageLockEntry, action: PackageLockAction) -> bool {
    match action {
        PackageLockAction::Install => false,
        PackageLockAction::Update => match candidate.resolved {
            PackageResolvedProvenance::Npm { pinned, .. }
            | PackageResolvedProvenance::Git { pinned, .. } => !pinned,
            PackageResolvedProvenance::Local { .. } => false,
        },
    }
}
3151
3152pub fn sort_lock_entries(entries: &mut [PackageLockEntry]) {
3153 entries.sort_by(|left, right| {
3154 left.identity
3155 .cmp(&right.identity)
3156 .then_with(|| left.source.cmp(&right.source))
3157 });
3158}
3159
/// Load and normalize the package lockfile at `path`.
///
/// A missing file yields an empty default lockfile. After parsing, a blank
/// schema field is backfilled with `PACKAGE_LOCK_SCHEMA` and the entries
/// are sorted into canonical order.
///
/// # Errors
/// Returns a config error when the file exists but is not valid JSON, or an
/// I/O error when it cannot be read.
pub fn read_package_lockfile(path: &Path) -> Result<PackageLockfile> {
    if !path.exists() {
        return Ok(PackageLockfile::default());
    }

    let content = fs::read_to_string(path)?;
    let mut lockfile: PackageLockfile = serde_json::from_str(&content).map_err(|err| {
        Error::config(format!(
            "Invalid package lockfile JSON in {}: {err}",
            path.display()
        ))
    })?;
    if lockfile.schema.trim().is_empty() {
        lockfile.schema = PACKAGE_LOCK_SCHEMA.to_string();
    }
    sort_lock_entries(&mut lockfile.entries);
    Ok(lockfile)
}
3178
/// Serialize `lockfile` and write it through the shared atomic settings
/// writer (temp file + rename into place).
pub fn write_package_lockfile_atomic(path: &Path, lockfile: &PackageLockfile) -> Result<()> {
    let value = serde_json::to_value(lockfile)?;
    write_settings_json_atomic(path, &value)
}
3183
/// Whether an npm version string is an exact pin rather than a range:
/// non-empty and free of range/wildcard characters (`^ ~ > < = * x X |`
/// and whitespace).
fn is_exact_npm_version(value: &str) -> bool {
    const RANGE_CHARS: &[char] = &['^', '~', '>', '<', '=', '*', 'x', 'X', '|', ' ', '\t'];
    !value.is_empty() && !value.contains(RANGE_CHARS)
}
3193
/// Compute a hex SHA-256 content digest for a file or a directory tree.
///
/// Each file is folded in as `"file\0" + name + "\0" + contents + "\0"`,
/// with all CR bytes removed so checkouts with different line endings
/// digest identically. Directories hash every contained regular file
/// (skipping `.git`), ordered by POSIX-style relative path so the result is
/// platform-independent.
///
/// # Errors
/// Fails for paths that are neither file nor directory, or on I/O errors
/// while reading.
pub fn digest_package_path(path: &Path) -> Result<String> {
    if path.is_file() {
        let mut hasher = Sha256::new();
        hasher.update(b"file\0");
        let file_name = path
            .file_name()
            .and_then(|name| name.to_str())
            .unwrap_or("entry");
        hasher.update(file_name.as_bytes());
        hasher.update(b"\0");
        // Strip CR so CRLF and LF checkouts produce the same digest.
        let bytes = fs::read(path)?
            .into_iter()
            .filter(|byte| *byte != b'\r')
            .collect::<Vec<_>>();
        hasher.update(&bytes);
        hasher.update(b"\0");
        return Ok(hex_encode(hasher.finalize().as_slice()));
    }

    if !path.is_dir() {
        return Err(Error::tool(
            "package_manager",
            format!(
                "Cannot compute digest for non-file/non-directory path: {}",
                path.display()
            ),
        ));
    }

    let mut files = Vec::new();
    collect_digest_files_recursive(path, path, &mut files)?;
    // Deterministic ordering by relative path keeps the digest stable.
    files.sort_by_key(|(_, relative)| relative.clone());

    let mut hasher = Sha256::new();
    for (full_path, relative) in files {
        hasher.update(b"file\0");
        hasher.update(relative.as_bytes());
        hasher.update(b"\0");
        let bytes = fs::read(full_path)?
            .into_iter()
            .filter(|byte| *byte != b'\r')
            .collect::<Vec<_>>();
        hasher.update(&bytes);
        hasher.update(b"\0");
    }

    Ok(hex_encode(hasher.finalize().as_slice()))
}
3242
/// Recursively gather `(absolute path, POSIX-style relative path)` pairs for
/// every regular file under `dir`, relative to `root`. Entries named `.git`
/// and non-file entries (symlinks etc.) are skipped.
fn collect_digest_files_recursive(
    root: &Path,
    dir: &Path,
    out: &mut Vec<(PathBuf, String)>,
) -> Result<()> {
    for entry in fs::read_dir(dir)? {
        let entry = entry?;
        let path = entry.path();
        let file_type = entry.file_type()?;
        let name = entry.file_name();
        if name == OsStr::new(".git") {
            continue;
        }

        if file_type.is_dir() {
            collect_digest_files_recursive(root, &path, out)?;
            continue;
        }

        if !file_type.is_file() {
            continue;
        }

        out.push((path.clone(), relative_posix(root, &path)));
    }
    Ok(())
}
3270
/// Path of the global settings file. The `PI_CONFIG_PATH` environment
/// variable overrides the default `<global config dir>/settings.json`.
fn global_settings_path() -> PathBuf {
    if let Ok(path) = std::env::var("PI_CONFIG_PATH") {
        return PathBuf::from(path);
    }
    Config::global_dir().join("settings.json")
}
3277
/// Path of the per-project settings file under `cwd`.
fn project_settings_path(cwd: &Path) -> PathBuf {
    cwd.join(Config::project_dir()).join("settings.json")
}
3281
/// Mutation to apply to the `packages` list in a settings file.
#[derive(Debug, Clone, Copy)]
enum UpdateAction {
    Add,
    Remove,
}
3287
3288fn list_packages_in_settings(path: &Path) -> Result<Vec<PackageEntry>> {
3289 let value = read_settings_json(path)?;
3290 let packages = value
3291 .get("packages")
3292 .and_then(Value::as_array)
3293 .cloned()
3294 .unwrap_or_default();
3295
3296 let mut out = Vec::new();
3297 for pkg in packages {
3298 if let Some(spec) = extract_package_spec(&pkg) {
3299 out.push(PackageEntry {
3300 scope: PackageScope::User, source: spec.source,
3302 filter: spec.filter,
3303 });
3304 }
3305 }
3306 Ok(out)
3307}
3308
3309fn update_package_sources(path: &Path, source: &str, action: UpdateAction) -> Result<()> {
3310 let mut root = read_settings_json(path)?;
3311 if !root.is_object() {
3312 root = serde_json::json!({});
3313 }
3314
3315 let packages_value = root.get_mut("packages");
3316 let packages = match packages_value {
3317 Some(Value::Array(arr)) => arr,
3318 Some(_) => {
3319 *packages_value.unwrap() = Value::Array(Vec::new());
3320 root.get_mut("packages")
3321 .and_then(Value::as_array_mut)
3322 .unwrap()
3323 }
3324 None => {
3325 root["packages"] = Value::Array(Vec::new());
3326 root.get_mut("packages")
3327 .and_then(Value::as_array_mut)
3328 .unwrap()
3329 }
3330 };
3331
3332 match action {
3333 UpdateAction::Add => {
3334 let exists = packages.iter().any(|existing| {
3335 extract_package_source(existing).is_some_and(|(s, _)| sources_match(&s, source))
3336 });
3337 if !exists {
3338 packages.push(Value::String(source.to_string()));
3339 }
3340 }
3341 UpdateAction::Remove => {
3342 packages.retain(|existing| {
3343 !extract_package_source(existing).is_some_and(|(s, _)| sources_match(&s, source))
3344 });
3345 }
3346 }
3347
3348 write_settings_json_atomic(path, &root)
3349}
3350
3351fn extract_package_source(value: &Value) -> Option<(String, bool)> {
3352 if let Some(s) = value.as_str() {
3353 return Some((s.to_string(), false));
3354 }
3355 let obj = value.as_object()?;
3356 let source = obj.get("source")?.as_str()?.to_string();
3357 Some((source, true))
3358}
3359
/// Source family used when comparing package sources for equality.
#[derive(Debug, Clone, PartialEq, Eq)]
enum NormalizedKind {
    Npm,
    Git,
    Local,
}
3366
/// Canonical (kind, key) form of a source string; two sources refer to the
/// same package iff their normalized forms are equal.
#[derive(Debug, Clone, PartialEq, Eq)]
struct NormalizedSource {
    kind: NormalizedKind,
    key: String,
}
3372
3373fn sources_match(a: &str, b: &str) -> bool {
3374 normalize_source(a).is_some_and(|left| normalize_source(b).is_some_and(|right| left == right))
3375}
3376
3377fn normalize_source(source: &str) -> Option<NormalizedSource> {
3378 let source = source.trim();
3379 if source.is_empty() {
3380 return None;
3381 }
3382 if let Some(rest) = source.strip_prefix("npm:") {
3383 let spec = rest.trim();
3384 let (name, _) = parse_npm_spec(spec);
3385 return Some(NormalizedSource {
3386 kind: NormalizedKind::Npm,
3387 key: name,
3388 });
3389 }
3390 if let Some(rest) = source.strip_prefix("git:") {
3391 let repo = rest.trim().split('@').next().unwrap_or("");
3392 let normalized = repo
3393 .trim_start_matches("https://")
3394 .trim_start_matches("http://")
3395 .trim_end_matches(".git");
3396 return Some(NormalizedSource {
3397 kind: NormalizedKind::Git,
3398 key: normalized.to_string(),
3399 });
3400 }
3401 if looks_like_git_url(source) || source.starts_with("https://") || source.starts_with("http://")
3402 {
3403 let repo = source.split('@').next().unwrap_or("");
3404 let normalized = repo
3405 .trim_start_matches("https://")
3406 .trim_start_matches("http://")
3407 .trim_end_matches(".git");
3408 return Some(NormalizedSource {
3409 kind: NormalizedKind::Git,
3410 key: normalized.to_string(),
3411 });
3412 }
3413 Some(NormalizedSource {
3414 kind: NormalizedKind::Local,
3415 key: source.to_string(),
3416 })
3417}
3418
/// Read a settings file as JSON, treating a missing file as an empty object.
///
/// # Errors
/// Returns a config error when the file exists but contains invalid JSON,
/// or an I/O error when it cannot be read.
fn read_settings_json(path: &Path) -> Result<Value> {
    if !path.exists() {
        return Ok(serde_json::json!({}));
    }
    let content = fs::read_to_string(path)?;
    serde_json::from_str(&content).map_err(|e| {
        Error::config(format!(
            "Invalid JSON in settings file {}: {e}",
            path.display()
        ))
    })
}
3431
/// Write `value` as pretty JSON to `path` atomically: serialize, write to a
/// sibling temp file in the same directory, then rename into place so
/// readers never observe a partially-written file.
///
/// NOTE(review): the temp file is written by path and not fsynced before
/// the rename, so durability across power loss is not guaranteed — confirm
/// whether that matters for settings files.
fn write_settings_json_atomic(path: &Path, value: &Value) -> Result<()> {
    let data = serde_json::to_string_pretty(value)?;
    let parent = path.parent().unwrap_or_else(|| Path::new("."));
    fs::create_dir_all(parent)?;

    // Same-directory temp file keeps the final rename on one filesystem.
    let tmp = tempfile::NamedTempFile::new_in(parent)?;
    fs::write(tmp.path(), data)?;
    let tmp_path = tmp.into_temp_path();
    tmp_path
        .persist(path)
        .map_err(|e| Error::Io(Box::new(e.error)))?;
    Ok(())
}
3445
/// Whether the `PI_EXT_COMPAT_SCAN` environment variable enables the
/// compatibility scan: truthy values are `1`, `true`, `yes`, `on`
/// (case-insensitive, surrounding whitespace ignored).
fn compat_scan_enabled() -> bool {
    std::env::var("PI_EXT_COMPAT_SCAN")
        .map(|raw| {
            let flag = raw.trim().to_ascii_lowercase();
            flag == "1" || flag == "true" || flag == "yes" || flag == "on"
        })
        .unwrap_or(false)
}
3453
/// When `PI_EXT_COMPAT_SCAN` is enabled, run the compatibility scanner over
/// every *enabled* extension (in path order for deterministic output) and
/// emit each non-empty ledger as a structured `ext.compat_ledger` log
/// event. Scan and serialization failures are logged and skipped.
fn maybe_emit_compat_ledgers(extensions: &[ResolvedResource]) {
    if !compat_scan_enabled() {
        return;
    }

    let mut enabled = extensions.iter().filter(|r| r.enabled).collect::<Vec<_>>();
    enabled.sort_by(|left, right| left.path.cmp(&right.path));

    for resource in enabled {
        // The scanner roots at the extension's directory: the path itself
        // when it is a directory, otherwise the file's parent.
        let root = if resource.path.is_dir() {
            resource.path.clone()
        } else {
            resource
                .path
                .parent()
                .map_or_else(|| resource.path.clone(), Path::to_path_buf)
        };
        let scanner = CompatibilityScanner::new(root);
        let ledger = match scanner.scan_path(&resource.path) {
            Ok(ledger) => ledger,
            Err(err) => {
                warn!(event = "ext.compat_ledger_error", error = %err);
                continue;
            }
        };

        if ledger.is_empty() {
            continue;
        }

        match serde_json::to_string(&ledger) {
            Ok(ledger_json) => {
                info!(
                    event = "ext.compat_ledger",
                    schema = %ledger.schema,
                    ledger = %ledger_json
                );
            }
            Err(err) => {
                warn!(event = "ext.compat_ledger_serialize_error", error = %err);
            }
        }
    }
}
3498
3499#[cfg(test)]
3500mod tests {
3501 use super::*;
3502 use asupersync::runtime::RuntimeBuilder;
3503 use serde_json::json;
3504 use std::fs;
3505 use std::future::Future;
3506
    /// Drive `future` to completion on a fresh single-threaded runtime.
    /// Shared helper for tests that exercise async `PackageManager` APIs.
    fn run_async<T>(future: impl Future<Output = T>) -> T {
        let runtime = RuntimeBuilder::current_thread()
            .build()
            .expect("build runtime");
        runtime.block_on(future)
    }
3513
    // `parse_npm_spec` splits "name@version", including @scope/name packages.
    #[test]
    fn test_parse_npm_spec_scoped_and_unscoped() {
        assert_eq!(parse_npm_spec("foo"), ("foo".to_string(), None));
        assert_eq!(
            parse_npm_spec("foo@1.2.3"),
            ("foo".to_string(), Some("1.2.3".to_string()))
        );
        assert_eq!(
            parse_npm_spec("@scope/name@1.2.3"),
            ("@scope/name".to_string(), Some("1.2.3".to_string()))
        );
        assert_eq!(
            parse_npm_spec("@scope/name"),
            ("@scope/name".to_string(), None)
        );
    }

    // Sources that differ only in version pins or URL spelling compare equal.
    #[test]
    fn test_sources_match_normalization() {
        assert!(sources_match("npm:foo@1", "npm:foo@2"));
        assert!(sources_match(
            "git:github.com/a/b@v1",
            "git:github.com/a/b@v2"
        ));
        assert!(sources_match(
            "https://github.com/a/b.git@v1",
            "github.com/a/b"
        ));
        assert!(!sources_match("npm:foo", "npm:bar"));
        assert!(!sources_match("git:github.com/a/b", "git:github.com/a/c"));
    }

    // Package identities drop versions/refs and normalize local paths to
    // absolute form (unix-only because of path separator expectations).
    #[test]
    #[cfg(unix)]
    fn test_package_identity_matches_pi_mono() {
        let dir = tempfile::tempdir().expect("tempdir");
        let manager = PackageManager::new(dir.path().to_path_buf());

        assert_eq!(
            manager.package_identity("npm:@scope/name@1.2.3"),
            "npm:@scope/name"
        );
        assert_eq!(
            manager.package_identity("git:https://github.com/a/b.git@v1"),
            "git:github.com/a/b"
        );

        // "./foo/../bar" should normalize to an absolute "<cwd>/bar" path.
        let identity = manager.package_identity("./foo/../bar");
        let expected_suffix = format!("{}/bar", dir.path().display());
        assert!(identity.ends_with(&expected_suffix), "{identity}");
    }

    // A bare name that matches an existing directory resolves as a local
    // path rather than being looked up as an index alias.
    #[test]
    fn parse_source_prefers_existing_local_paths_over_index_aliases() {
        let dir = tempfile::tempdir().expect("tempdir");
        let local = dir.path().join("checkpoint-pi");
        fs::create_dir_all(&local).expect("create local path");

        match parse_source("checkpoint-pi", dir.path()) {
            ParsedSource::Local { path } => assert_eq!(path, local),
            other => panic!("expected local source, got {other:?}"),
        }
    }

    // Project-scope installs land under the project dir's npm/git subtrees.
    #[test]
    fn test_installed_path_project_scope() {
        let dir = tempfile::tempdir().expect("tempdir");
        let manager = PackageManager::new(dir.path().to_path_buf());

        let npm = manager
            .installed_path_sync("npm:foo@1.2.3", PackageScope::Project)
            .expect("installed_path")
            .expect("path");
        assert_eq!(
            npm,
            dir.path()
                .join(Config::project_dir())
                .join("npm")
                .join("node_modules")
                .join("foo")
        );

        let git = manager
            .installed_path_sync("git:github.com/user/repo@v1", PackageScope::Project)
            .expect("installed_path")
            .expect("path");
        assert_eq!(
            git,
            dir.path()
                .join(Config::project_dir())
                .join("git")
                .join("github.com")
                .join("user/repo")
        );
    }

    // Local git sources map to git/local/<first-16-hex-of-sha256(abs path)>.
    #[test]
    fn test_installed_path_project_scope_local_git_hashes_absolute_path() {
        let dir = tempfile::tempdir().expect("tempdir");
        let manager = PackageManager::new(dir.path().to_path_buf());

        let repo_path = dir.path().join("repo");
        fs::create_dir_all(&repo_path).expect("create local repo dir");

        // Recompute the expected key the same way the implementation does.
        let mut hasher = Sha256::new();
        hasher.update(repo_path.to_string_lossy().as_bytes());
        let digest = hasher.finalize();
        let key = hex_encode(&digest)[..16].to_string();

        let local = manager
            .installed_path_sync("git:./repo", PackageScope::Project)
            .expect("installed_path")
            .expect("path");
        assert_eq!(
            local,
            dir.path()
                .join(Config::project_dir())
                .join("git")
                .join("local")
                .join(key),
            "local git sources should map to a stable hashed install directory",
        );
    }
3642
    // When global and project settings reference the same package with
    // different filters, the project filter wins and the scope is Project.
    #[test]
    fn test_project_settings_override_global_package_filters() {
        run_async(async {
            let temp_dir = tempfile::tempdir().expect("tempdir");
            let project_root = temp_dir.path().join("project");
            fs::create_dir_all(project_root.join(".pi")).expect("create project settings dir");

            // A package exposing two extension files; each settings file
            // whitelists a different one.
            let package_root = temp_dir.path().join("pkg");
            fs::create_dir_all(package_root.join("extensions")).expect("create extensions dir");
            fs::write(package_root.join("extensions/a.native.json"), "{}")
                .expect("write a.native.json");
            fs::write(package_root.join("extensions/b.native.json"), "{}")
                .expect("write b.native.json");

            let global_settings_path = temp_dir.path().join("global-settings.json");
            let project_settings_path = project_root.join(".pi/settings.json");

            let global_settings = json!({
                "packages": [{
                    "source": package_root.to_string_lossy(),
                    "extensions": ["extensions/a.native.json"]
                }]
            });
            fs::write(
                &global_settings_path,
                serde_json::to_string_pretty(&global_settings).expect("serialize global settings"),
            )
            .expect("write global settings");

            let project_settings = json!({
                "packages": [{
                    "source": package_root.to_string_lossy(),
                    "extensions": ["extensions/b.native.json"]
                }]
            });
            fs::write(
                &project_settings_path,
                serde_json::to_string_pretty(&project_settings)
                    .expect("serialize project settings"),
            )
            .expect("write project settings");

            let roots = ResolveRoots {
                global_settings_path: global_settings_path.clone(),
                project_settings_path: project_settings_path.clone(),
                global_base_dir: temp_dir.path().join("global-base"),
                project_base_dir: project_root.join(".pi"),
            };
            fs::create_dir_all(&roots.global_base_dir).expect("create global base dir");

            let manager = PackageManager::new(project_root);
            let resolved = manager.resolve_with_roots(&roots).await.expect("resolve");

            // Only the project-filtered extension (b) should be enabled …
            let enabled_extensions = resolved
                .extensions
                .iter()
                .filter(|entry| entry.enabled)
                .collect::<Vec<_>>();
            assert_eq!(enabled_extensions.len(), 1);
            let expected_path = package_root.join("extensions/b.native.json");
            assert_eq!(enabled_extensions[0].path, expected_path);
            assert_eq!(enabled_extensions[0].metadata.scope, PackageScope::Project);

            // … while the globally-filtered one (a) is present but disabled,
            // and every entry carries the winning Project scope.
            let disabled = resolved
                .extensions
                .iter()
                .find(|entry| entry.path == package_root.join("extensions/a.native.json"))
                .expect("a.native.json entry");
            assert!(!disabled.enabled);
            assert!(
                resolved
                    .extensions
                    .iter()
                    .all(|entry| entry.metadata.scope == PackageScope::Project)
            );
        });
    }

    // `temporary: true` sources resolve as enabled, Temporary-scoped,
    // Package-origin entries.
    #[test]
    fn test_resolve_extension_sources_uses_temporary_scope() {
        run_async(async {
            let temp_dir = tempfile::tempdir().expect("tempdir");
            let extension_path = temp_dir.path().join("ext.native.json");
            fs::write(&extension_path, "{}").expect("write extension");

            let manager = PackageManager::new(temp_dir.path().to_path_buf());
            let sources = vec![extension_path.to_string_lossy().to_string()];
            let resolved = manager
                .resolve_extension_sources(
                    &sources,
                    ResolveExtensionSourcesOptions {
                        local: false,
                        temporary: true,
                    },
                )
                .await
                .expect("resolve extension sources");

            assert_eq!(resolved.extensions.len(), 1);
            let entry = &resolved.extensions[0];
            assert!(entry.enabled);
            assert_eq!(entry.path, extension_path);
            assert_eq!(entry.metadata.scope, PackageScope::Temporary);
            assert_eq!(entry.metadata.origin, ResourceOrigin::Package);
            assert_eq!(entry.metadata.source, sources[0]);
        });
    }

    // "./foo/../bar" collapses to "<base>/bar".
    #[test]
    fn test_resolve_local_path_normalizes_dot_segments() {
        let temp_dir = tempfile::tempdir().expect("tempdir");
        let resolved = resolve_local_path("./foo/../bar", temp_dir.path());
        assert_eq!(resolved, temp_dir.path().join("bar"));
    }

    // A symlinked extension file is accepted and kept under its link path.
    #[cfg(unix)]
    #[test]
    fn test_resolve_local_extension_source_accepts_symlink() {
        let temp_dir = tempfile::tempdir().expect("tempdir");
        let extension_path = temp_dir.path().join("ext.native.json");
        fs::write(&extension_path, "{}").expect("write extension");

        let symlink_path = temp_dir.path().join("ext-link.native.json");
        std::os::unix::fs::symlink(&extension_path, &symlink_path).expect("create symlink");

        let mut accumulator = ResourceAccumulator::new();
        let mut metadata = PathMetadata {
            source: symlink_path.to_string_lossy().to_string(),
            scope: PackageScope::Temporary,
            origin: ResourceOrigin::Package,
            base_dir: None,
        };

        PackageManager::resolve_local_extension_source(
            &symlink_path,
            &mut accumulator,
            None,
            &mut metadata,
        );

        // The entry should record the symlink path, not its target.
        assert_eq!(accumulator.extensions.items.len(), 1);
        assert_eq!(accumulator.extensions.items[0].path, symlink_path);
    }
3786
    // A package.json "pi.extensions" list mixing a directory and a "!"
    // exclusion pattern resolves to the directory's files minus the excluded.
    #[test]
    fn test_manifest_extensions_resolve_with_patterns() {
        run_async(async {
            let temp_dir = tempfile::tempdir().expect("tempdir");
            let package_root = temp_dir.path().join("pkg");
            let extensions_dir = package_root.join("extensions");
            fs::create_dir_all(&extensions_dir).expect("create extensions dir");
            fs::write(extensions_dir.join("a.native.json"), "{}").expect("write a.native.json");
            fs::write(extensions_dir.join("b.native.json"), "{}").expect("write b.native.json");

            let manifest = json!({
                "name": "pkg",
                "version": "1.0.0",
                "pi": {
                    "extensions": ["extensions", "!extensions/b.native.json"]
                }
            });
            fs::write(
                package_root.join("package.json"),
                serde_json::to_string_pretty(&manifest).expect("serialize manifest"),
            )
            .expect("write manifest");

            let manager = PackageManager::new(temp_dir.path().to_path_buf());
            let sources = vec![package_root.to_string_lossy().to_string()];
            let resolved = manager
                .resolve_extension_sources(
                    &sources,
                    ResolveExtensionSourcesOptions {
                        local: false,
                        temporary: true,
                    },
                )
                .await
                .expect("resolve extension sources");

            let paths = resolved
                .extensions
                .iter()
                .map(|entry| entry.path.clone())
                .collect::<Vec<_>>();
            assert!(paths.contains(&package_root.join("extensions/a.native.json")));
            assert!(!paths.contains(&package_root.join("extensions/b.native.json")));
        });
    }

    // A directory containing an extension.json manifest resolves to the
    // directory itself rather than its individual entry files.
    #[test]
    fn test_extension_manifest_directory_detected() {
        let temp_dir = tempfile::tempdir().expect("tempdir");
        let extension_dir = temp_dir.path().join("ext");
        fs::create_dir_all(&extension_dir).expect("create extension dir");
        fs::write(
            extension_dir.join("extension.json"),
            serde_json::to_string_pretty(&json!({
                "schema": "pi.ext.manifest.v1",
                "extension_id": "test.ext",
                "name": "Test Extension",
                "version": "0.1.0",
                "api_version": "1.0",
                "runtime": "native-rust",
                "entrypoint": "index.native.json",
                "capabilities": []
            }))
            .expect("serialize extension manifest"),
        )
        .expect("write extension manifest");
        fs::write(extension_dir.join("index.native.json"), "{}").expect("write extension entry");

        let entries = resolve_extension_entries(&extension_dir).expect("entries");
        assert_eq!(entries, vec![extension_dir]);
    }

    // Entries starting with !/+/- or containing glob chars count as patterns.
    #[test]
    fn is_pattern_detects_all_prefix_operators() {
        assert!(is_pattern("!exclude_me"));
        assert!(is_pattern("+force_include"));
        assert!(is_pattern("-force_exclude"));
        assert!(is_pattern("*.js"));
        assert!(is_pattern("foo?bar"));
        assert!(!is_pattern("plain_entry"));
        assert!(!is_pattern("extensions/a.js"));
        assert!(!is_pattern(""));
    }

    // `split_patterns` preserves order within each partition.
    #[test]
    fn split_patterns_separates_plain_from_operators() {
        let entries = vec![
            "a.js".to_string(),
            "!b.js".to_string(),
            "c.js".to_string(),
            "+d.js".to_string(),
            "-e.js".to_string(),
            "*.ts".to_string(),
        ];
        let (plain, patterns) = split_patterns(&entries);
        assert_eq!(plain, vec!["a.js", "c.js"]);
        assert_eq!(patterns, vec!["!b.js", "+d.js", "-e.js", "*.ts"]);
    }

    #[test]
    fn split_patterns_empty_input() {
        let (plain, patterns) = split_patterns(&[]);
        assert!(plain.is_empty());
        assert!(patterns.is_empty());
    }

    // Forward-slash paths pass through `posix_string` unchanged.
    #[test]
    fn posix_string_normalizes_separators() {
        assert_eq!(posix_string(Path::new("a/b/c")), "a/b/c");
        assert_eq!(posix_string(Path::new("/abs/path")), "/abs/path");
    }

    #[test]
    fn relative_posix_computes_relative_path() {
        let base = Path::new("/home/user/project");
        let path = Path::new("/home/user/project/src/main.rs");
        assert_eq!(relative_posix(base, path), "src/main.rs");
    }

    // Sibling directories require "../" traversal segments.
    #[test]
    fn relative_posix_with_parent_traversal() {
        let base = Path::new("/home/user/project/src");
        let path = Path::new("/home/user/project/tests/foo.rs");
        assert_eq!(relative_posix(base, path), "../tests/foo.rs");
    }

    // With no shared prefix, the result climbs all the way to root.
    #[test]
    fn relative_posix_no_common_prefix() {
        let base = Path::new("/a/b");
        let path = Path::new("/c/d");
        let result = relative_posix(base, path);
        assert_eq!(result, "../../c/d");
    }
3928
    // A leading "./" is stripped; other inputs pass through unchanged.
    #[test]
    fn normalize_exact_pattern_strips_dot_slash() {
        assert_eq!(normalize_exact_pattern("./foo.js"), "foo.js");
        assert_eq!(normalize_exact_pattern("foo.js"), "foo.js");
        assert_eq!(normalize_exact_pattern(""), "");
    }

    // "*" matches any run of characters within a segment.
    #[test]
    fn pattern_matches_simple_glob() {
        assert!(pattern_matches("*.js", "foo.js"));
        assert!(pattern_matches("*.js", "bar.js"));
        assert!(!pattern_matches("*.js", "foo.ts"));
    }

    // A pattern without glob characters matches only the exact string.
    #[test]
    fn pattern_matches_exact() {
        assert!(pattern_matches("foo.js", "foo.js"));
        assert!(!pattern_matches("foo.js", "bar.js"));
    }

    // "?" matches exactly one character.
    #[test]
    fn pattern_matches_question_mark() {
        assert!(pattern_matches("?.js", "a.js"));
        assert!(!pattern_matches("?.js", "ab.js"));
    }

    // Only a fixed allow-list of git hosts is recognized as a git URL.
    #[test]
    fn looks_like_git_url_recognizes_known_hosts() {
        assert!(looks_like_git_url("github.com/user/repo"));
        assert!(looks_like_git_url("https://github.com/user/repo"));
        assert!(looks_like_git_url("gitlab.com/user/repo"));
        assert!(looks_like_git_url("bitbucket.org/user/repo"));
        assert!(looks_like_git_url("codeberg.org/user/repo"));
        assert!(!looks_like_git_url("example.com/user/repo"));
        assert!(!looks_like_git_url("npm:foo"));
        assert!(!looks_like_git_url("./local"));
    }

    // Dot-relative, absolute, tilde, and file:// spellings are local paths.
    #[test]
    fn looks_like_local_path_various_forms() {
        assert!(looks_like_local_path("."));
        assert!(looks_like_local_path(".."));
        assert!(looks_like_local_path("./relative"));
        assert!(looks_like_local_path("../parent"));
        assert!(looks_like_local_path("/absolute"));
        assert!(looks_like_local_path("~/home_relative"));
        assert!(looks_like_local_path("file:///abs/path"));
        assert!(!looks_like_local_path("npm:foo"));
        assert!(!looks_like_local_path("github.com/user/repo"));
    }

    // Lowercase hex, two digits per byte, empty-in/empty-out.
    #[test]
    fn hex_encode_correctness() {
        assert_eq!(hex_encode(&[0x00, 0xff, 0xab, 0x12]), "00ffab12");
        assert_eq!(hex_encode(&[]), "");
        assert_eq!(hex_encode(&[0xde, 0xad, 0xbe, 0xef]), "deadbeef");
    }

    #[test]
    fn normalize_dot_segments_removes_current_dir() {
        let result = normalize_dot_segments(Path::new("/a/./b/./c"));
        assert_eq!(result, PathBuf::from("/a/b/c"));
    }

    #[test]
    fn normalize_dot_segments_resolves_parent_dir() {
        let result = normalize_dot_segments(Path::new("/a/b/../c"));
        assert_eq!(result, PathBuf::from("/a/c"));
    }

    #[test]
    fn normalize_dot_segments_multiple_parents() {
        let result = normalize_dot_segments(Path::new("/a/b/c/../../d"));
        assert_eq!(result, PathBuf::from("/a/d"));
    }

    // ".." segments that would escape the root are clamped at "/".
    #[test]
    fn normalize_dot_segments_cannot_go_above_root() {
        let result = normalize_dot_segments(Path::new("/a/../.."));
        assert_eq!(result, PathBuf::from("/"));
    }

    // For relative inputs, unresolvable ".." segments are preserved.
    #[test]
    fn normalize_dot_segments_relative_path_keeps_parents() {
        let result = normalize_dot_segments(Path::new("a/../../b"));
        assert_eq!(result, PathBuf::from("../b"));
    }

    // Absolute inputs ignore the base entirely.
    #[test]
    fn resolve_path_from_base_absolute_path() {
        let result = resolve_path_from_base("/abs/path", Path::new("/base"));
        assert_eq!(result, PathBuf::from("/abs/path"));
    }

    #[test]
    fn resolve_path_from_base_relative_path() {
        let result = resolve_path_from_base("foo/bar", Path::new("/base"));
        assert_eq!(result, PathBuf::from("/base/foo/bar"));
    }

    // "~/" expands against the real home dir (base is only the fallback).
    #[test]
    fn resolve_path_from_base_tilde_expansion() {
        let result = resolve_path_from_base("~/docs", Path::new("/base"));
        let home = dirs::home_dir().unwrap_or_else(|| PathBuf::from("/base"));
        assert_eq!(result, home.join("docs"));
    }

    #[test]
    fn resolve_path_from_base_bare_tilde() {
        let result = resolve_path_from_base("~", Path::new("/base"));
        let home = dirs::home_dir().unwrap_or_else(|| PathBuf::from("/base"));
        assert_eq!(result, home);
    }
4066
    // A bare string is promoted to a one-element list.
    #[test]
    fn extract_string_array_from_string() {
        let val = json!("single");
        let result = extract_string_array(Some(&val));
        assert_eq!(result, vec!["single"]);
    }

    #[test]
    fn extract_string_array_from_array() {
        let val = json!(["a", "b", "c"]);
        let result = extract_string_array(Some(&val));
        assert_eq!(result, vec!["a", "b", "c"]);
    }

    #[test]
    fn extract_string_array_from_null() {
        let result = extract_string_array(None);
        assert!(result.is_empty());
    }

    // Non-string elements are silently dropped, not errors.
    #[test]
    fn extract_string_array_filters_non_strings() {
        let val = json!(["a", 42, "b", null, "c"]);
        let result = extract_string_array(Some(&val));
        assert_eq!(result, vec!["a", "b", "c"]);
    }

    // A plain string spec has a source and no filter.
    #[test]
    fn extract_package_spec_from_string() {
        let spec = extract_package_spec(&json!("npm:foo@1.0"));
        assert!(spec.is_some());
        let spec = spec.unwrap();
        assert_eq!(spec.source, "npm:foo@1.0");
        assert!(spec.filter.is_none());
    }

    // An object spec carries per-resource filters; a scalar filter value is
    // promoted to a one-element list, absent keys stay None.
    #[test]
    fn extract_package_spec_from_object() {
        let val = json!({
            "source": "npm:bar",
            "extensions": ["a.js", "b.js"],
            "skills": "my-skill"
        });
        let spec = extract_package_spec(&val);
        assert!(spec.is_some());
        let spec = spec.unwrap();
        assert_eq!(spec.source, "npm:bar");
        let filter = spec.filter.unwrap();
        assert_eq!(
            filter.extensions,
            Some(vec!["a.js".to_string(), "b.js".to_string()])
        );
        assert_eq!(filter.skills, Some(vec!["my-skill".to_string()]));
        assert!(filter.prompts.is_none());
        assert!(filter.themes.is_none());
    }

    // Objects without a "source" key are rejected.
    #[test]
    fn extract_package_spec_from_object_missing_source() {
        let val = json!({"extensions": ["a.js"]});
        assert!(extract_package_spec(&val).is_none());
    }

    #[test]
    fn extract_package_spec_from_non_string_non_object() {
        assert!(extract_package_spec(&json!(42)).is_none());
        assert!(extract_package_spec(&json!(null)).is_none());
        assert!(extract_package_spec(&json!(true)).is_none());
    }

    #[test]
    fn extract_filter_field_absent_key() {
        let obj = serde_json::Map::new();
        assert!(extract_filter_field(&obj, "extensions").is_none());
    }

    #[test]
    fn extract_filter_field_string_value() {
        let mut obj = serde_json::Map::new();
        obj.insert("skills".to_string(), json!("my-skill"));
        let result = extract_filter_field(&obj, "skills");
        assert_eq!(result, Some(vec!["my-skill".to_string()]));
    }

    #[test]
    fn extract_filter_field_array_value() {
        let mut obj = serde_json::Map::new();
        obj.insert("themes".to_string(), json!(["dark", "light"]));
        let result = extract_filter_field(&obj, "themes");
        assert_eq!(result, Some(vec!["dark".to_string(), "light".to_string()]));
    }

    // A present-but-non-string value yields Some(empty), not None: the key
    // existed, so the filter applies but selects nothing.
    #[test]
    fn extract_filter_field_non_string_array_or_null() {
        let mut obj = serde_json::Map::new();
        obj.insert("prompts".to_string(), json!(42));
        let result = extract_filter_field(&obj, "prompts");
        assert_eq!(result, Some(Vec::<String>::new()));
    }

    // NOTE(review): the name says "no overrides" but a plain (non-operator)
    // pattern is passed — presumably plain entries don't count as overrides;
    // confirm against `is_enabled_by_overrides` and consider renaming.
    #[test]
    fn is_enabled_by_overrides_no_overrides_enables_all() {
        let path = Path::new("/base/extensions/foo.js");
        let patterns: Vec<String> = vec!["extensions/foo.js".to_string()];
        assert!(is_enabled_by_overrides(path, &patterns, Path::new("/base")));
    }

    #[test]
    fn is_enabled_by_overrides_exclude_disables() {
        let path = Path::new("/base/extensions/foo.js");
        let patterns = vec!["!*.js".to_string()];
        assert!(!is_enabled_by_overrides(
            path,
            &patterns,
            Path::new("/base")
        ));
    }

    // "+" wins over a preceding "!" exclusion.
    #[test]
    fn is_enabled_by_overrides_force_include_overrides_exclude() {
        let path = Path::new("/base/extensions/foo.js");
        let patterns = vec!["!*.js".to_string(), "+extensions/foo.js".to_string()];
        assert!(is_enabled_by_overrides(path, &patterns, Path::new("/base")));
    }

    // "-" (force exclude) is the strongest operator.
    #[test]
    fn is_enabled_by_overrides_force_exclude_overrides_force_include() {
        let path = Path::new("/base/extensions/foo.js");
        let patterns = vec![
            "+extensions/foo.js".to_string(),
            "-extensions/foo.js".to_string(),
        ];
        assert!(!is_enabled_by_overrides(
            path,
            &patterns,
            Path::new("/base")
        ));
    }
4217
    // A plain glob acts as an include filter over the candidate paths.
    #[test]
    fn apply_patterns_include_glob() {
        let base = Path::new("/base");
        let paths = vec![
            PathBuf::from("/base/a.js"),
            PathBuf::from("/base/b.ts"),
            PathBuf::from("/base/c.js"),
        ];
        let patterns = vec!["*.js".to_string()];
        let result = apply_patterns(&paths, &patterns, base);
        assert!(result.contains(&PathBuf::from("/base/a.js")));
        assert!(result.contains(&PathBuf::from("/base/c.js")));
        assert!(!result.contains(&PathBuf::from("/base/b.ts")));
    }

    // "!" removes matches from whatever the includes selected.
    #[test]
    fn apply_patterns_exclude_removes_from_includes() {
        let base = Path::new("/base");
        let paths = vec![
            PathBuf::from("/base/a.js"),
            PathBuf::from("/base/b.js"),
            PathBuf::from("/base/c.js"),
        ];
        let patterns = vec!["*.js".to_string(), "!b.js".to_string()];
        let result = apply_patterns(&paths, &patterns, base);
        assert!(result.contains(&PathBuf::from("/base/a.js")));
        assert!(!result.contains(&PathBuf::from("/base/b.js")));
        assert!(result.contains(&PathBuf::from("/base/c.js")));
    }

    // No patterns means no filtering.
    #[test]
    fn apply_patterns_no_patterns_returns_all() {
        let base = Path::new("/base");
        let paths = vec![PathBuf::from("/base/a.js"), PathBuf::from("/base/b.js")];
        let result = apply_patterns(&paths, &[], base);
        assert_eq!(result.len(), 2);
    }

    // "+" re-adds a path the include set would otherwise have dropped.
    #[test]
    fn apply_patterns_force_include_adds_excluded() {
        let base = Path::new("/base");
        let paths = vec![PathBuf::from("/base/a.js"), PathBuf::from("/base/b.js")];
        let patterns = vec!["a.js".to_string(), "+b.js".to_string()];
        let result = apply_patterns(&paths, &patterns, base);
        assert!(result.contains(&PathBuf::from("/base/a.js")));
        assert!(result.contains(&PathBuf::from("/base/b.js")));
    }

    // "-" removes its match while leaving the rest untouched.
    #[test]
    fn apply_patterns_force_exclude_removes_everything() {
        let base = Path::new("/base");
        let paths = vec![PathBuf::from("/base/a.js"), PathBuf::from("/base/b.js")];
        let patterns = vec!["-a.js".to_string()];
        let result = apply_patterns(&paths, &patterns, base);
        assert!(!result.contains(&PathBuf::from("/base/a.js")));
        assert!(result.contains(&PathBuf::from("/base/b.js")));
    }

    // Blank sources normalize to None.
    #[test]
    fn normalize_source_empty_returns_none() {
        assert!(normalize_source("").is_none());
        assert!(normalize_source(" ").is_none());
    }

    // npm keys drop the version but keep the scope.
    #[test]
    fn normalize_source_npm() {
        let result = normalize_source("npm:@scope/pkg@2.0.0").unwrap();
        assert_eq!(result.kind, NormalizedKind::Npm);
        assert_eq!(result.key, "@scope/pkg");
    }

    // git keys drop the ref.
    #[test]
    fn normalize_source_git() {
        let result = normalize_source("git:github.com/user/repo@v1").unwrap();
        assert_eq!(result.kind, NormalizedKind::Git);
        assert_eq!(result.key, "github.com/user/repo");
    }

    // https URLs for known hosts normalize like git: sources (scheme and
    // ".git" suffix stripped).
    #[test]
    fn normalize_source_https_git_url() {
        let result = normalize_source("https://github.com/user/repo.git@v2").unwrap();
        assert_eq!(result.kind, NormalizedKind::Git);
        assert_eq!(result.key, "github.com/user/repo");
    }

    #[test]
    fn normalize_source_local() {
        let result = normalize_source("my-local-package").unwrap();
        assert_eq!(result.kind, NormalizedKind::Local);
        assert_eq!(result.key, "my-local-package");
    }

    #[test]
    fn parse_npm_spec_empty() {
        assert_eq!(parse_npm_spec(""), (String::new(), None));
    }

    #[test]
    fn parse_npm_spec_whitespace() {
        assert_eq!(parse_npm_spec(" foo "), ("foo".to_string(), None));
    }

    #[test]
    fn parse_npm_spec_scoped_with_version() {
        let (name, version) = parse_npm_spec("@org/pkg@^3.0.0");
        assert_eq!(name, "@org/pkg");
        assert_eq!(version, Some("^3.0.0".to_string()));
    }

    // A trailing "@" with no version is kept in the name, not treated as an
    // empty version.
    #[test]
    fn parse_npm_spec_trailing_at() {
        let (name, version) = parse_npm_spec("foo@");
        assert_eq!(name, "foo@");
        assert!(version.is_none());
    }

    // Adding the same path twice keeps only the first entry.
    #[test]
    fn resource_list_deduplicates_by_path() {
        let mut list = ResourceList::default();
        let metadata = PathMetadata {
            source: "test".to_string(),
            scope: PackageScope::User,
            origin: ResourceOrigin::Package,
            base_dir: None,
        };
        list.add(PathBuf::from("/a"), &metadata, true);
        list.add(PathBuf::from("/a"), &metadata, true);
        list.add(PathBuf::from("/b"), &metadata, false);
        assert_eq!(list.items.len(), 2);
        assert_eq!(list.items[0].path, PathBuf::from("/a"));
        assert_eq!(list.items[1].path, PathBuf::from("/b"));
    }

    // Each resource category is sorted by path on conversion.
    #[test]
    fn resource_accumulator_sorts_by_path() {
        let mut acc = ResourceAccumulator::new();
        let metadata = PathMetadata {
            source: "test".to_string(),
            scope: PackageScope::User,
            origin: ResourceOrigin::Package,
            base_dir: None,
        };
        acc.extensions.add(PathBuf::from("/z/ext"), &metadata, true);
        acc.extensions.add(PathBuf::from("/a/ext"), &metadata, true);
        acc.skills.add(PathBuf::from("/z/skill"), &metadata, true);
        acc.skills.add(PathBuf::from("/a/skill"), &metadata, true);

        let resolved = acc.into_resolved_paths();
        assert_eq!(resolved.extensions[0].path, PathBuf::from("/a/ext"));
        assert_eq!(resolved.extensions[1].path, PathBuf::from("/z/ext"));
        assert_eq!(resolved.skills[0].path, PathBuf::from("/a/skill"));
        assert_eq!(resolved.skills[1].path, PathBuf::from("/z/skill"));
    }
4391
    // `entries_for` maps each ResourceType to its corresponding list.
    #[test]
    fn settings_snapshot_entries_for_returns_correct_type() {
        let snapshot = SettingsSnapshot {
            packages: vec![],
            extensions: vec!["ext".to_string()],
            skills: vec!["skill".to_string()],
            prompts: vec!["prompt".to_string()],
            themes: vec!["theme".to_string()],
        };
        assert_eq!(snapshot.entries_for(ResourceType::Extensions), &["ext"]);
        assert_eq!(snapshot.entries_for(ResourceType::Skills), &["skill"]);
        assert_eq!(snapshot.entries_for(ResourceType::Prompts), &["prompt"]);
        assert_eq!(snapshot.entries_for(ResourceType::Themes), &["theme"]);
    }

    // A missing settings file reads as `{}`, not an error.
    #[test]
    fn read_settings_json_missing_file_returns_empty_object() {
        let result = read_settings_json(Path::new("/nonexistent/path/settings.json"));
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), json!({}));
    }

    #[test]
    fn read_settings_json_valid_file() {
        let dir = tempfile::tempdir().expect("tempdir");
        let path = dir.path().join("settings.json");
        fs::write(&path, r#"{"foo": "bar"}"#).expect("write");
        let result = read_settings_json(&path).expect("read");
        assert_eq!(result, json!({"foo": "bar"}));
    }

    // Malformed JSON in an existing file is an error, unlike a missing file.
    #[test]
    fn read_settings_json_invalid_json() {
        let dir = tempfile::tempdir().expect("tempdir");
        let path = dir.path().join("settings.json");
        fs::write(&path, "not json").expect("write");
        assert!(read_settings_json(&path).is_err());
    }

    // The snapshot accepts both string and object package specs, and scalar
    // top-level resource entries are promoted to lists.
    #[test]
    fn read_settings_snapshot_with_packages_and_entries() {
        let dir = tempfile::tempdir().expect("tempdir");
        let path = dir.path().join("settings.json");
        let settings = json!({
            "packages": ["npm:foo", {"source": "npm:bar", "extensions": ["a.js"]}],
            "extensions": ["ext1.js"],
            "skills": "my-skill",
            "themes": ["dark.json", "light.json"]
        });
        fs::write(&path, serde_json::to_string(&settings).unwrap()).expect("write");
        let snapshot = read_settings_snapshot(&path).expect("read");
        assert_eq!(snapshot.packages.len(), 2);
        assert_eq!(snapshot.packages[0].source, "npm:foo");
        assert_eq!(snapshot.packages[1].source, "npm:bar");
        assert!(snapshot.packages[1].filter.is_some());
        assert_eq!(snapshot.extensions, vec!["ext1.js"]);
        assert_eq!(snapshot.skills, vec!["my-skill"]);
        assert_eq!(snapshot.themes, vec!["dark.json", "light.json"]);
        assert!(snapshot.prompts.is_empty());
    }

    // The atomic writer also creates missing parent directories.
    #[test]
    fn write_settings_json_atomic_creates_file() {
        let dir = tempfile::tempdir().expect("tempdir");
        let path = dir.path().join("sub/settings.json");
        let value = json!({"test": true});
        write_settings_json_atomic(&path, &value).expect("write");
        let content = fs::read_to_string(&path).expect("read");
        let parsed: Value = serde_json::from_str(&content).expect("parse");
        assert_eq!(parsed, json!({"test": true}));
    }

    // Add is idempotent per identity (version differences don't duplicate),
    // and Remove deletes the matching entry.
    #[test]
    fn update_package_sources_add_and_remove() {
        let dir = tempfile::tempdir().expect("tempdir");
        let path = dir.path().join("settings.json");
        fs::write(&path, "{}").expect("write initial");

        update_package_sources(&path, "npm:foo", UpdateAction::Add).expect("add");
        let settings = read_settings_json(&path).expect("read");
        let packages = settings["packages"].as_array().expect("packages array");
        assert_eq!(packages.len(), 1);
        assert_eq!(packages[0], json!("npm:foo"));

        update_package_sources(&path, "npm:foo@2.0", UpdateAction::Add).expect("add again");
        let settings = read_settings_json(&path).expect("read");
        let packages = settings["packages"].as_array().expect("packages array");
        assert_eq!(packages.len(), 1, "duplicate source should not be added");

        update_package_sources(&path, "npm:foo", UpdateAction::Remove).expect("remove");
        let settings = read_settings_json(&path).expect("read");
        let packages = settings["packages"].as_array().expect("packages array");
        assert!(packages.is_empty());
    }
4499
    // Listing handles both the bare-string and object package forms.
    #[test]
    fn list_packages_in_settings_reads_all_formats() {
        let dir = tempfile::tempdir().expect("tempdir");
        let path = dir.path().join("settings.json");
        let settings = json!({
            "packages": [
                "npm:foo",
                {"source": "git:github.com/user/repo", "extensions": ["a.js"]}
            ]
        });
        fs::write(&path, serde_json::to_string(&settings).unwrap()).expect("write");
        let packages = list_packages_in_settings(&path).expect("list");
        assert_eq!(packages.len(), 2);
        assert_eq!(packages[0].source, "npm:foo");
        assert!(packages[0].filter.is_none());
        assert_eq!(packages[1].source, "git:github.com/user/repo");
        assert!(packages[1].filter.is_some());
    }

    // A package.json with a "pi" section yields the declared resource lists;
    // unset categories stay None.
    #[test]
    fn read_pi_manifest_with_pi_field() {
        let dir = tempfile::tempdir().expect("tempdir");
        let manifest = json!({
            "name": "test-pkg",
            "version": "1.0.0",
            "pi": {
                "extensions": ["ext/a.js", "ext/b.js"],
                "skills": ["skills/foo.md"]
            }
        });
        fs::write(
            dir.path().join("package.json"),
            serde_json::to_string(&manifest).unwrap(),
        )
        .expect("write");
        let result = read_pi_manifest(dir.path());
        assert!(result.is_some());
        let result = result.unwrap();
        assert_eq!(
            result.extensions,
            Some(vec!["ext/a.js".to_string(), "ext/b.js".to_string()])
        );
        assert_eq!(result.skills, Some(vec!["skills/foo.md".to_string()]));
        assert!(result.prompts.is_none());
        assert!(result.themes.is_none());
    }

    // package.json without a "pi" section is not a pi manifest.
    #[test]
    fn read_pi_manifest_no_pi_field() {
        let dir = tempfile::tempdir().expect("tempdir");
        fs::write(
            dir.path().join("package.json"),
            r#"{"name": "test", "version": "1.0.0"}"#,
        )
        .expect("write");
        assert!(read_pi_manifest(dir.path()).is_none());
    }

    // No package.json at all is also None, not an error.
    #[test]
    fn read_pi_manifest_no_package_json() {
        let dir = tempfile::tempdir().expect("tempdir");
        assert!(read_pi_manifest(dir.path()).is_none());
    }
4571
4572 #[test]
4577 fn temporary_dir_stable_hash() {
4578 let a = temporary_dir("npm", None);
4579 let b = temporary_dir("npm", None);
4580 assert_eq!(a, b, "same inputs should produce same path");
4581
4582 let c = temporary_dir("npm", Some("foo"));
4583 assert_ne!(a, c, "different suffix should produce different path");
4584 }
4585
4586 #[test]
4587 fn temporary_dir_includes_prefix() {
4588 let result = temporary_dir("git-github.com", Some("user/repo"));
4589 let path_str = result.to_string_lossy();
4590 assert!(path_str.contains("pi-extensions"));
4591 assert!(path_str.contains("git-github.com"));
4592 }
4593
4594 #[test]
4599 fn compat_scan_enabled_recognizes_truthy_values() {
4600 let truthy = ["1", "true", "yes", "on", "TRUE", "Yes", "ON"];
4602 for val in truthy {
4603 let lower = val.trim().to_ascii_lowercase();
4604 assert!(
4605 matches!(lower.as_str(), "1" | "true" | "yes" | "on"),
4606 "{val} should be truthy"
4607 );
4608 }
4609 let falsy = ["0", "false", "no", "off", "", "random"];
4610 for val in falsy {
4611 let lower = val.trim().to_ascii_lowercase();
4612 assert!(
4613 !matches!(lower.as_str(), "1" | "true" | "yes" | "on"),
4614 "{val} should be falsy"
4615 );
4616 }
4617 }
4618
4619 #[test]
4624 fn parse_source_npm_prefix() {
4625 let dir = tempfile::tempdir().expect("tempdir");
4626 match parse_source("npm:@scope/pkg@1.0", dir.path()) {
4627 ParsedSource::Npm { spec, name, pinned } => {
4628 assert_eq!(spec, "@scope/pkg@1.0");
4629 assert_eq!(name, "@scope/pkg");
4630 assert!(pinned);
4631 }
4632 other => panic!("expected Npm, got {other:?}"),
4633 }
4634 }
4635
4636 #[test]
4637 fn parse_source_npm_unpinned() {
4638 let dir = tempfile::tempdir().expect("tempdir");
4639 match parse_source("npm:express", dir.path()) {
4640 ParsedSource::Npm { pinned, .. } => {
4641 assert!(!pinned);
4642 }
4643 other => panic!("expected Npm, got {other:?}"),
4644 }
4645 }
4646
4647 #[test]
4648 fn parse_source_git_prefix() {
4649 let dir = tempfile::tempdir().expect("tempdir");
4650 match parse_source("git:github.com/user/repo@v2", dir.path()) {
4651 ParsedSource::Git {
4652 repo,
4653 host,
4654 path,
4655 r#ref,
4656 pinned,
4657 } => {
4658 assert_eq!(repo, "github.com/user/repo");
4659 assert_eq!(host, "github.com");
4660 assert_eq!(path, "user/repo");
4661 assert_eq!(r#ref, Some("v2".to_string()));
4662 assert!(pinned);
4663 }
4664 other => panic!("expected Git, got {other:?}"),
4665 }
4666 }
4667
4668 #[test]
4669 fn parse_source_https_github_url() {
4670 let dir = tempfile::tempdir().expect("tempdir");
4671 match parse_source("https://github.com/user/repo.git", dir.path()) {
4672 ParsedSource::Git { repo, host, .. } => {
4673 assert_eq!(repo, "github.com/user/repo");
4674 assert_eq!(host, "github.com");
4675 }
4676 other => panic!("expected Git, got {other:?}"),
4677 }
4678 }
4679
4680 #[test]
4681 fn parse_source_local_relative() {
4682 let dir = tempfile::tempdir().expect("tempdir");
4683 match parse_source("./my-ext", dir.path()) {
4684 ParsedSource::Local { path } => {
4685 assert_eq!(path, dir.path().join("my-ext"));
4686 }
4687 other => panic!("expected Local, got {other:?}"),
4688 }
4689 }
4690
4691 #[test]
4692 #[cfg(unix)]
4693 fn parse_source_local_absolute() {
4694 let dir = tempfile::tempdir().expect("tempdir");
4695 match parse_source("/abs/my-ext", dir.path()) {
4696 ParsedSource::Local { path } => {
4697 assert_eq!(path, PathBuf::from("/abs/my-ext"));
4698 }
4699 other => panic!("expected Local, got {other:?}"),
4700 }
4701 }
4702
4703 #[test]
4708 fn parse_git_source_local_path_hashes_deterministically() {
4709 let dir = tempfile::tempdir().expect("tempdir");
4710 let result1 = parse_git_source("./local-repo", dir.path());
4711 let result2 = parse_git_source("./local-repo", dir.path());
4712 match (&result1, &result2) {
4713 (ParsedSource::Git { path: p1, .. }, ParsedSource::Git { path: p2, .. }) => {
4714 assert_eq!(p1, p2, "same local source should produce same hash");
4715 }
4716 _ => panic!("expected Git for both"),
4717 }
4718 }
4719
4720 #[test]
4725 fn dedupe_packages_project_wins_over_user() {
4726 let dir = tempfile::tempdir().expect("tempdir");
4727 let manager = PackageManager::new(dir.path().to_path_buf());
4728
4729 let packages = vec![
4730 ScopedPackage {
4731 pkg: PackageSpec {
4732 source: "npm:foo@1.0".to_string(),
4733 filter: None,
4734 },
4735 scope: PackageScope::User,
4736 },
4737 ScopedPackage {
4738 pkg: PackageSpec {
4739 source: "npm:foo@2.0".to_string(),
4740 filter: None,
4741 },
4742 scope: PackageScope::Project,
4743 },
4744 ];
4745
4746 let deduped = manager.dedupe_packages(packages);
4747 assert_eq!(deduped.len(), 1);
4748 assert_eq!(deduped[0].scope, PackageScope::Project);
4749 assert_eq!(deduped[0].pkg.source, "npm:foo@2.0");
4750 }
4751
4752 #[test]
4753 fn dedupe_packages_user_does_not_override_project() {
4754 let dir = tempfile::tempdir().expect("tempdir");
4755 let manager = PackageManager::new(dir.path().to_path_buf());
4756
4757 let packages = vec![
4758 ScopedPackage {
4759 pkg: PackageSpec {
4760 source: "npm:bar@1.0".to_string(),
4761 filter: None,
4762 },
4763 scope: PackageScope::Project,
4764 },
4765 ScopedPackage {
4766 pkg: PackageSpec {
4767 source: "npm:bar@2.0".to_string(),
4768 filter: None,
4769 },
4770 scope: PackageScope::User,
4771 },
4772 ];
4773
4774 let deduped = manager.dedupe_packages(packages);
4775 assert_eq!(deduped.len(), 1);
4776 assert_eq!(deduped[0].scope, PackageScope::Project);
4777 assert_eq!(deduped[0].pkg.source, "npm:bar@1.0");
4778 }
4779
4780 #[test]
4781 fn dedupe_packages_different_names_preserved() {
4782 let dir = tempfile::tempdir().expect("tempdir");
4783 let manager = PackageManager::new(dir.path().to_path_buf());
4784
4785 let packages = vec![
4786 ScopedPackage {
4787 pkg: PackageSpec {
4788 source: "npm:foo".to_string(),
4789 filter: None,
4790 },
4791 scope: PackageScope::User,
4792 },
4793 ScopedPackage {
4794 pkg: PackageSpec {
4795 source: "npm:bar".to_string(),
4796 filter: None,
4797 },
4798 scope: PackageScope::User,
4799 },
4800 ];
4801
4802 let deduped = manager.dedupe_packages(packages);
4803 assert_eq!(deduped.len(), 2);
4804 }
4805
4806 #[test]
4811 fn collect_auto_prompt_entries_finds_md_files() {
4812 let dir = tempfile::tempdir().expect("tempdir");
4813 let prompts_dir = dir.path().join("prompts");
4814 fs::create_dir_all(&prompts_dir).expect("create dir");
4815 fs::write(prompts_dir.join("hello.md"), "# Hello").expect("write");
4816 fs::write(prompts_dir.join("world.md"), "# World").expect("write");
4817 fs::write(prompts_dir.join("notmd.txt"), "text").expect("write");
4818 fs::write(prompts_dir.join(".hidden.md"), "hidden").expect("write");
4819
4820 let entries = collect_auto_prompt_entries(&prompts_dir);
4821 assert_eq!(entries.len(), 2);
4822 assert!(entries.iter().all(|p| p.extension().unwrap() == "md"));
4823 }
4824
4825 #[test]
4826 fn collect_auto_prompt_entries_nonexistent_dir() {
4827 let entries = collect_auto_prompt_entries(Path::new("/nonexistent"));
4828 assert!(entries.is_empty());
4829 }
4830
4831 #[test]
4832 fn collect_auto_theme_entries_finds_json_files() {
4833 let dir = tempfile::tempdir().expect("tempdir");
4834 let themes_dir = dir.path().join("themes");
4835 fs::create_dir_all(&themes_dir).expect("create dir");
4836 fs::write(themes_dir.join("dark.json"), "{}").expect("write");
4837 fs::write(themes_dir.join("light.json"), "{}").expect("write");
4838 fs::write(themes_dir.join("readme.md"), "text").expect("write");
4839
4840 let entries = collect_auto_theme_entries(&themes_dir);
4841 assert_eq!(entries.len(), 2);
4842 assert!(entries.iter().all(|p| p.extension().unwrap() == "json"));
4843 }
4844
4845 #[test]
4850 fn collect_auto_extension_entries_finds_native_descriptors() {
4851 let dir = tempfile::tempdir().expect("tempdir");
4852 let ext_dir = dir.path().join("extensions");
4853 fs::create_dir_all(&ext_dir).expect("create dir");
4854 fs::write(ext_dir.join("a.native.json"), "{}").expect("write");
4855 fs::write(ext_dir.join("b.native.json"), "{}").expect("write");
4856 fs::write(ext_dir.join("c.md"), "c").expect("write");
4857
4858 let entries = collect_auto_extension_entries(&ext_dir);
4859 assert!(entries.len() >= 2);
4860 let has_a = entries
4861 .iter()
4862 .any(|p| p.file_name().unwrap() == "a.native.json");
4863 let has_b = entries
4864 .iter()
4865 .any(|p| p.file_name().unwrap() == "b.native.json");
4866 let has_md = entries.iter().any(|p| p.file_name().unwrap() == "c.md");
4867 assert!(has_a, "should find native descriptor files");
4868 assert!(has_b, "should find native descriptor files");
4869 assert!(!has_md, "should not find .md files");
4870 }
4871
4872 #[test]
4873 fn collect_auto_extension_entries_finds_root_js_entry_file() {
4874 let dir = tempfile::tempdir().expect("tempdir");
4875 let ext_dir = dir.path().join("extensions");
4876 fs::create_dir_all(&ext_dir).expect("create dir");
4877 fs::write(ext_dir.join("my_extension.ts"), "export default {}").expect("write");
4878
4879 let entries = collect_auto_extension_entries(&ext_dir);
4880 assert!(
4881 entries
4882 .iter()
4883 .any(|p| p.file_name().unwrap() == "my_extension.ts")
4884 );
4885 }
4886
4887 #[test]
4888 fn collect_auto_extension_entries_deduplicates_index_entry() {
4889 let dir = tempfile::tempdir().expect("tempdir");
4890 let ext_dir = dir.path().join("extensions");
4891 fs::create_dir_all(&ext_dir).expect("create dir");
4892 fs::write(ext_dir.join("index.ts"), "export default {}").expect("write");
4893
4894 let entries = collect_auto_extension_entries(&ext_dir);
4895 let count = entries
4896 .iter()
4897 .filter(|p| p.file_name().unwrap() == "index.ts")
4898 .count();
4899 assert_eq!(count, 1, "index.ts should only be present once");
4900 }
4901
4902 #[test]
4907 fn resolve_extension_entries_finds_index_native_json() {
4908 let dir = tempfile::tempdir().expect("tempdir");
4909 let ext_dir = dir.path().join("ext");
4910 fs::create_dir_all(&ext_dir).expect("create dir");
4911 fs::write(ext_dir.join("index.native.json"), "{}").expect("write");
4912
4913 let entries = resolve_extension_entries(&ext_dir).expect("entries");
4914 assert_eq!(entries, vec![ext_dir.join("index.native.json")]);
4915 }
4916
4917 #[test]
4918 fn resolve_extension_entries_finds_index_ts() {
4919 let dir = tempfile::tempdir().expect("tempdir");
4920 let ext_dir = dir.path().join("ext");
4921 fs::create_dir_all(&ext_dir).expect("create dir");
4922 fs::write(ext_dir.join("index.ts"), "export default {}").expect("write");
4923
4924 let entries = resolve_extension_entries(&ext_dir).expect("entries");
4925 assert_eq!(entries, vec![ext_dir.join("index.ts")]);
4926 }
4927
4928 #[test]
4929 fn resolve_extension_entries_finds_index_js() {
4930 let dir = tempfile::tempdir().expect("tempdir");
4931 let ext_dir = dir.path().join("ext");
4932 fs::create_dir_all(&ext_dir).expect("create dir");
4933 fs::write(ext_dir.join("index.js"), "export default {}").expect("write");
4934
4935 let entries = resolve_extension_entries(&ext_dir).expect("entries");
4936 assert_eq!(entries, vec![ext_dir.join("index.js")]);
4937 }
4938
4939 #[test]
4940 fn resolve_extension_entries_prefers_manifest_over_index() {
4941 let dir = tempfile::tempdir().expect("tempdir");
4942 let ext_dir = dir.path().join("ext");
4943 fs::create_dir_all(&ext_dir).expect("create dir");
4944 fs::write(
4945 ext_dir.join("extension.json"),
4946 serde_json::to_string_pretty(&json!({
4947 "schema": "pi.ext.manifest.v1",
4948 "extension_id": "test.ext",
4949 "name": "Test",
4950 "version": "0.1.0",
4951 "api_version": "1.0",
4952 "runtime": "native-rust",
4953 "entrypoint": "main.native.json",
4954 "capabilities": []
4955 }))
4956 .unwrap(),
4957 )
4958 .expect("write manifest");
4959 fs::write(ext_dir.join("main.native.json"), "{}").expect("write main");
4960 fs::write(ext_dir.join("index.ts"), "index").expect("write index");
4961
4962 let entries = resolve_extension_entries(&ext_dir).expect("entries");
4963 assert_eq!(entries, vec![ext_dir]);
4965 }
4966
4967 #[test]
4968 fn resolve_extension_entries_empty_dir_returns_none() {
4969 let dir = tempfile::tempdir().expect("tempdir");
4970 let ext_dir = dir.path().join("ext");
4971 fs::create_dir_all(&ext_dir).expect("create dir");
4972 assert!(resolve_extension_entries(&ext_dir).is_none());
4973 }
4974
4975 #[test]
4980 fn collect_skill_entries_finds_skill_md_in_subdirs() {
4981 let dir = tempfile::tempdir().expect("tempdir");
4982 let skills_dir = dir.path().join("skills");
4983 fs::create_dir_all(skills_dir.join("my-skill")).expect("create dir");
4984 fs::write(skills_dir.join("my-skill/SKILL.md"), "# Skill").expect("write skill");
4985 fs::write(skills_dir.join("top-level.md"), "# Top").expect("write top");
4986 fs::write(skills_dir.join("readme.txt"), "text").expect("write txt");
4987
4988 let entries = collect_skill_entries(&skills_dir);
4989 assert!(entries.iter().any(|p| p.file_name().unwrap() == "SKILL.md"));
4990 assert!(
4991 entries
4992 .iter()
4993 .any(|p| p.file_name().unwrap() == "top-level.md")
4994 );
4995 assert!(
4996 !entries
4997 .iter()
4998 .any(|p| p.file_name().unwrap() == "readme.txt")
4999 );
5000 }
5001
5002 #[test]
5007 fn prune_empty_git_parents_removes_empty_ancestors() {
5008 let dir = tempfile::tempdir().expect("tempdir");
5009 let root = dir.path().join("git");
5010 let deep = root.join("github.com/user/repo");
5011 fs::create_dir_all(&deep).expect("create dirs");
5012
5013 fs::remove_dir(&deep).expect("remove repo dir");
5015
5016 prune_empty_git_parents(&deep, &root);
5017
5018 assert!(!root.join("github.com/user").exists());
5020 assert!(!root.join("github.com").exists());
5021 assert!(root.exists());
5023 }
5024
5025 #[test]
5030 fn ensure_npm_project_creates_package_json() {
5031 let dir = tempfile::tempdir().expect("tempdir");
5032 let root = dir.path().join("npm");
5033 ensure_npm_project(&root).expect("ensure");
5034 assert!(root.join("package.json").exists());
5035 assert!(root.join(".gitignore").exists());
5036
5037 let content = fs::read_to_string(root.join("package.json")).expect("read");
5038 let json: Value = serde_json::from_str(&content).expect("parse");
5039 assert_eq!(json["name"], "pi-packages");
5040 assert_eq!(json["private"], true);
5041 }
5042
5043 #[test]
5044 fn ensure_npm_project_does_not_overwrite_existing() {
5045 let dir = tempfile::tempdir().expect("tempdir");
5046 let root = dir.path().join("npm");
5047 fs::create_dir_all(&root).expect("create dir");
5048 fs::write(root.join("package.json"), r#"{"name":"existing"}"#).expect("write");
5049 ensure_npm_project(&root).expect("ensure");
5050 let content = fs::read_to_string(root.join("package.json")).expect("read");
5051 assert!(content.contains("existing"), "should not overwrite");
5052 }
5053
5054 #[test]
5055 fn ensure_git_ignore_creates_gitignore() {
5056 let dir = tempfile::tempdir().expect("tempdir");
5057 let root = dir.path().join("git");
5058 ensure_git_ignore(&root).expect("ensure");
5059 let content = fs::read_to_string(root.join(".gitignore")).expect("read");
5060 assert!(content.contains('*'));
5061 assert!(content.contains("!.gitignore"));
5062 }
5063
5064 #[test]
5069 fn pi_manifest_entries_for_returns_cloned_vectors() {
5070 let manifest = PiManifest {
5071 extensions: Some(vec!["a.js".to_string()]),
5072 skills: None,
5073 prompts: Some(vec!["p.md".to_string()]),
5074 themes: None,
5075 };
5076 assert_eq!(
5077 manifest.entries_for(ResourceType::Extensions),
5078 Some(vec!["a.js".to_string()])
5079 );
5080 assert!(manifest.entries_for(ResourceType::Skills).is_none());
5081 assert_eq!(
5082 manifest.entries_for(ResourceType::Prompts),
5083 Some(vec!["p.md".to_string()])
5084 );
5085 assert!(manifest.entries_for(ResourceType::Themes).is_none());
5086 }
5087
5088 #[test]
5093 fn resource_type_all_and_as_str() {
5094 let all = ResourceType::all();
5095 assert_eq!(all.len(), 4);
5096 assert_eq!(ResourceType::Extensions.as_str(), "extensions");
5097 assert_eq!(ResourceType::Skills.as_str(), "skills");
5098 assert_eq!(ResourceType::Prompts.as_str(), "prompts");
5099 assert_eq!(ResourceType::Themes.as_str(), "themes");
5100 }
5101
5102 #[test]
5107 fn read_installed_npm_version_parses_package_json() {
5108 let dir = tempfile::tempdir().expect("tempdir");
5109 fs::write(
5110 dir.path().join("package.json"),
5111 r#"{"name":"foo","version":"1.2.3"}"#,
5112 )
5113 .expect("write");
5114 let version = read_installed_npm_version(dir.path());
5115 assert_eq!(version, Some("1.2.3".to_string()));
5116 }
5117
5118 #[test]
5119 fn read_installed_npm_version_missing_version_field() {
5120 let dir = tempfile::tempdir().expect("tempdir");
5121 fs::write(dir.path().join("package.json"), r#"{"name":"foo"}"#).expect("write");
5122 assert!(read_installed_npm_version(dir.path()).is_none());
5123 }
5124
5125 #[test]
5126 fn read_installed_npm_version_no_package_json() {
5127 let dir = tempfile::tempdir().expect("tempdir");
5128 assert!(read_installed_npm_version(dir.path()).is_none());
5129 }
5130
5131 #[test]
5136 fn extract_package_source_string_value() {
5137 let (source, is_obj) = extract_package_source(&json!("npm:foo")).unwrap();
5138 assert_eq!(source, "npm:foo");
5139 assert!(!is_obj);
5140 }
5141
5142 #[test]
5143 fn extract_package_source_object_value() {
5144 let val = json!({"source": "git:repo"});
5145 let (source, is_obj) = extract_package_source(&val).unwrap();
5146 assert_eq!(source, "git:repo");
5147 assert!(is_obj);
5148 }
5149
5150 #[test]
5151 fn extract_package_source_invalid_returns_none() {
5152 assert!(extract_package_source(&json!(42)).is_none());
5153 assert!(extract_package_source(&json!(null)).is_none());
5154 }
5155
5156 #[test]
5161 fn auto_dirs_constructs_correct_paths() {
5162 let base = Path::new("/home/user/.pi/agent");
5163 let dirs = AutoDirs::new(base);
5164 assert_eq!(dirs.extensions, base.join("extensions"));
5165 assert_eq!(dirs.skills, base.join("skills"));
5166 assert_eq!(dirs.prompts, base.join("prompts"));
5167 assert_eq!(dirs.themes, base.join("themes"));
5168 }
5169
5170 #[test]
5175 fn get_override_patterns_filters_correctly() {
5176 let entries = vec![
5177 "a.js".to_string(),
5178 "!excluded.js".to_string(),
5179 "+forced.js".to_string(),
5180 "-removed.js".to_string(),
5181 "b.js".to_string(),
5182 ];
5183 let overrides = get_override_patterns(&entries);
5184 assert_eq!(overrides.len(), 3);
5185 assert!(overrides.contains(&"!excluded.js".to_string()));
5186 assert!(overrides.contains(&"+forced.js".to_string()));
5187 assert!(overrides.contains(&"-removed.js".to_string()));
5188 }
5189
5190 #[test]
5195 fn local_path_from_spec_file_url() {
5196 let cwd = Path::new("/home/user/project");
5197 let result = local_path_from_spec("file:///abs/repo", cwd);
5198 assert_eq!(result, PathBuf::from("/abs/repo"));
5199 }
5200
5201 #[test]
5202 fn local_path_from_spec_relative() {
5203 let cwd = Path::new("/home/user/project");
5204 let result = local_path_from_spec("./my-repo", cwd);
5205 assert_eq!(result, PathBuf::from("/home/user/project/my-repo"));
5206 }
5207
5208 #[test]
5209 fn parse_git_source_sanitizes_paths() {
5210 let dir = tempfile::tempdir().expect("tempdir");
5211
5212 match parse_git_source("../../evil/repo", dir.path()) {
5215 ParsedSource::Git { host, .. } => {
5216 assert_eq!(host, "local");
5219 }
5220 other => panic!("expected Git, got {other:?}"),
5221 }
5222
5223 match parse_git_source("github.com/../../user/repo", dir.path()) {
5225 ParsedSource::Git { host, path, .. } => {
5226 assert_eq!(host, "github.com");
5228 assert_eq!(path, "user/repo");
5229 }
5230 other => panic!("expected Git, got {other:?}"),
5231 }
5232
5233 match parse_git_source("..", dir.path()) {
5235 ParsedSource::Git { host, .. } => {
5236 assert_eq!(host, "local");
5238 }
5239 other => panic!("expected Git, got {other:?}"),
5240 }
5241 }
5242
5243 fn sample_npm_lock_entry(
5244 source: &str,
5245 requested_spec: &str,
5246 requested_version: Option<&str>,
5247 installed_version: &str,
5248 digest: &str,
5249 pinned: bool,
5250 ) -> PackageLockEntry {
5251 PackageLockEntry {
5252 identity: "npm:demo-pkg".to_string(),
5253 source: source.to_string(),
5254 source_kind: PackageSourceKind::Npm,
5255 resolved: PackageResolvedProvenance::Npm {
5256 name: "demo-pkg".to_string(),
5257 requested_spec: requested_spec.to_string(),
5258 requested_version: requested_version.map(str::to_string),
5259 installed_version: installed_version.to_string(),
5260 pinned,
5261 },
5262 digest_sha256: digest.to_string(),
5263 trust_state: PackageEntryTrustState::Trusted,
5264 }
5265 }
5266
5267 #[test]
5268 fn evaluate_lock_transition_rejects_install_digest_mismatch() {
5269 let existing = sample_npm_lock_entry(
5270 "npm:demo-pkg@1.0.0",
5271 "demo-pkg@1.0.0",
5272 Some("1.0.0"),
5273 "1.0.0",
5274 "aaaaaaaa",
5275 true,
5276 );
5277 let candidate = sample_npm_lock_entry(
5278 "npm:demo-pkg@1.0.0",
5279 "demo-pkg@1.0.0",
5280 Some("1.0.0"),
5281 "1.0.0",
5282 "bbbbbbbb",
5283 true,
5284 );
5285
5286 let mismatch =
5287 evaluate_lock_transition(Some(&existing), &candidate, PackageLockAction::Install)
5288 .expect_err("install should fail closed on digest mismatch");
5289 assert_eq!(mismatch.code, "digest_mismatch");
5290 }
5291
5292 #[test]
5293 fn evaluate_lock_transition_allows_unpinned_update_changes() {
5294 let existing =
5295 sample_npm_lock_entry("npm:demo-pkg", "demo-pkg", None, "1.0.0", "aaaaaaaa", false);
5296 let candidate =
5297 sample_npm_lock_entry("npm:demo-pkg", "demo-pkg", None, "1.1.0", "bbbbbbbb", false);
5298
5299 let transition =
5300 evaluate_lock_transition(Some(&existing), &candidate, PackageLockAction::Update)
5301 .expect("unpinned update should permit provenance/digest rotation");
5302 assert!(
5303 transition
5304 .reason_codes
5305 .contains(&"provenance_changed".to_string()),
5306 "expected provenance_changed reason code"
5307 );
5308 assert!(
5309 transition
5310 .reason_codes
5311 .contains(&"digest_changed".to_string()),
5312 "expected digest_changed reason code"
5313 );
5314 }
5315
5316 #[test]
5317 fn digest_package_path_ignores_git_metadata() {
5318 let dir = tempfile::tempdir().expect("tempdir");
5319 let package_root = dir.path().join("pkg");
5320 fs::create_dir_all(package_root.join(".git")).expect("create .git dir");
5321 fs::write(package_root.join("index.js"), "export const ok = true;\n")
5322 .expect("write index.js");
5323 fs::write(package_root.join(".git/HEAD"), "ref: refs/heads/main\n")
5324 .expect("write .git/HEAD");
5325
5326 let digest_before = digest_package_path(&package_root).expect("digest before");
5327
5328 fs::write(package_root.join(".git/HEAD"), "ref: refs/heads/feature\n")
5329 .expect("rewrite .git/HEAD");
5330 fs::write(
5331 package_root.join(".git/config"),
5332 "[core]\nrepositoryformatversion = 0\n",
5333 )
5334 .expect("write .git/config");
5335
5336 let digest_after = digest_package_path(&package_root).expect("digest after");
5337 assert_eq!(
5338 digest_before, digest_after,
5339 "digest should ignore .git metadata"
5340 );
5341 }
5342
5343 #[test]
5344 fn verify_and_record_lock_is_deterministic_for_same_inputs() {
5345 let dir = tempfile::tempdir().expect("tempdir");
5346 let cwd = dir.path().to_path_buf();
5347 let pkg = cwd.join("local-pkg");
5348 fs::create_dir_all(&pkg).expect("create local package dir");
5349 fs::write(pkg.join("index.js"), "export const stable = true;\n")
5350 .expect("write extension file");
5351
5352 let manager = PackageManager::new(cwd.clone());
5353 manager
5354 .verify_and_record_lock(
5355 "./local-pkg",
5356 PackageScope::Project,
5357 PackageLockAction::Install,
5358 )
5359 .expect("first lock verification");
5360
5361 let lockfile_path = cwd.join(".pi").join("packages.lock.json");
5362 let first = fs::read_to_string(&lockfile_path).expect("read first lockfile");
5363
5364 manager
5365 .verify_and_record_lock(
5366 "./local-pkg",
5367 PackageScope::Project,
5368 PackageLockAction::Install,
5369 )
5370 .expect("second lock verification");
5371 let second = fs::read_to_string(&lockfile_path).expect("read second lockfile");
5372
5373 assert_eq!(
5374 first, second,
5375 "same inputs should produce identical lockfile artifacts"
5376 );
5377 }
5378
    /// Property-based tests for the pure string/path helpers of the package
    /// manager (npm spec parsing, pattern detection, hex encoding, path
    /// normalization, source matching, lock-entry sorting). Kept in a nested
    /// module so the `proptest` imports stay scoped to these tests.
    mod proptest_package_manager {
        use super::*;
        use proptest::prelude::*;

        proptest! {
            // Totality: arbitrary input must never panic the parser.
            #[test]
            fn parse_npm_spec_never_panics(s in ".*") {
                let _ = parse_npm_spec(&s);
            }

            // A plain unscoped name without '@' parses to itself, no version.
            #[test]
            fn unscoped_no_at_returns_no_version(name in "[a-z][a-z0-9-]{0,20}") {
                let (parsed_name, version) = parse_npm_spec(&name);
                assert_eq!(parsed_name, name);
                assert!(version.is_none());
            }

            // "@scope/pkg" — the leading '@' is the scope marker, not a
            // version separator, so no version is extracted.
            #[test]
            fn scoped_no_version(scope in "[a-z]{1,10}", pkg in "[a-z]{1,10}") {
                let input = format!("@{scope}/{pkg}");
                let (parsed_name, version) = parse_npm_spec(&input);
                assert_eq!(parsed_name, input);
                assert!(version.is_none());
            }

            // "@scope/pkg@x.y.z" — only the trailing '@' splits off the version.
            #[test]
            fn scoped_with_version(
                scope in "[a-z]{1,10}",
                pkg in "[a-z]{1,10}",
                ver in "[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}",
            ) {
                let input = format!("@{scope}/{pkg}@{ver}");
                let (parsed_name, version) = parse_npm_spec(&input);
                assert_eq!(parsed_name, format!("@{scope}/{pkg}"));
                assert_eq!(version, Some(ver));
            }

            // Any of the override prefixes (!, +, -) makes a pattern.
            #[test]
            fn is_pattern_detects_prefix(
                prefix_idx in 0..3usize,
                suffix in "[a-z.]{1,20}",
            ) {
                let prefix = ["!", "+", "-"][prefix_idx];
                let input = format!("{prefix}{suffix}");
                assert!(is_pattern(&input));
            }

            // Glob wildcards (* or ?) anywhere in the string make a pattern.
            #[test]
            fn is_pattern_detects_wildcards(
                prefix in "[a-z]{0,5}",
                wild_idx in 0..2usize,
                suffix in "[a-z]{0,5}",
            ) {
                let wild = ["*", "?"][wild_idx];
                let input = format!("{prefix}{wild}{suffix}");
                assert!(is_pattern(&input));
            }

            // Plain alphanumeric strings are never patterns.
            #[test]
            fn plain_strings_not_patterns(s in "[a-z0-9]{1,20}") {
                assert!(!is_pattern(&s));
            }

            // split_patterns is a true partition: every input lands in
            // exactly one bucket, and the pattern bucket holds only patterns.
            #[test]
            fn split_patterns_partition_is_complete(
                plains in prop::collection::vec("[a-z]{1,10}", 0..5),
                patterns in prop::collection::vec("[!+*][a-z]{1,10}", 0..5),
            ) {
                let mut all = plains;
                all.extend(patterns);
                let (split_plain, split_patterns) = split_patterns(&all);
                assert_eq!(
                    split_plain.len() + split_patterns.len(),
                    all.len(),
                    "partition should be complete"
                );
                for p in &split_patterns {
                    assert!(is_pattern(p));
                }
            }

            // Two hex chars per input byte.
            #[test]
            fn hex_encode_output_length(bytes in prop::collection::vec(any::<u8>(), 0..64)) {
                let hex = hex_encode(&bytes);
                assert_eq!(hex.len(), bytes.len() * 2);
            }

            // Output alphabet is strictly lowercase hexadecimal.
            #[test]
            fn hex_encode_is_lowercase_hex(bytes in prop::collection::vec(any::<u8>(), 0..64)) {
                let hex = hex_encode(&bytes);
                assert!(hex.chars().all(|c| c.is_ascii_hexdigit() && !c.is_ascii_uppercase()));
            }

            // POSIX rendering never emits backslash separators.
            #[test]
            fn posix_string_no_backslashes(segments in prop::collection::vec("[a-z]{1,5}", 1..5)) {
                let path = PathBuf::from(segments.join("/"));
                let result = posix_string(&path);
                assert!(!result.contains('\\'));
            }

            // Rendering an already-rendered path is a no-op.
            #[test]
            fn posix_string_idempotent(segments in prop::collection::vec("[a-z]{1,5}", 1..5)) {
                let path = PathBuf::from(segments.join("/"));
                let first = posix_string(&path);
                let second = posix_string(&PathBuf::from(&first));
                assert_eq!(first, second);
            }

            // A single leading "./" is stripped from exact patterns.
            #[test]
            fn normalize_exact_pattern_strips_dot_slash(suffix in "[a-z]{1,15}") {
                let with_prefix = format!("./{suffix}");
                assert_eq!(normalize_exact_pattern(&with_prefix), suffix.as_str());
            }

            // Stripping happens at most once, and bare names pass through.
            #[test]
            fn normalize_exact_pattern_strips_at_most_one(suffix in "[a-z]{1,15}") {
                let input = format!("./{suffix}");
                let result = normalize_exact_pattern(&input);
                assert_eq!(result, suffix.as_str());
                assert_eq!(normalize_exact_pattern(&suffix), suffix.as_str());
            }

            // "host/user/repo" for known forges is recognized as a git URL.
            #[test]
            fn looks_like_git_url_known_hosts(
                host_idx in 0..4usize,
                path in "[a-z]{1,10}/[a-z]{1,10}",
            ) {
                let host = ["github.com", "gitlab.com", "bitbucket.org", "codeberg.org"][host_idx];
                let url = format!("{host}/{path}");
                assert!(looks_like_git_url(&url));
            }

            // A bare word with no host/path structure is not a git URL.
            #[test]
            fn looks_like_git_url_rejects_plain(name in "[a-z]{1,15}") {
                assert!(!looks_like_git_url(&name));
            }

            // "./", "../", and "~/" prefixes all mark a local path.
            #[test]
            fn looks_like_local_path_detects_relative(suffix in "[a-z]{1,10}") {
                assert!(looks_like_local_path(&format!("./{suffix}")));
                assert!(looks_like_local_path(&format!("../{suffix}")));
                assert!(looks_like_local_path(&format!("~/{suffix}")));
            }

            // Normalizing dot segments reaches a fixed point in one pass.
            #[test]
            fn normalize_dot_segments_idempotent(
                segments in prop::collection::vec("[a-z]{1,5}", 1..5),
            ) {
                let path = PathBuf::from(segments.join("/"));
                let first = normalize_dot_segments(&path);
                let second = normalize_dot_segments(&first);
                assert_eq!(first, second);
            }

            // Whitespace-only sources normalize away entirely.
            #[test]
            fn normalize_source_empty_returns_none(s in "[ \\t]{0,10}") {
                assert!(normalize_source(&s).is_none());
            }

            // Matching is reflexive for any source string.
            #[test]
            fn sources_match_reflexive(source in "[a-z]{1,15}") {
                assert!(sources_match(&source, &source));
            }

            // Matching is symmetric in its arguments.
            #[test]
            fn sources_match_symmetric(a in "[a-z]{1,10}", b in "[a-z]{1,10}") {
                assert_eq!(sources_match(&a, &b), sources_match(&b, &a));
            }

            // After sorting, adjacent entries are ordered by identity.
            #[test]
            fn sort_lock_entries_produces_sorted(
                identities in prop::collection::vec("[a-z]{1,10}", 1..10),
            ) {
                let mut entries: Vec<PackageLockEntry> = identities
                    .iter()
                    .map(|id| PackageLockEntry {
                        identity: id.clone(),
                        source: format!("npm:{id}"),
                        source_kind: PackageSourceKind::Npm,
                        resolved: PackageResolvedProvenance::Npm {
                            name: id.clone(),
                            requested_spec: id.clone(),
                            requested_version: None,
                            installed_version: "1.0.0".to_string(),
                            pinned: false,
                        },
                        digest_sha256: "abcd".to_string(),
                        trust_state: PackageEntryTrustState::Trusted,
                    })
                    .collect();
                sort_lock_entries(&mut entries);
                for pair in entries.windows(2) {
                    assert!(pair[0].identity <= pair[1].identity);
                }
            }
        }
    }
5599
5600 #[test]
5601 fn verify_and_record_lock_fails_closed_on_local_digest_mismatch() {
5602 let dir = tempfile::tempdir().expect("tempdir");
5603 let cwd = dir.path().to_path_buf();
5604 let pkg = cwd.join("local-pkg");
5605 fs::create_dir_all(&pkg).expect("create local package dir");
5606 let file_path = pkg.join("index.js");
5607 fs::write(&file_path, "export const version = 1;\n").expect("write extension file");
5608
5609 let manager = PackageManager::new(cwd);
5610 manager
5611 .verify_and_record_lock(
5612 "./local-pkg",
5613 PackageScope::Project,
5614 PackageLockAction::Install,
5615 )
5616 .expect("initial lock verification");
5617
5618 fs::write(&file_path, "export const version = 2;\n").expect("tamper package file");
5619
5620 let err = manager
5621 .verify_and_record_lock(
5622 "./local-pkg",
5623 PackageScope::Project,
5624 PackageLockAction::Install,
5625 )
5626 .expect_err("install verification should fail on digest mismatch");
5627 let message = err.to_string();
5628 assert!(
5629 message.contains("digest_mismatch"),
5630 "expected digest_mismatch error, got: {message}"
5631 );
5632 }
5633}