1use crate::agent_cx::AgentCx;
10use crate::config::Config;
11use crate::error::{Error, Result};
12use crate::extension_index::ExtensionIndexStore;
13use crate::extensions::{CompatibilityScanner, load_extension_manifest};
14use asupersync::channel::oneshot;
15use serde::{Deserialize, Serialize};
16use serde_json::Value;
17use sha2::{Digest, Sha256};
18use std::ffi::OsStr;
19use std::fmt::Write as _;
20use std::fs;
21use std::io::Write as _;
22use std::path::{Path, PathBuf};
23use std::process::{Command, Stdio};
24use std::thread;
25use tracing::{info, warn};
26
27fn finish_package_task<T, E>(
28 handle: thread::JoinHandle<()>,
29 recv_result: std::result::Result<Result<T>, E>,
30 cancelled_message: &'static str,
31) -> Result<T> {
32 if let Err(panic_payload) = handle.join() {
33 std::panic::resume_unwind(panic_payload);
34 }
35 recv_result.map_err(|_| Error::tool("package_manager", cancelled_message))?
36}
37
/// Where a package's installation and configuration live.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PackageScope {
    /// Installed for the current user (global settings).
    User,
    /// Installed for the current project (project settings).
    Project,
    /// Session-only; never persisted to settings, lockfiles, or audit logs.
    Temporary,
}
44
/// A package source declared in a settings file, tagged with its scope.
#[derive(Debug, Clone)]
pub struct PackageEntry {
    /// Scope whose settings file declared this package.
    pub scope: PackageScope,
    /// Raw source string as written in settings (npm spec, git URL, or local path).
    pub source: String,
    /// Optional restriction on which resources the package contributes.
    pub filter: Option<PackageFilter>,
}
51
/// Per-resource-type selection lists for a package.
///
/// NOTE(review): a `None` field presumably means "unrestricted" for that
/// resource type — confirm against the filter-application logic.
#[derive(Debug, Clone, Default)]
pub struct PackageFilter {
    pub extensions: Option<Vec<String>>,
    pub skills: Option<Vec<String>>,
    pub prompts: Option<Vec<String>>,
    pub themes: Option<Vec<String>>,
}
63
/// Provenance attached to every resolved resource path.
#[derive(Debug, Clone)]
pub struct PathMetadata {
    /// Source string the resource came from (`"local"` for top-level entries).
    pub source: String,
    /// Scope that contributed the resource.
    pub scope: PackageScope,
    /// Whether the resource came from a package or directly from settings.
    pub origin: ResourceOrigin,
    /// Install/settings directory the resource was resolved against, when known.
    pub base_dir: Option<PathBuf>,
}
71
/// Distinguishes package-contributed resources from top-level settings entries.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ResourceOrigin {
    /// Contributed by an installed package.
    Package,
    /// Listed directly at the top level of a settings file.
    TopLevel,
}
77
/// A single resolved resource: its path, enablement flag, and provenance.
#[derive(Debug, Clone)]
pub struct ResolvedResource {
    pub path: PathBuf,
    pub enabled: bool,
    pub metadata: PathMetadata,
}
84
/// All resource paths produced by a resolve pass, grouped by resource type.
#[derive(Debug, Clone, Default)]
pub struct ResolvedPaths {
    pub extensions: Vec<ResolvedResource>,
    pub skills: Vec<ResolvedResource>,
    pub prompts: Vec<ResolvedResource>,
    pub themes: Vec<ResolvedResource>,
}
92
/// Filesystem roots and settings-file paths a resolve pass operates on.
#[derive(Debug, Clone)]
pub struct ResolveRoots {
    /// Global (user) settings file.
    pub global_settings_path: PathBuf,
    /// Project settings file.
    pub project_settings_path: PathBuf,
    /// Base directory for global resources.
    pub global_base_dir: PathBuf,
    /// Base directory for project resources.
    pub project_base_dir: PathBuf,
    /// False when a config override is active; project settings are then ignored.
    pub project_settings_enabled: bool,
}
105
106impl ResolveRoots {
107 fn from_override(cwd: &Path, config_override_path: Option<&Path>) -> Self {
108 Self {
109 global_settings_path: config_override_path.map_or_else(
110 || Config::global_dir().join("settings.json"),
111 std::path::Path::to_path_buf,
112 ),
113 project_settings_path: project_settings_path(cwd),
114 global_base_dir: Config::global_dir(),
115 project_base_dir: cwd.join(Config::project_dir()),
116 project_settings_enabled: config_override_path.is_none(),
117 }
118 }
119
120 #[must_use]
122 pub fn from_env(cwd: &Path) -> Self {
123 Self::from_override(cwd, Config::config_path_override_from_env(cwd).as_deref())
124 }
125}
126
/// Installs, removes, updates, and resolves package sources for a working
/// directory, maintaining per-scope lockfiles and a trust-audit log.
#[derive(Debug, Clone)]
pub struct PackageManager {
    // Working directory used to locate project settings and local sources.
    cwd: PathBuf,
}
131
/// The operation that triggered a lockfile verification.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PackageLockAction {
    Install,
    Update,
}
137
/// On-disk lockfile (`packages.lock.json`) recording trusted package provenance.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct PackageLockfile {
    /// Schema identifier; see [`PACKAGE_LOCK_SCHEMA`].
    pub schema: String,
    /// Lock entries; sorted via `sort_lock_entries` before every write.
    #[serde(default)]
    pub entries: Vec<PackageLockEntry>,
}
144
145impl Default for PackageLockfile {
146 fn default() -> Self {
147 Self {
148 schema: PACKAGE_LOCK_SCHEMA.to_string(),
149 entries: Vec::new(),
150 }
151 }
152}
153
/// One lockfile record tying a package identity to its verified provenance.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct PackageLockEntry {
    /// Stable identity key: `npm:<name>`, `git:<repo>`, or `local:<path>`.
    pub identity: String,
    /// Raw source string the entry was created from.
    pub source: String,
    /// Installer family the source belongs to.
    pub source_kind: PackageSourceKind,
    /// Resolved provenance details (version / commit / path).
    pub resolved: PackageResolvedProvenance,
    /// SHA-256 digest computed over the installed package path.
    pub digest_sha256: String,
    /// Current trust verdict for this entry.
    pub trust_state: PackageEntryTrustState,
}
163
/// Installer family a package source belongs to (serialized in snake_case).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum PackageSourceKind {
    Npm,
    Git,
    Local,
}
171
/// Resolved provenance recorded in the lockfile.
///
/// Serialized with an internal `"kind"` tag in snake_case.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum PackageResolvedProvenance {
    /// npm package: what was requested versus what was actually installed.
    Npm {
        name: String,
        requested_spec: String,
        requested_version: Option<String>,
        installed_version: String,
        /// True when the source pins the package (no auto-update).
        pinned: bool,
    },
    /// git package: repo coordinates plus the commit actually checked out.
    Git {
        repo: String,
        host: String,
        path: String,
        requested_ref: Option<String>,
        resolved_commit: String,
        origin_url: Option<String>,
        /// True when the source pins the package (no auto-update).
        pinned: bool,
    },
    /// Local directory used in place.
    Local {
        resolved_path: String,
    },
}
195
/// Trust verdict stored per lockfile entry (serialized in snake_case).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum PackageEntryTrustState {
    /// Provenance verification succeeded.
    Trusted,
    /// Provenance verification rejected this entry.
    Rejected,
}
202
/// One append-only audit record describing a package trust transition.
///
/// Serialized as a single JSONL line in the per-scope trust-audit file.
#[derive(Debug, Clone, Serialize)]
pub struct PackageTrustAuditEvent {
    /// Always [`PACKAGE_TRUST_AUDIT_SCHEMA`].
    pub schema: &'static str,
    /// RFC 3339 UTC timestamp with millisecond precision.
    pub timestamp: String,
    /// Triggering operation: "install", "update", or "remove".
    pub action: String,
    /// Scope label the event applies to.
    pub scope: String,
    /// Raw package source string.
    pub source: String,
    /// Stable package identity key.
    pub identity: String,
    /// Trust state before the transition (e.g. "untracked", "trusted").
    pub from_state: String,
    /// Trust state after the transition (e.g. "trusted", "rejected", "removed").
    pub to_state: String,
    /// Machine-readable codes explaining the transition.
    pub reason_codes: Vec<String>,
    /// Suggested remediation, present when the transition was a rejection.
    pub remediation: Option<String>,
    /// Full lock-entry payload for forensics (best-effort serialization).
    pub details: serde_json::Value,
}
217
/// Outcome of an accepted lock transition: the states involved plus the
/// reason codes to record in the audit log.
#[derive(Debug, Clone)]
pub struct LockTransitionPlan {
    pub reason_codes: Vec<String>,
    pub from_state: String,
    pub to_state: String,
}
224
/// A rejected lock transition: a stable code, a human-readable reason, and a
/// remediation hint surfaced to the user.
#[derive(Debug, Clone)]
pub struct PackageLockMismatch {
    /// Stable machine-readable mismatch code (e.g. "npm_version_mismatch").
    pub code: &'static str,
    pub reason: String,
    pub remediation: String,
}
231
/// Schema tag written into `packages.lock.json`.
pub const PACKAGE_LOCK_SCHEMA: &str = "pi.package_lock.v1";
/// Schema tag written into every trust-audit JSONL event.
pub const PACKAGE_TRUST_AUDIT_SCHEMA: &str = "pi.package_trust_audit.v1";
234
235impl PackageManager {
    /// Creates a manager rooted at `cwd`.
    pub const fn new(cwd: PathBuf) -> Self {
        Self { cwd }
    }
239
240 pub fn resolve_install_source_alias(&self, source: &str) -> String {
244 let source = source.trim();
245 resolve_install_source_alias(source, &self.cwd).unwrap_or_else(|| source.to_string())
246 }
247
248 pub fn package_identity(&self, source: &str) -> String {
255 match parse_source(source, &self.cwd) {
256 ParsedSource::Npm { name, .. } => format!("npm:{name}"),
257 ParsedSource::Git { repo, .. } => format!("git:{repo}"),
258 ParsedSource::Local { path } => format!("local:{}", path.display()),
259 }
260 }
261
262 pub async fn install(&self, source: &str, scope: PackageScope) -> Result<()> {
263 let this = self.clone();
264 let source = source.to_string();
265 let (tx, mut rx) = oneshot::channel();
266
267 let handle = thread::spawn(move || {
268 let res = this.install_sync(&source, scope);
269 let cx = AgentCx::for_request();
270 let _ = tx.send(cx.cx(), res);
271 });
272
273 let cx = AgentCx::for_request();
274 let recv_result = rx.recv(cx.cx()).await;
275 finish_package_task(handle, recv_result, "Install task cancelled")
276 }
277
    /// Blocking implementation behind [`PackageManager::install`].
    ///
    /// Dispatches on the parsed source kind (npm / git / local), performs the
    /// installation, then verifies and records the lock entry for the scope.
    fn install_sync(&self, source: &str, scope: PackageScope) -> Result<()> {
        let source = validate_non_empty_source(source, "Package source")?;
        let parsed = parse_source(source, &self.cwd);
        match parsed {
            ParsedSource::Npm { spec, .. } => self.install_npm(&spec, scope),
            ParsedSource::Git {
                clone_source,
                host,
                path,
                r#ref,
                ..
            } => self.install_git(&clone_source, &host, &path, r#ref.as_deref(), scope),
            ParsedSource::Local { path } => {
                // Local sources are used in place; they only need to exist.
                if path.exists() {
                    Ok(())
                } else {
                    Err(Error::config(format!(
                        "Local package path does not exist: {}",
                        path.display()
                    )))
                }
            }
        }?;

        self.verify_and_record_lock(source, scope, PackageLockAction::Install)
    }
304
305 pub async fn remove(&self, source: &str, scope: PackageScope) -> Result<()> {
306 let this = self.clone();
307 let source = source.to_string();
308 let (tx, mut rx) = oneshot::channel();
309
310 let handle = thread::spawn(move || {
311 let res = this.remove_sync(&source, scope);
312 let cx = AgentCx::for_request();
313 let _ = tx.send(cx.cx(), res);
314 });
315
316 let cx = AgentCx::for_request();
317 let recv_result = rx.recv(cx.cx()).await;
318 finish_package_task(handle, recv_result, "Remove task cancelled")
319 }
320
321 fn remove_sync(&self, source: &str, scope: PackageScope) -> Result<()> {
322 let source = validate_non_empty_source(source, "Package source")?;
323 let parsed = parse_source(source, &self.cwd);
324 match parsed {
325 ParsedSource::Npm { name, .. } => self.uninstall_npm(&name, scope),
326 ParsedSource::Git { host, path, .. } => self.remove_git(&host, &path, scope),
327 ParsedSource::Local { .. } => Ok(()),
328 }?;
329
330 self.remove_lock_entry(source, scope)
331 }
332
333 pub async fn update_source(&self, source: &str, scope: PackageScope) -> Result<()> {
334 let this = self.clone();
335 let source = source.to_string();
336 let (tx, mut rx) = oneshot::channel();
337
338 let handle = thread::spawn(move || {
339 let res = this.update_source_sync(&source, scope);
340 let cx = AgentCx::for_request();
341 let _ = tx.send(cx.cx(), res);
342 });
343
344 let cx = AgentCx::for_request();
345 let recv_result = rx.recv(cx.cx()).await;
346 finish_package_task(handle, recv_result, "Update task cancelled")
347 }
348
    /// Blocking implementation behind [`PackageManager::update_source`].
    ///
    /// Pinned sources are deliberately not re-fetched; only unpinned npm/git
    /// sources are reinstalled or updated. The lock entry is re-verified and
    /// recorded in either case, so provenance stays current.
    fn update_source_sync(&self, source: &str, scope: PackageScope) -> Result<()> {
        let source = validate_non_empty_source(source, "Package source")?;
        let parsed = parse_source(source, &self.cwd);
        match parsed {
            ParsedSource::Npm { spec, pinned, .. } => {
                if !pinned {
                    self.install_npm(&spec, scope)?;
                }
            }
            ParsedSource::Git {
                clone_source,
                host,
                path,
                pinned,
                ..
            } => {
                if !pinned {
                    self.update_git(&clone_source, &host, &path, scope)?;
                }
            }
            ParsedSource::Local { .. } => {}
        }

        self.verify_and_record_lock(source, scope, PackageLockAction::Update)
    }
374
375 pub async fn installed_path(
376 &self,
377 source: &str,
378 scope: PackageScope,
379 ) -> Result<Option<PathBuf>> {
380 let this = self.clone();
381 let source = source.to_string();
382 let (tx, mut rx) = oneshot::channel();
383
384 let handle = thread::spawn(move || {
385 let res = this.installed_path_sync(&source, scope);
386 let cx = AgentCx::for_request();
387 let _ = tx.send(cx.cx(), res);
388 });
389
390 let cx = AgentCx::for_request();
391 let recv_result = rx.recv(cx.cx()).await;
392 finish_package_task(handle, recv_result, "Installed path lookup cancelled")
393 }
394
    /// Synchronous variant of [`PackageManager::installed_path`] for callers
    /// already running on a blocking thread.
    pub fn installed_path_blocking(
        &self,
        source: &str,
        scope: PackageScope,
    ) -> Result<Option<PathBuf>> {
        self.installed_path_sync(source, scope)
    }
403
404 fn installed_path_sync(&self, source: &str, scope: PackageScope) -> Result<Option<PathBuf>> {
405 let source = validate_non_empty_source(source, "Package source")?;
406 let parsed = parse_source(source, &self.cwd);
407 Ok(match parsed {
408 ParsedSource::Npm { name, .. } => self.npm_install_path(&name, scope)?,
409 ParsedSource::Git { host, path, .. } => {
410 Some(self.checked_git_install_path(&host, &path, scope)?)
411 }
412 ParsedSource::Local { path } => Some(path),
413 })
414 }
415
416 pub async fn list_packages(&self) -> Result<Vec<PackageEntry>> {
417 let this = self.clone();
418 let (tx, mut rx) = oneshot::channel();
419
420 let handle = thread::spawn(move || {
421 let res = this.list_packages_sync();
422 let cx = AgentCx::for_request();
423 let _ = tx.send(cx.cx(), res);
424 });
425
426 let cx = AgentCx::for_request();
427 let recv_result = rx.recv(cx.cx()).await;
428 finish_package_task(handle, recv_result, "List packages task cancelled")
429 }
430
431 pub fn list_packages_blocking(&self) -> Result<Vec<PackageEntry>> {
433 let roots = ResolveRoots::from_env(&self.cwd);
434 Self::list_packages_with_roots(&roots)
435 }
436
    /// Worker-thread body of [`PackageManager::list_packages`].
    fn list_packages_sync(&self) -> Result<Vec<PackageEntry>> {
        self.list_packages_blocking()
    }
440
441 fn list_packages_with_roots(roots: &ResolveRoots) -> Result<Vec<PackageEntry>> {
442 let global = list_packages_in_settings(&roots.global_settings_path)?
443 .into_iter()
444 .map(|mut p| {
445 p.scope = PackageScope::User;
446 p
447 });
448 let project = roots
449 .project_settings_enabled
450 .then(|| list_packages_in_settings(&roots.project_settings_path))
451 .transpose()?
452 .unwrap_or_default()
453 .into_iter()
454 .map(|mut p| {
455 p.scope = PackageScope::Project;
456 p
457 });
458 Ok(global.chain(project).collect())
459 }
460
461 pub fn resolve_package_resources_blocking(&self) -> Result<Option<ResolvedPaths>> {
468 let roots = ResolveRoots::from_env(&self.cwd);
469 self.resolve_package_resources_with_roots_blocking(&roots)
470 }
471
    /// Resolves only package-contributed resources (no top-level settings
    /// entries, no auto-discovery). Returns `Ok(None)` as soon as any
    /// configured npm/git package is found to be missing on disk, signalling
    /// that installation is still required.
    ///
    /// NOTE(review): unlike `resolve_with_roots`, project packages are
    /// included here without checking `roots.project_settings_enabled` —
    /// confirm whether `read_project_settings_snapshot` already gates on that
    /// flag, otherwise these two paths disagree under a config override.
    fn resolve_package_resources_with_roots_blocking(
        &self,
        roots: &ResolveRoots,
    ) -> Result<Option<ResolvedPaths>> {
        let global = read_settings_snapshot(&roots.global_settings_path)?;
        let project = read_project_settings_snapshot(roots)?;

        // Global packages first, then project packages; duplicates collapse
        // via dedupe_packages.
        let mut all_packages: Vec<ScopedPackage> = Vec::new();
        all_packages.extend(global.packages.iter().cloned().map(|pkg| ScopedPackage {
            pkg,
            scope: PackageScope::User,
        }));
        all_packages.extend(project.packages.iter().cloned().map(|pkg| ScopedPackage {
            pkg,
            scope: PackageScope::Project,
        }));
        let package_sources = self.dedupe_packages(all_packages);

        let mut accumulator = ResourceAccumulator::new();

        for entry in package_sources {
            let source_str = entry.pkg.source.trim();
            if source_str.is_empty() {
                continue;
            }

            let parsed = parse_source(source_str, &self.cwd);
            let mut metadata = PathMetadata {
                source: source_str.to_string(),
                scope: entry.scope,
                origin: ResourceOrigin::Package,
                base_dir: None,
            };

            match parsed {
                ParsedSource::Local { path } => {
                    Self::resolve_local_extension_source(
                        &path,
                        &mut accumulator,
                        entry.pkg.filter.as_ref(),
                        &mut metadata,
                        entry.scope == PackageScope::Temporary,
                    )?;
                }
                ParsedSource::Npm { name, .. } => {
                    // Fall back to a cwd-relative node_modules path when no
                    // scoped install path is known.
                    let installed_path = self
                        .npm_install_path(&name, entry.scope)?
                        .unwrap_or_else(|| self.cwd.join("node_modules").join(&name));

                    // Package not installed yet: tell the caller with `None`.
                    if !installed_path.exists() {
                        return Ok(None);
                    }

                    metadata.base_dir = Some(installed_path.clone());
                    Self::collect_package_resources(
                        &installed_path,
                        &mut accumulator,
                        entry.pkg.filter.as_ref(),
                        &metadata,
                    )?;
                }
                ParsedSource::Git { host, path, .. } => {
                    let installed_path =
                        self.checked_git_install_path(&host, &path, entry.scope)?;
                    if !installed_path.exists() {
                        return Ok(None);
                    }

                    metadata.base_dir = Some(installed_path.clone());
                    Self::collect_package_resources(
                        &installed_path,
                        &mut accumulator,
                        entry.pkg.filter.as_ref(),
                        &metadata,
                    )?;
                }
            }
        }

        Ok(Some(accumulator.into_resolved_paths()))
    }
553
554 pub async fn ensure_packages_installed(&self) -> Result<Vec<PackageEntry>> {
557 let packages = self.list_packages().await?;
558 let installed = self.ensure_package_entries_installed(packages).await?;
559
560 if let Err(err) = self.reconcile_all_lockfiles().await {
564 tracing::warn!(
565 event = "pkg.lockfile.reconcile.failed",
566 error = %err,
567 "Lockfile reconciliation failed during ensure_packages_installed"
568 );
569 }
570
571 Ok(installed)
572 }
573
    /// Reconciles the lockfiles of all persistent scopes (user and project)
    /// against the live settings, returning every pruned lock entry.
    ///
    /// A failure in one scope is logged and does not stop the other scope from
    /// being reconciled. The work runs on a dedicated OS thread.
    pub async fn reconcile_all_lockfiles(&self) -> Result<Vec<PackageLockEntry>> {
        let this = self.clone();
        let (tx, mut rx) = oneshot::channel();

        let handle = thread::spawn(move || {
            let mut pruned = Vec::new();
            for scope in [PackageScope::User, PackageScope::Project] {
                match this.reconcile_lockfile_sync(scope) {
                    Ok(mut entries) => pruned.append(&mut entries),
                    Err(err) => tracing::warn!(
                        event = "pkg.lockfile.reconcile.scope_failed",
                        scope = scope_label(scope),
                        error = %err,
                        "Lockfile reconciliation failed for this scope; other scopes will still be reconciled"
                    ),
                }
            }
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), Ok::<_, Error>(pruned));
        });

        let cx = AgentCx::for_request();
        let recv_result = rx.recv(cx.cx()).await;
        finish_package_task(handle, recv_result, "Lockfile reconcile task cancelled")
    }
606
607 async fn ensure_package_entries_installed(
608 &self,
609 packages: Vec<PackageEntry>,
610 ) -> Result<Vec<PackageEntry>> {
611 let mut installed = Vec::new();
612
613 for entry in packages {
614 match self.installed_path(&entry.source, entry.scope).await? {
615 Some(path) if path.exists() => continue,
616 _ => {}
617 }
618
619 self.install(&entry.source, entry.scope).await?;
620 installed.push(entry);
621 }
622
623 Ok(installed)
624 }
625
626 pub async fn resolve(&self) -> Result<ResolvedPaths> {
633 let roots = ResolveRoots::from_env(&self.cwd);
634 self.resolve_with_roots(&roots).await
635 }
636
    /// Fully resolves extensions/skills/prompts/themes from packages,
    /// top-level settings entries, and auto-discovery, then emits
    /// compatibility ledgers for the resolved extensions.
    ///
    /// Runs in three phases:
    /// 1. read settings snapshots and dedupe package sources (worker thread);
    /// 2. resolve package sources (async, boxed to keep the future small);
    /// 3. layer top-level entries and auto-discovered resources on top
    ///    (second worker thread).
    pub async fn resolve_with_roots(&self, roots: &ResolveRoots) -> Result<ResolvedPaths> {
        let this_for_setup = self.clone();
        let roots_for_setup = roots.clone();
        let (tx, mut rx) = oneshot::channel();

        // Phase 1: settings snapshots + package dedupe on a worker thread.
        let handle = thread::spawn(move || {
            let res: Result<(SettingsSnapshot, SettingsSnapshot, Vec<ScopedPackage>)> = (|| {
                let global = read_settings_snapshot(&roots_for_setup.global_settings_path)?;
                let project = read_project_settings_snapshot(&roots_for_setup)?;

                let mut all_packages: Vec<ScopedPackage> = Vec::new();
                all_packages.extend(global.packages.iter().cloned().map(|pkg| ScopedPackage {
                    pkg,
                    scope: PackageScope::User,
                }));
                // Project packages are only considered when project settings
                // are enabled (i.e. no config override is active).
                if roots_for_setup.project_settings_enabled {
                    all_packages.extend(project.packages.iter().cloned().map(|pkg| {
                        ScopedPackage {
                            pkg,
                            scope: PackageScope::Project,
                        }
                    }));
                }
                let package_sources = this_for_setup.dedupe_packages(all_packages);
                Ok((global, project, package_sources))
            })();
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), res);
        });

        let cx = AgentCx::for_request();
        let recv_result = rx.recv(cx.cx()).await;
        let (global, project, package_sources) =
            finish_package_task(handle, recv_result, "Resolve setup task cancelled")?;

        let mut accumulator = ResourceAccumulator::new();

        // Phase 2: resolve the package sources themselves.
        Box::pin(self.resolve_package_sources(&package_sources, &mut accumulator)).await?;

        let this = self.clone();
        let roots = roots.clone();
        let (tx, mut rx) = oneshot::channel();
        // Wrapped in a Mutex solely to move the accumulator into the thread
        // and regain mutable access there.
        let accumulator = std::sync::Mutex::new(accumulator);

        // Phase 3: top-level settings entries + auto-discovery.
        let handle = thread::spawn(move || {
            let mut accumulator = accumulator
                .lock()
                .unwrap_or_else(std::sync::PoisonError::into_inner);

            for resource_type in ResourceType::all() {
                let target = accumulator.target_mut(resource_type);
                Self::resolve_local_entries(
                    global.entries_for(resource_type),
                    resource_type,
                    target,
                    &PathMetadata {
                        source: "local".to_string(),
                        scope: PackageScope::User,
                        origin: ResourceOrigin::TopLevel,
                        base_dir: Some(roots.global_base_dir.clone()),
                    },
                    &roots.global_base_dir,
                );

                if roots.project_settings_enabled {
                    Self::resolve_local_entries(
                        project.entries_for(resource_type),
                        resource_type,
                        target,
                        &PathMetadata {
                            source: "local".to_string(),
                            scope: PackageScope::Project,
                            origin: ResourceOrigin::TopLevel,
                            base_dir: Some(roots.project_base_dir.clone()),
                        },
                        &roots.project_base_dir,
                    );
                }
            }

            this.add_auto_discovered_resources(
                &mut accumulator,
                &global,
                &project,
                &roots.global_base_dir,
                &roots.project_base_dir,
                roots.project_settings_enabled,
            );

            let resolved = accumulator.clone().into_resolved_paths();
            drop(accumulator);
            maybe_emit_compat_ledgers(&resolved.extensions);
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), Ok(resolved));
        });

        let cx = AgentCx::for_request();
        let recv_result = rx.recv(cx.cx()).await;
        finish_package_task(handle, recv_result, "Resolve processing task cancelled")
    }
744
    /// Resolves an explicit list of extension sources (e.g. CLI `-e` flags)
    /// without consulting settings files.
    ///
    /// Scope precedence: `temporary` wins over `local` (project), otherwise
    /// user scope. Compatibility ledgers are emitted for the resolved
    /// extensions on a worker thread.
    pub async fn resolve_extension_sources(
        &self,
        sources: &[String],
        options: ResolveExtensionSourcesOptions,
    ) -> Result<ResolvedPaths> {
        let scope = if options.temporary {
            PackageScope::Temporary
        } else if options.local {
            PackageScope::Project
        } else {
            PackageScope::User
        };

        let mut accumulator = ResourceAccumulator::new();
        // Validate every source up front; the first empty one aborts resolve.
        let package_sources = sources
            .iter()
            .map(|source| {
                Ok(ScopedPackage {
                    pkg: PackageSpec {
                        source: validate_non_empty_source(source, "Extension source")?.to_string(),
                        filter: None,
                    },
                    scope,
                })
            })
            .collect::<Result<Vec<_>>>()?;

        Box::pin(self.resolve_package_sources(&package_sources, &mut accumulator)).await?;

        let (tx, mut rx) = oneshot::channel();
        // Wrapped in a Mutex solely to move the accumulator into the thread.
        let accumulator = std::sync::Mutex::new(accumulator);

        let handle = thread::spawn(move || {
            let resolved = {
                let accumulator = accumulator
                    .lock()
                    .unwrap_or_else(std::sync::PoisonError::into_inner);
                accumulator.clone().into_resolved_paths()
            };
            maybe_emit_compat_ledgers(&resolved.extensions);
            let cx = AgentCx::for_request();
            let _ = tx.send(cx.cx(), Ok(resolved));
        });

        let cx = AgentCx::for_request();
        let recv_result = rx.recv(cx.cx()).await;
        finish_package_task(handle, recv_result, "Resolve extensions task cancelled")
    }
796
797 pub async fn add_package_source(&self, source: &str, scope: PackageScope) -> Result<()> {
798 let this = self.clone();
799 let source = source.to_string();
800 let (tx, mut rx) = oneshot::channel();
801
802 let handle = thread::spawn(move || {
803 let res = this.add_package_source_sync(&source, scope);
804 let cx = AgentCx::for_request();
805 let _ = tx.send(cx.cx(), res);
806 });
807
808 let cx = AgentCx::for_request();
809 let recv_result = rx.recv(cx.cx()).await;
810 finish_package_task(handle, recv_result, "Add source task cancelled")
811 }
812
813 fn add_package_source_sync(&self, source: &str, scope: PackageScope) -> Result<()> {
814 let path = match scope {
815 PackageScope::User => global_settings_path(&self.cwd),
816 PackageScope::Project => project_settings_path(&self.cwd),
817 PackageScope::Temporary => {
818 return Err(Error::config(
819 "Temporary packages cannot be persisted to settings".to_string(),
820 ));
821 }
822 };
823 update_package_sources(&path, source, UpdateAction::Add, &self.cwd)
824 }
825
826 pub async fn remove_package_source(&self, source: &str, scope: PackageScope) -> Result<()> {
827 let this = self.clone();
828 let source = source.to_string();
829 let (tx, mut rx) = oneshot::channel();
830
831 let handle = thread::spawn(move || {
832 let res = this.remove_package_source_sync(&source, scope);
833 let cx = AgentCx::for_request();
834 let _ = tx.send(cx.cx(), res);
835 });
836
837 let cx = AgentCx::for_request();
838 let recv_result = rx.recv(cx.cx()).await;
839 finish_package_task(handle, recv_result, "Remove source task cancelled")
840 }
841
842 fn remove_package_source_sync(&self, source: &str, scope: PackageScope) -> Result<()> {
843 let path = match scope {
844 PackageScope::User => global_settings_path(&self.cwd),
845 PackageScope::Project => project_settings_path(&self.cwd),
846 PackageScope::Temporary => {
847 return Err(Error::config(
848 "Temporary packages cannot be persisted to settings".to_string(),
849 ));
850 }
851 };
852 update_package_sources(&path, source, UpdateAction::Remove, &self.cwd)
853 }
854
855 fn lockfile_path_for_scope(&self, scope: PackageScope) -> Option<PathBuf> {
856 match scope {
857 PackageScope::User => Some(Config::global_dir().join("packages.lock.json")),
858 PackageScope::Project => Some(
859 self.cwd
860 .join(Config::project_dir())
861 .join("packages.lock.json"),
862 ),
863 PackageScope::Temporary => None,
864 }
865 }
866
867 fn trust_audit_path_for_scope(&self, scope: PackageScope) -> Option<PathBuf> {
868 match scope {
869 PackageScope::User => Some(Config::global_dir().join("package-trust-audit.jsonl")),
870 PackageScope::Project => Some(
871 self.cwd
872 .join(Config::project_dir())
873 .join("package-trust-audit.jsonl"),
874 ),
875 PackageScope::Temporary => None,
876 }
877 }
878
    /// Builds a fresh lock entry for `source`, evaluates the transition against
    /// any existing entry, and records the outcome in both the lockfile and the
    /// trust-audit log.
    ///
    /// On an accepted transition the lockfile is rewritten atomically with the
    /// new entry; on a mismatch the lockfile is left untouched, a rejection
    /// event is appended to the audit log, and a verification error is
    /// returned. Temporary scope has no lockfile and is a no-op.
    fn verify_and_record_lock(
        &self,
        source: &str,
        scope: PackageScope,
        action: PackageLockAction,
    ) -> Result<()> {
        let Some(lockfile_path) = self.lockfile_path_for_scope(scope) else {
            return Ok(());
        };

        let candidate = self.build_lock_entry(source, scope)?;
        let mut lockfile = read_package_lockfile(&lockfile_path)?;
        let existing_idx = lockfile
            .entries
            .iter()
            .position(|entry| entry.identity == candidate.identity);
        let existing = existing_idx.and_then(|idx| lockfile.entries.get(idx).cloned());

        match evaluate_lock_transition(existing.as_ref(), &candidate, action) {
            Ok(transition) => {
                // Replace in place when the identity already exists, otherwise append.
                if let Some(idx) = existing_idx {
                    lockfile.entries[idx] = candidate.clone();
                } else {
                    lockfile.entries.push(candidate.clone());
                }
                sort_lock_entries(&mut lockfile.entries);
                write_package_lockfile_atomic(&lockfile_path, &lockfile)?;

                let event = PackageTrustAuditEvent {
                    schema: PACKAGE_TRUST_AUDIT_SCHEMA,
                    timestamp: chrono::Utc::now()
                        .to_rfc3339_opts(chrono::SecondsFormat::Millis, true),
                    action: match action {
                        PackageLockAction::Install => "install",
                        PackageLockAction::Update => "update",
                    }
                    .to_string(),
                    scope: scope_label(scope).to_string(),
                    source: source.to_string(),
                    identity: candidate.identity.clone(),
                    from_state: transition.from_state,
                    to_state: transition.to_state,
                    reason_codes: transition.reason_codes,
                    remediation: None,
                    // Best-effort serialization; never fail the lock write over audit details.
                    details: serde_json::to_value(&candidate)
                        .unwrap_or_else(|_| serde_json::json!({})),
                };
                self.append_trust_audit_event(scope, &event)?;
                Ok(())
            }
            Err(mismatch) => {
                // Record the rejection (with remediation) before surfacing the error.
                let from_state = existing.as_ref().map_or_else(
                    || "untracked".to_string(),
                    |entry| trust_state_label(entry.trust_state).to_string(),
                );
                let event = PackageTrustAuditEvent {
                    schema: PACKAGE_TRUST_AUDIT_SCHEMA,
                    timestamp: chrono::Utc::now()
                        .to_rfc3339_opts(chrono::SecondsFormat::Millis, true),
                    action: match action {
                        PackageLockAction::Install => "install",
                        PackageLockAction::Update => "update",
                    }
                    .to_string(),
                    scope: scope_label(scope).to_string(),
                    source: source.to_string(),
                    identity: candidate.identity.clone(),
                    from_state,
                    to_state: "rejected".to_string(),
                    reason_codes: vec![mismatch.code.to_string()],
                    remediation: Some(mismatch.remediation.clone()),
                    details: serde_json::to_value(&candidate)
                        .unwrap_or_else(|_| serde_json::json!({})),
                };
                self.append_trust_audit_event(scope, &event)?;

                Err(verification_error(
                    mismatch.code,
                    &mismatch.reason,
                    &mismatch.remediation,
                ))
            }
        }
    }
963
    /// Prunes lockfile entries for `scope` whose identities no longer appear
    /// in that scope's settings file, returning the removed entries.
    ///
    /// Returns an empty list without touching anything when: the scope has no
    /// lockfile (Temporary), the lockfile or settings file does not exist, or
    /// project settings are disabled for the Project scope. An audit event is
    /// appended best-effort for each pruned entry; audit failures are logged
    /// and do not fail the reconcile.
    fn reconcile_lockfile_sync(&self, scope: PackageScope) -> Result<Vec<PackageLockEntry>> {
        let Some(lockfile_path) = self.lockfile_path_for_scope(scope) else {
            return Ok(Vec::new());
        };
        if !lockfile_path.exists() {
            return Ok(Vec::new());
        }

        let roots = ResolveRoots::from_env(&self.cwd);
        let settings_path = match scope {
            PackageScope::User => &roots.global_settings_path,
            PackageScope::Project => {
                // With a config override active, project settings are out of
                // play; do not prune against a settings file we would ignore.
                if !roots.project_settings_enabled {
                    return Ok(Vec::new());
                }
                &roots.project_settings_path
            }
            PackageScope::Temporary => return Ok(Vec::new()),
        };
        if !settings_path.exists() {
            return Ok(Vec::new());
        }
        let live_sources: Vec<PackageEntry> = list_packages_in_settings(settings_path)?;

        // Identities currently declared in settings; everything else is stale.
        let live_identities: std::collections::HashSet<String> = live_sources
            .iter()
            .map(|entry| self.package_identity(&entry.source))
            .collect();

        let mut lockfile = read_package_lockfile(&lockfile_path)?;
        let mut pruned = Vec::new();
        lockfile.entries.retain(|entry| {
            if live_identities.contains(&entry.identity) {
                true
            } else {
                pruned.push(entry.clone());
                false
            }
        });

        // Nothing stale: avoid rewriting the lockfile.
        if pruned.is_empty() {
            return Ok(Vec::new());
        }

        sort_lock_entries(&mut lockfile.entries);
        write_package_lockfile_atomic(&lockfile_path, &lockfile)?;

        for removed in &pruned {
            let event = PackageTrustAuditEvent {
                schema: PACKAGE_TRUST_AUDIT_SCHEMA,
                timestamp: chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Millis, true),
                action: "remove".to_string(),
                scope: scope_label(scope).to_string(),
                source: removed.source.clone(),
                identity: removed.identity.clone(),
                from_state: trust_state_label(removed.trust_state).to_string(),
                to_state: "removed".to_string(),
                reason_codes: vec!["reconciled".to_string()],
                remediation: None,
                details: serde_json::to_value(removed).unwrap_or_else(|_| serde_json::json!({})),
            };
            if let Err(err) = self.append_trust_audit_event(scope, &event) {
                tracing::warn!(
                    event = "pkg.lockfile.reconcile.audit_failed",
                    scope = scope_label(scope),
                    identity = %removed.identity,
                    error = %err,
                    "Failed to append trust-audit event for reconciled lockfile entry"
                );
            }
        }

        Ok(pruned)
    }
1067
    /// Removes the lockfile entry matching `source`'s identity for `scope`,
    /// rewriting the lockfile atomically and appending a "removed" audit event.
    ///
    /// No-ops when the scope has no lockfile (Temporary) or no matching entry
    /// exists.
    fn remove_lock_entry(&self, source: &str, scope: PackageScope) -> Result<()> {
        let Some(lockfile_path) = self.lockfile_path_for_scope(scope) else {
            return Ok(());
        };

        let identity = self.package_identity(source);
        let mut lockfile = read_package_lockfile(&lockfile_path)?;
        let Some(idx) = lockfile
            .entries
            .iter()
            .position(|entry| entry.identity == identity)
        else {
            return Ok(());
        };

        let removed = lockfile.entries.remove(idx);
        sort_lock_entries(&mut lockfile.entries);
        write_package_lockfile_atomic(&lockfile_path, &lockfile)?;

        let event = PackageTrustAuditEvent {
            schema: PACKAGE_TRUST_AUDIT_SCHEMA,
            timestamp: chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Millis, true),
            action: "remove".to_string(),
            scope: scope_label(scope).to_string(),
            source: source.to_string(),
            identity,
            from_state: trust_state_label(removed.trust_state).to_string(),
            to_state: "removed".to_string(),
            reason_codes: vec!["removed".to_string()],
            remediation: None,
            details: serde_json::to_value(&removed).unwrap_or_else(|_| serde_json::json!({})),
        };
        self.append_trust_audit_event(scope, &event)?;
        Ok(())
    }
1103
    /// Appends one JSONL trust-audit event for `scope`, creating the parent
    /// directory as needed. Temporary scope has no audit file and is a no-op.
    ///
    /// On Unix the file is created with mode 0o600 so the audit trail is only
    /// readable by the owning user.
    fn append_trust_audit_event(
        &self,
        scope: PackageScope,
        event: &PackageTrustAuditEvent,
    ) -> Result<()> {
        let Some(path) = self.trust_audit_path_for_scope(scope) else {
            return Ok(());
        };
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent)?;
        }

        let payload = serde_json::to_string(event)?;
        let mut opts = fs::OpenOptions::new();
        opts.create(true).append(true);
        #[cfg(unix)]
        {
            use std::os::unix::fs::OpenOptionsExt;
            opts.mode(0o600);
        }
        let mut file = opts.open(path)?;
        writeln!(file, "{payload}")?;
        Ok(())
    }
1128
1129 #[allow(clippy::too_many_lines)]
    /// Builds a `PackageLockEntry` for `source` in `scope` by inspecting the
    /// artifact that is actually installed on disk.
    ///
    /// The entry records the source kind, resolved provenance (npm version,
    /// git commit, or canonical local path), a SHA-256 digest of the
    /// installed tree, and marks the package `Trusted`.
    ///
    /// # Errors
    /// Fails when the source is empty, the install directory is missing,
    /// version metadata cannot be read, or a pinned npm version / git ref no
    /// longer matches what is installed.
    #[allow(clippy::too_many_lines)]
    fn build_lock_entry(&self, source: &str, scope: PackageScope) -> Result<PackageLockEntry> {
        let source = validate_non_empty_source(source, "Package source")?;
        let parsed = parse_source(source, &self.cwd);
        match parsed {
            ParsedSource::Npm { spec, name, pinned } => {
                // A `None` install path means there is no concrete directory
                // to digest, so lock verification cannot proceed.
                let installed_path = self.npm_install_path(&name, scope)?.ok_or_else(|| {
                    Error::tool(
                        "package_manager",
                        "npm lock verification requires a concrete install path",
                    )
                })?;
                if !installed_path.exists() {
                    return Err(Error::tool(
                        "package_manager",
                        format!(
                            "Installed npm package path is missing for lock verification: {}",
                            installed_path.display()
                        ),
                    ));
                }

                let (_, requested_version) = parse_npm_spec(&spec);
                let installed_version = read_installed_npm_version(&installed_path).ok_or_else(|| {
                    verification_error(
                        "npm_version_missing",
                        &format!(
                            "Missing package.json version for installed npm package at {}",
                            installed_path.display()
                        ),
                        "Reinstall the package (`pi remove <source>` then `pi install <source>`) and retry.",
                    )
                })?;

                // Only exact pins are enforced here; ranges/tags are ignored.
                if let Some(expected) = requested_version
                    .as_deref()
                    .filter(|value| is_exact_npm_version(value))
                {
                    if expected != installed_version {
                        return Err(verification_error(
                            "npm_version_mismatch",
                            &format!(
                                "Pinned npm version mismatch for {name}: expected {expected}, got {installed_version}",
                            ),
                            "Pin the intended version explicitly and reinstall to refresh trusted provenance.",
                        ));
                    }
                }

                let digest_sha256 = digest_package_path(&installed_path)?;
                Ok(PackageLockEntry {
                    identity: self.package_identity(source),
                    source: source.to_string(),
                    source_kind: PackageSourceKind::Npm,
                    resolved: PackageResolvedProvenance::Npm {
                        name,
                        requested_spec: spec,
                        requested_version,
                        installed_version,
                        pinned,
                    },
                    digest_sha256,
                    trust_state: PackageEntryTrustState::Trusted,
                })
            }
            ParsedSource::Git {
                repo,
                host,
                path,
                r#ref,
                pinned,
                ..
            } => {
                let installed_path = self.checked_git_install_path(&host, &path, scope)?;
                if !installed_path.exists() {
                    return Err(Error::tool(
                        "package_manager",
                        format!(
                            "Installed git package path is missing for lock verification: {}",
                            installed_path.display()
                        ),
                    ));
                }

                // The commit actually checked out in the working tree.
                let resolved_commit =
                    run_command_capture("git", ["rev-parse", "HEAD"], Some(&installed_path))?;
                // A requested ref must still resolve to the checked-out HEAD.
                if let Some(expected_ref) = r#ref.as_ref() {
                    let expected_commit = run_command_capture(
                        "git",
                        ["rev-parse", expected_ref.as_str()],
                        Some(&installed_path),
                    )?;
                    if expected_commit != resolved_commit {
                        return Err(verification_error(
                            "git_ref_mismatch",
                            &format!(
                                "Pinned git ref mismatch for {repo}: ref {expected_ref} resolved to {expected_commit}, but HEAD is {resolved_commit}",
                            ),
                            "Fetch/reset the repo and reinstall from the intended pinned ref.",
                        ));
                    }
                }

                // Best effort: a missing/blank origin URL is not an error.
                let origin_url = run_command_capture(
                    "git",
                    ["config", "--get", "remote.origin.url"],
                    Some(&installed_path),
                )
                .ok()
                .filter(|value| !value.trim().is_empty());
                let digest_sha256 = digest_package_path(&installed_path)?;

                Ok(PackageLockEntry {
                    identity: self.package_identity(source),
                    source: source.to_string(),
                    source_kind: PackageSourceKind::Git,
                    resolved: PackageResolvedProvenance::Git {
                        repo,
                        host,
                        path,
                        requested_ref: r#ref,
                        resolved_commit,
                        origin_url,
                        pinned,
                    },
                    digest_sha256,
                    trust_state: PackageEntryTrustState::Trusted,
                })
            }
            ParsedSource::Local { path } => {
                if !path.exists() {
                    return Err(Error::config(format!(
                        "Local package path does not exist: {}",
                        path.display()
                    )));
                }

                let digest_sha256 = digest_package_path(&path)?;
                // Canonicalize when possible; fall back to the given path.
                let resolved_path = path
                    .canonicalize()
                    .unwrap_or(path)
                    .to_string_lossy()
                    .to_string();
                Ok(PackageLockEntry {
                    identity: self.package_identity(source),
                    source: source.to_string(),
                    source_kind: PackageSourceKind::Local,
                    resolved: PackageResolvedProvenance::Local { resolved_path },
                    digest_sha256,
                    trust_state: PackageEntryTrustState::Trusted,
                })
            }
        }
    }
1283
1284 fn project_npm_root(&self) -> PathBuf {
1285 self.cwd.join(Config::project_dir()).join("npm")
1286 }
1287
1288 fn project_git_root(&self) -> PathBuf {
1289 self.cwd.join(Config::project_dir()).join("git")
1290 }
1291
1292 #[allow(clippy::unused_self)]
1293 fn global_git_root(&self) -> PathBuf {
1294 Config::global_dir().join("git")
1295 }
1296
1297 #[allow(clippy::unused_self)]
1298 fn global_npm_root(&self) -> Result<PathBuf> {
1299 let output = Command::new("npm")
1300 .args(["root", "-g"])
1301 .stdin(Stdio::null())
1302 .stdout(Stdio::piped())
1303 .stderr(Stdio::piped())
1304 .output()
1305 .map_err(|e| Error::tool("npm", format!("Failed to spawn npm: {e}")))?;
1306
1307 if !output.status.success() {
1308 let stdout = String::from_utf8_lossy(&output.stdout);
1309 let stderr = String::from_utf8_lossy(&output.stderr);
1310 let mut msg = String::from("npm root -g failed");
1311 if let Some(code) = output.status.code() {
1312 let _ = write!(msg, " (exit {code})");
1313 }
1314 if !stdout.trim().is_empty() {
1315 let _ = write!(msg, "\nstdout:\n{stdout}");
1316 }
1317 if !stderr.trim().is_empty() {
1318 let _ = write!(msg, "\nstderr:\n{stderr}");
1319 }
1320 return Err(Error::tool("npm", msg));
1321 }
1322
1323 let root = String::from_utf8_lossy(&output.stdout).trim().to_string();
1324 if root.is_empty() {
1325 return Err(Error::tool("npm", "npm root -g returned empty output"));
1326 }
1327
1328 Ok(PathBuf::from(root))
1329 }
1330
1331 fn npm_prefix_root(&self, scope: PackageScope) -> Option<PathBuf> {
1332 match scope {
1333 PackageScope::Project => Some(self.project_npm_root()),
1334 PackageScope::Temporary => Some(temporary_dir("npm", None)),
1335 PackageScope::User => None,
1336 }
1337 }
1338
1339 fn npm_install_path(&self, name: &str, scope: PackageScope) -> Result<Option<PathBuf>> {
1340 Ok(Some(match self.npm_prefix_root(scope) {
1341 Some(prefix_root) => prefix_root.join("node_modules").join(name),
1342 None => self.global_npm_root()?.join(name),
1343 }))
1344 }
1345
1346 fn git_root(&self, scope: PackageScope) -> Option<PathBuf> {
1347 match scope {
1348 PackageScope::Temporary => None,
1349 PackageScope::User => Some(self.global_git_root()),
1350 PackageScope::Project => Some(self.project_git_root()),
1351 }
1352 }
1353
1354 fn git_install_path(&self, host: &str, repo_path: &str, scope: PackageScope) -> PathBuf {
1355 match scope {
1356 PackageScope::Temporary => temporary_dir(&format!("git-{host}"), Some(repo_path)),
1357 PackageScope::User => self.global_git_root().join(host).join(repo_path),
1358 PackageScope::Project => self.project_git_root().join(host).join(repo_path),
1359 }
1360 }
1361
1362 fn checked_git_install_path(
1363 &self,
1364 host: &str,
1365 repo_path: &str,
1366 scope: PackageScope,
1367 ) -> Result<PathBuf> {
1368 if host.trim().is_empty() || repo_path.trim().is_empty() {
1369 return Err(Error::tool(
1370 "package_manager",
1371 "Invalid git package source: remote repositories must include both a host and repository path",
1372 ));
1373 }
1374
1375 Ok(self.git_install_path(host, repo_path, scope))
1376 }
1377
1378 fn install_npm(&self, spec: &str, scope: PackageScope) -> Result<()> {
1379 let (name, _) = parse_npm_spec(spec);
1380 if let Some(install_root) = self.npm_prefix_root(scope) {
1381 ensure_npm_project(&install_root)?;
1382 run_command(
1383 "npm",
1384 [
1385 "install",
1386 "--prefix",
1387 install_root.to_string_lossy().as_ref(),
1388 "--",
1389 spec,
1390 ],
1391 None,
1392 )?;
1393 } else {
1394 run_command("npm", ["install", "-g", spec], None)?;
1395 }
1396
1397 if let Some(installed) = self.npm_install_path(&name, scope)? {
1399 if !installed.exists() {
1400 return Err(Error::tool(
1401 "npm",
1402 format!(
1403 "npm install succeeded but '{}' is missing",
1404 installed.display()
1405 ),
1406 ));
1407 }
1408 }
1409
1410 Ok(())
1411 }
1412
1413 fn uninstall_npm(&self, name: &str, scope: PackageScope) -> Result<()> {
1414 let Some(install_root) = self.npm_prefix_root(scope) else {
1415 run_command("npm", ["uninstall", "-g", "--", name], None)?;
1416 return Ok(());
1417 };
1418 if !install_root.exists() {
1419 return Ok(());
1420 }
1421 run_command(
1422 "npm",
1423 [
1424 "uninstall",
1425 "--prefix",
1426 install_root.to_string_lossy().as_ref(),
1427 "--",
1428 name,
1429 ],
1430 None,
1431 )?;
1432 Ok(())
1433 }
1434
    /// Clones a git package into its scoped install directory.
    ///
    /// No-op when the target directory already exists (install is
    /// idempotent). After a successful clone, an optional `ref` is checked
    /// out and `npm install` runs when the repo ships a `package.json`.
    fn install_git(
        &self,
        repo: &str,
        host: &str,
        repo_path: &str,
        r#ref: Option<&str>,
        scope: PackageScope,
    ) -> Result<()> {
        let target_dir = self.checked_git_install_path(host, repo_path, scope)?;
        if target_dir.exists() {
            return Ok(());
        }

        // Keep the scope's git root ignored by any surrounding repository.
        if let Some(root) = self.git_root(scope) {
            ensure_git_ignore(&root)?;
        }
        if let Some(parent) = target_dir.parent() {
            fs::create_dir_all(parent)?;
        }

        // Explicit URLs and local paths are used as-is; bare `host/path`
        // shorthand is upgraded to https.
        let clone_url = if repo.starts_with("http://") || repo.starts_with("https://") {
            repo.to_string()
        } else if looks_like_local_path(repo) {
            repo.to_string()
        } else {
            format!("https://{repo}")
        };

        run_command(
            "git",
            [
                "clone",
                "--",
                &clone_url,
                target_dir.to_string_lossy().as_ref(),
            ],
            None,
        )?;

        if let Some(r#ref) = r#ref {
            run_command("git", ["checkout", r#ref], Some(&target_dir))?;
        }

        // Resolve the package's own npm dependencies, if any.
        if target_dir.join("package.json").exists() {
            run_command("npm", ["install"], Some(&target_dir))?;
        }

        Ok(())
    }
1485
1486 fn update_git(
1487 &self,
1488 repo: &str,
1489 host: &str,
1490 repo_path: &str,
1491 scope: PackageScope,
1492 ) -> Result<()> {
1493 if scope == PackageScope::Temporary {
1494 return Ok(());
1496 }
1497
1498 let target_dir = self.checked_git_install_path(host, repo_path, scope)?;
1499 if !target_dir.exists() {
1500 return self.install_git(repo, host, repo_path, None, scope);
1501 }
1502
1503 run_command("git", ["fetch", "--prune", "origin"], Some(&target_dir))?;
1504 run_command("git", ["reset", "--hard", "@{upstream}"], Some(&target_dir))?;
1505 run_command("git", ["clean", "-fdx"], Some(&target_dir))?;
1506
1507 if target_dir.join("package.json").exists() {
1508 run_command("npm", ["install"], Some(&target_dir))?;
1509 }
1510
1511 Ok(())
1512 }
1513
1514 fn remove_git(&self, host: &str, repo_path: &str, scope: PackageScope) -> Result<()> {
1515 let target_dir = self.checked_git_install_path(host, repo_path, scope)?;
1516 if !target_dir.exists() {
1517 return Ok(());
1518 }
1519
1520 fs::remove_dir_all(&target_dir)?;
1521 if let Some(root) = self.git_root(scope) {
1522 prune_empty_git_parents(&target_dir, &root);
1523 }
1524 Ok(())
1525 }
1526}
1527
/// Flags for resolving extension sources.
///
/// NOTE(review): semantics inferred from field names only — `local`
/// presumably treats sources as local paths and `temporary` resolves into
/// the temporary scope; confirm against the callers.
#[derive(Debug, Clone, Default)]
pub struct ResolveExtensionSourcesOptions {
    pub local: bool,
    pub temporary: bool,
}
1537
/// A package source plus its optional per-resource filter, as declared by a
/// settings `packages` entry (bare string or object form).
#[derive(Debug, Clone)]
struct PackageSpec {
    source: String,
    filter: Option<PackageFilter>,
}
1543
/// In-memory view of one settings file: declared packages plus top-level
/// entry lists for each resource type.
#[derive(Debug, Clone, Default)]
struct SettingsSnapshot {
    packages: Vec<PackageSpec>,
    extensions: Vec<String>,
    skills: Vec<String>,
    prompts: Vec<String>,
    themes: Vec<String>,
}
1552
impl SettingsSnapshot {
    /// Top-level settings entries (paths and/or patterns) for `resource_type`.
    fn entries_for(&self, resource_type: ResourceType) -> &[String] {
        match resource_type {
            ResourceType::Extensions => &self.extensions,
            ResourceType::Skills => &self.skills,
            ResourceType::Prompts => &self.prompts,
            ResourceType::Themes => &self.themes,
        }
    }
}
1563
1564fn read_settings_snapshot(path: &Path) -> Result<SettingsSnapshot> {
1565 let value = read_settings_json(path)?;
1566 let packages_value = value
1567 .get("packages")
1568 .and_then(Value::as_array)
1569 .cloned()
1570 .unwrap_or_default();
1571
1572 let mut packages = Vec::new();
1573 for pkg in &packages_value {
1574 if let Some(spec) = extract_package_spec(pkg) {
1575 packages.push(spec);
1576 }
1577 }
1578
1579 Ok(SettingsSnapshot {
1580 packages,
1581 extensions: extract_string_array(value.get("extensions")),
1582 skills: extract_string_array(value.get("skills")),
1583 prompts: extract_string_array(value.get("prompts")),
1584 themes: extract_string_array(value.get("themes")),
1585 })
1586}
1587
1588fn read_project_settings_snapshot(roots: &ResolveRoots) -> Result<SettingsSnapshot> {
1589 if roots.project_settings_enabled {
1590 read_settings_snapshot(&roots.project_settings_path)
1591 } else {
1592 Ok(SettingsSnapshot::default())
1593 }
1594}
1595
1596fn extract_string_array(value: Option<&Value>) -> Vec<String> {
1597 match value {
1598 Some(Value::String(s)) => vec![s.clone()],
1599 Some(Value::Array(arr)) => arr
1600 .iter()
1601 .filter_map(Value::as_str)
1602 .map(str::to_string)
1603 .collect(),
1604 _ => Vec::new(),
1605 }
1606}
1607
1608fn extract_package_spec(value: &Value) -> Option<PackageSpec> {
1609 if let Some(s) = value.as_str() {
1610 let source = s.trim();
1611 if source.is_empty() {
1612 return None;
1613 }
1614 return Some(PackageSpec {
1615 source: source.to_string(),
1616 filter: None,
1617 });
1618 }
1619
1620 let obj = value.as_object()?;
1621 let source = obj.get("source")?.as_str()?.trim().to_string();
1622 if source.is_empty() {
1623 return None;
1624 }
1625
1626 let filter = PackageFilter {
1627 extensions: extract_filter_field(obj, "extensions"),
1628 skills: extract_filter_field(obj, "skills"),
1629 prompts: extract_filter_field(obj, "prompts"),
1630 themes: extract_filter_field(obj, "themes"),
1631 };
1632
1633 Some(PackageSpec {
1634 source,
1635 filter: Some(filter),
1636 })
1637}
1638
1639fn extract_filter_field(obj: &serde_json::Map<String, Value>, key: &str) -> Option<Vec<String>> {
1640 if !obj.contains_key(key) {
1641 return None;
1642 }
1643
1644 match obj.get(key) {
1645 Some(Value::String(s)) => Some(vec![s.clone()]),
1646 Some(Value::Array(arr)) => Some(
1647 arr.iter()
1648 .filter_map(Value::as_str)
1649 .map(str::to_string)
1650 .collect(),
1651 ),
1652 _ => Some(Vec::new()),
1653 }
1654}
1655
/// A package spec paired with the settings scope it was declared in.
#[derive(Debug, Clone)]
struct ScopedPackage {
    pkg: PackageSpec,
    scope: PackageScope,
}
1661
/// The four kinds of resources a package can contribute.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ResourceType {
    Extensions,
    Skills,
    Prompts,
    Themes,
}
1669
impl ResourceType {
    /// All resource types, in canonical processing order.
    const fn all() -> [Self; 4] {
        [Self::Extensions, Self::Skills, Self::Prompts, Self::Themes]
    }

    /// The directory / settings-key name used for this resource type.
    const fn as_str(self) -> &'static str {
        match self {
            Self::Extensions => "extensions",
            Self::Skills => "skills",
            Self::Prompts => "prompts",
            Self::Themes => "themes",
        }
    }
}
1684
/// Collects resolved resources per type while package sources are walked;
/// each list deduplicates by path.
#[derive(Debug, Default, Clone)]
struct ResourceAccumulator {
    extensions: ResourceList,
    skills: ResourceList,
    prompts: ResourceList,
    themes: ResourceList,
}
1692
1693impl ResourceAccumulator {
1694 fn new() -> Self {
1695 Self::default()
1696 }
1697
1698 #[allow(clippy::missing_const_for_fn)] fn target_mut(&mut self, resource_type: ResourceType) -> &mut ResourceList {
1700 match resource_type {
1701 ResourceType::Extensions => &mut self.extensions,
1702 ResourceType::Skills => &mut self.skills,
1703 ResourceType::Prompts => &mut self.prompts,
1704 ResourceType::Themes => &mut self.themes,
1705 }
1706 }
1707
1708 fn into_resolved_paths(mut self) -> ResolvedPaths {
1709 for items in [
1710 &mut self.extensions.items,
1711 &mut self.skills.items,
1712 &mut self.prompts.items,
1713 &mut self.themes.items,
1714 ] {
1715 items.sort_by(|a, b| a.path.to_string_lossy().cmp(&b.path.to_string_lossy()));
1716 }
1717
1718 ResolvedPaths {
1719 extensions: self.extensions.items,
1720 skills: self.skills.items,
1721 prompts: self.prompts.items,
1722 themes: self.themes.items,
1723 }
1724 }
1725}
1726
/// An order-preserving, path-deduplicated list of resolved resources.
#[derive(Debug, Default, Clone)]
struct ResourceList {
    // Lossy-string path keys already added; guards against duplicates.
    seen: std::collections::HashSet<String>,
    items: Vec<ResolvedResource>,
}
1732
1733impl ResourceList {
1734 fn add(&mut self, path: PathBuf, metadata: &PathMetadata, enabled: bool) {
1735 let key = path.to_string_lossy().to_string();
1736 if !self.seen.insert(key) {
1737 return;
1738 }
1739 self.items.push(ResolvedResource {
1740 path,
1741 enabled,
1742 metadata: metadata.clone(),
1743 });
1744 }
1745}
1746
1747impl PackageManager {
    /// Removes duplicate package entries, keyed by `package_identity`.
    ///
    /// The first occurrence of an identity wins and input order is kept,
    /// with one exception: a later `Project`-scoped entry replaces an
    /// earlier `User`-scoped one in place (project settings take precedence
    /// over user settings). Entries with blank sources are dropped.
    fn dedupe_packages(&self, packages: Vec<ScopedPackage>) -> Vec<ScopedPackage> {
        // identity -> index of the entry currently kept in `out`
        let mut seen: std::collections::HashMap<String, usize> = std::collections::HashMap::new();
        let mut out: Vec<ScopedPackage> = Vec::new();

        for entry in packages {
            let source = entry.pkg.source.trim();
            if source.is_empty() {
                continue;
            }
            let identity = self.package_identity(source);
            if let Some(&idx) = seen.get(&identity) {
                let existing_scope = out[idx].scope;
                // Project scope overrides user scope without reordering.
                if entry.scope == PackageScope::Project && existing_scope == PackageScope::User {
                    out[idx] = entry;
                }
                continue;
            }

            seen.insert(identity, out.len());
            out.push(entry);
        }

        out
    }
1772
    /// Resolves every configured package source into concrete resource
    /// paths, installing missing npm/git packages on demand.
    ///
    /// Local sources are read directly; npm sources are installed when
    /// absent or when the installed version no longer satisfies the spec;
    /// git sources are cloned only when absent. Collected resources are
    /// pushed into `accumulator` tagged with per-source `PathMetadata`.
    async fn resolve_package_sources(
        &self,
        sources: &[ScopedPackage],
        accumulator: &mut ResourceAccumulator,
    ) -> Result<()> {
        for entry in sources {
            let source_str = entry.pkg.source.trim();
            if source_str.is_empty() {
                continue;
            }

            let parsed = parse_source(source_str, &self.cwd);
            let mut metadata = PathMetadata {
                source: source_str.to_string(),
                scope: entry.scope,
                origin: ResourceOrigin::Package,
                base_dir: None,
            };

            match parsed {
                ParsedSource::Local { path } => {
                    // Temporary scope is strict: a missing local source is
                    // an error instead of being skipped.
                    Self::resolve_local_extension_source(
                        &path,
                        accumulator,
                        entry.pkg.filter.as_ref(),
                        &mut metadata,
                        entry.scope == PackageScope::Temporary,
                    )?;
                }
                ParsedSource::Npm { spec, name, pinned } => {
                    let installed_path = self
                        .installed_path(&format!("npm:{name}"), entry.scope)
                        .await?
                        .unwrap_or_else(|| self.cwd.join("node_modules").join(&name));

                    // Install when missing, or when the installed version no
                    // longer matches the pinned/latest spec.
                    let needs_install = !installed_path.exists()
                        || Box::pin(self.npm_needs_update(&spec, pinned, &installed_path)).await;
                    if needs_install {
                        self.install(source_str, entry.scope).await?;
                    }

                    metadata.base_dir = Some(installed_path.clone());
                    Self::collect_package_resources(
                        &installed_path,
                        accumulator,
                        entry.pkg.filter.as_ref(),
                        &metadata,
                    )?;
                }
                ParsedSource::Git {
                    repo: _,
                    host,
                    path,
                    r#ref: _,
                    ..
                } => {
                    let installed_path =
                        self.checked_git_install_path(&host, &path, entry.scope)?;

                    // Git clones are only created here on first use; no
                    // implicit update when the clone already exists.
                    if !installed_path.exists() {
                        self.install(source_str, entry.scope).await?;
                    }

                    metadata.base_dir = Some(installed_path.clone());
                    Self::collect_package_resources(
                        &installed_path,
                        accumulator,
                        entry.pkg.filter.as_ref(),
                        &metadata,
                    )?;
                }
            }
        }

        Ok(())
    }
1851
1852 async fn npm_needs_update(&self, spec: &str, pinned: bool, installed_path: &Path) -> bool {
1853 let installed_version = read_installed_npm_version(installed_path);
1854 let Some(installed_version) = installed_version else {
1855 return true;
1856 };
1857
1858 let (_, pinned_version) = parse_npm_spec(spec);
1859 if pinned {
1860 return pinned_version.is_some_and(|pv| pv != installed_version);
1861 }
1862
1863 Box::pin(get_latest_npm_version(installed_path, spec))
1864 .await
1865 .is_ok_and(|latest| latest != installed_version)
1866 }
1867
    /// Resolves a local extension source (single file or directory) into the
    /// accumulator.
    ///
    /// `strict` (used for temporary-scope sources) turns every soft skip —
    /// missing path, unreadable metadata, unsupported file kind, odd file
    /// type — into an error; otherwise the source is silently ignored (an
    /// unsupported file additionally logs a warning).
    fn resolve_local_extension_source(
        resolved: &Path,
        accumulator: &mut ResourceAccumulator,
        filter: Option<&PackageFilter>,
        metadata: &mut PathMetadata,
        strict: bool,
    ) -> Result<()> {
        if !resolved.exists() {
            if strict {
                return Err(Error::config(format!(
                    "Extension source '{}' does not exist",
                    resolved.display()
                )));
            }
            return Ok(());
        }

        let stats = match fs::metadata(resolved) {
            Ok(stats) => stats,
            Err(err) => {
                if strict {
                    return Err(Error::config(format!(
                        "Failed to inspect extension source '{}': {err}",
                        resolved.display()
                    )));
                }
                return Ok(());
            }
        };

        if stats.is_file() {
            // A single file must be one of the recognized entrypoint kinds.
            if !is_supported_extension_file(resolved) {
                let message = format!(
                    "Unsupported extension source file '{}'; use extension.json, JS/TS entrypoints, *.native.json, or *.wasm",
                    resolved.display()
                );
                if strict {
                    return Err(Error::config(message));
                }
                warn!(path = %resolved.display(), "{message}");
                return Ok(());
            }
            // The file's directory becomes the base for relative lookups.
            metadata.base_dir = resolved.parent().map(Path::to_path_buf);
            accumulator
                .extensions
                .add(resolved.to_path_buf(), metadata, true);
            return Ok(());
        }

        // Neither file nor directory (e.g. a special file).
        if !stats.is_dir() {
            if strict {
                return Err(Error::config(format!(
                    "Extension source '{}' is neither a file nor a directory",
                    resolved.display()
                )));
            }
            return Ok(());
        }

        // Directory: try package-style resource collection first; if the
        // directory yields nothing package-shaped, treat the directory
        // itself as a single extension.
        metadata.base_dir = Some(resolved.to_path_buf());
        let had_any = Self::collect_package_resources(resolved, accumulator, filter, metadata)?;
        if !had_any {
            accumulator
                .extensions
                .add(resolved.to_path_buf(), metadata, true);
        }
        Ok(())
    }
1936
1937 fn resolve_local_entries(
1938 entries: &[String],
1939 resource_type: ResourceType,
1940 target: &mut ResourceList,
1941 metadata: &PathMetadata,
1942 base_dir: &Path,
1943 ) {
1944 if entries.is_empty() {
1945 return;
1946 }
1947
1948 let (plain, patterns) = split_patterns(entries);
1949 let resolved_plain = plain
1950 .iter()
1951 .map(|p| resolve_path_from_base(p, base_dir))
1952 .collect::<Vec<_>>();
1953 let all_files = collect_files_from_paths(&resolved_plain, resource_type);
1954 let enabled_paths = apply_patterns(&all_files, &patterns, base_dir);
1955
1956 for f in all_files {
1957 let enabled = enabled_paths.contains(&f);
1958 target.add(f, metadata, enabled);
1959 }
1960 }
1961
    /// Adds auto-discovered top-level resources from the conventional
    /// per-type directories under the global and (when enabled) project
    /// settings bases.
    ///
    /// User-scope entries are added before project-scope ones; because
    /// `ResourceList::add` keeps the first insertion per path, a user entry
    /// wins over a colliding project entry here. Explicit settings overrides
    /// decide whether each discovered path is enabled.
    #[allow(clippy::unused_self)]
    fn add_auto_discovered_resources(
        &self,
        accumulator: &mut ResourceAccumulator,
        global: &SettingsSnapshot,
        project: &SettingsSnapshot,
        global_base_dir: &Path,
        project_base_dir: &Path,
        project_settings_enabled: bool,
    ) {
        let user_metadata = PathMetadata {
            source: "auto".to_string(),
            scope: PackageScope::User,
            origin: ResourceOrigin::TopLevel,
            base_dir: Some(global_base_dir.to_path_buf()),
        };
        let project_metadata = PathMetadata {
            source: "auto".to_string(),
            scope: PackageScope::Project,
            origin: ResourceOrigin::TopLevel,
            base_dir: Some(project_base_dir.to_path_buf()),
        };

        let user_dirs = AutoDirs::new(global_base_dir);
        let project_dirs = AutoDirs::new(project_base_dir);

        for resource_type in ResourceType::all() {
            let target = accumulator.target_mut(resource_type);
            // Per-type discovery paired with the settings list that controls
            // the enabled flag for each discovered path.
            let (user_paths, user_overrides) = match resource_type {
                ResourceType::Extensions => (
                    collect_auto_extension_entries(&user_dirs.extensions),
                    &global.extensions,
                ),
                ResourceType::Skills => (
                    collect_auto_skill_entries(&user_dirs.skills),
                    &global.skills,
                ),
                ResourceType::Prompts => (
                    collect_auto_prompt_entries(&user_dirs.prompts),
                    &global.prompts,
                ),
                ResourceType::Themes => (
                    collect_auto_theme_entries(&user_dirs.themes),
                    &global.themes,
                ),
            };
            for path in user_paths {
                let enabled = is_enabled_by_overrides(&path, user_overrides, global_base_dir);
                target.add(path, &user_metadata, enabled);
            }

            if project_settings_enabled {
                let (project_paths, project_overrides) = match resource_type {
                    ResourceType::Extensions => (
                        collect_auto_extension_entries(&project_dirs.extensions),
                        &project.extensions,
                    ),
                    ResourceType::Skills => (
                        collect_auto_skill_entries(&project_dirs.skills),
                        &project.skills,
                    ),
                    ResourceType::Prompts => (
                        collect_auto_prompt_entries(&project_dirs.prompts),
                        &project.prompts,
                    ),
                    ResourceType::Themes => (
                        collect_auto_theme_entries(&project_dirs.themes),
                        &project.themes,
                    ),
                };
                for path in project_paths {
                    let enabled =
                        is_enabled_by_overrides(&path, project_overrides, project_base_dir);
                    target.add(path, &project_metadata, enabled);
                }
            }
        }
    }
2040
    /// Collects a package's resources into the accumulator; returns whether
    /// the package contributed anything "package-shaped".
    ///
    /// Resolution precedence per resource type:
    /// 1. A user filter, when provided, governs everything: filtered types
    ///    go through `apply_package_filter`, unfiltered through defaults.
    /// 2. Otherwise a `pi` manifest inside `package.json` is authoritative.
    /// 3. Otherwise conventional `<root>/<type>/` directories are scanned.
    ///
    /// Only case 3 can return `Ok(false)` (no conventional directory found).
    fn collect_package_resources(
        package_root: &Path,
        accumulator: &mut ResourceAccumulator,
        filter: Option<&PackageFilter>,
        metadata: &PathMetadata,
    ) -> Result<bool> {
        if let Some(filter) = filter {
            for resource_type in ResourceType::all() {
                let target = accumulator.target_mut(resource_type);
                let patterns = match resource_type {
                    ResourceType::Extensions => filter.extensions.as_ref(),
                    ResourceType::Skills => filter.skills.as_ref(),
                    ResourceType::Prompts => filter.prompts.as_ref(),
                    ResourceType::Themes => filter.themes.as_ref(),
                };

                if let Some(patterns) = patterns {
                    Self::apply_package_filter(
                        package_root,
                        patterns,
                        resource_type,
                        target,
                        metadata,
                    )?;
                } else {
                    // No filter for this type: fall back to manifest or
                    // conventional-directory defaults.
                    Self::collect_default_resources(package_root, resource_type, target, metadata)?;
                }
            }
            return Ok(true);
        }

        if let Some(manifest) = read_pi_manifest(package_root)? {
            for resource_type in ResourceType::all() {
                let entries = manifest.entries_for(resource_type);
                Self::add_manifest_entries(
                    entries.as_deref(),
                    package_root,
                    resource_type,
                    accumulator.target_mut(resource_type),
                    metadata,
                );
            }
            return Ok(true);
        }

        // No filter and no manifest: scan the conventional directories.
        let mut has_any_dir = false;
        for resource_type in ResourceType::all() {
            let dir = package_root.join(resource_type.as_str());
            if dir.exists() {
                let files = collect_resource_files(&dir, resource_type);
                let target = accumulator.target_mut(resource_type);
                for f in files {
                    target.add(f, metadata, true);
                }
                has_any_dir = true;
            }
        }

        Ok(has_any_dir)
    }
2101
2102 fn collect_default_resources(
2103 package_root: &Path,
2104 resource_type: ResourceType,
2105 target: &mut ResourceList,
2106 metadata: &PathMetadata,
2107 ) -> Result<()> {
2108 if let Some(manifest) = read_pi_manifest(package_root)? {
2109 if let Some(entries) = manifest.entries_for(resource_type) {
2110 Self::add_manifest_entries(
2111 Some(&entries),
2112 package_root,
2113 resource_type,
2114 target,
2115 metadata,
2116 );
2117 return Ok(());
2118 }
2119 }
2120
2121 let dir = package_root.join(resource_type.as_str());
2122 if dir.exists() {
2123 let files = collect_resource_files(&dir, resource_type);
2124 for f in files {
2125 target.add(f, metadata, true);
2126 }
2127 }
2128 Ok(())
2129 }
2130
2131 fn apply_package_filter(
2132 package_root: &Path,
2133 user_patterns: &[String],
2134 resource_type: ResourceType,
2135 target: &mut ResourceList,
2136 metadata: &PathMetadata,
2137 ) -> Result<()> {
2138 let (all_files, _) = Self::collect_manifest_files(package_root, resource_type)?;
2139
2140 if user_patterns.is_empty() {
2141 for f in all_files {
2142 target.add(f, metadata, false);
2143 }
2144 return Ok(());
2145 }
2146
2147 let enabled_by_user = apply_patterns(&all_files, user_patterns, package_root);
2148 for f in all_files {
2149 let enabled = enabled_by_user.contains(&f);
2150 target.add(f, metadata, enabled);
2151 }
2152 Ok(())
2153 }
2154
    /// Lists candidate files for one resource type of a package, paired with
    /// the set of files the package's own manifest enables.
    ///
    /// With a manifest entry list: files come from the listed paths and the
    /// manifest's glob patterns (if any) decide which are enabled — note
    /// that only the manifest-enabled files are returned, sorted by path
    /// string. Without a manifest entry: every file under the conventional
    /// directory is returned and enabled. Missing directory or empty
    /// entries yield empty results.
    fn collect_manifest_files(
        package_root: &Path,
        resource_type: ResourceType,
    ) -> Result<(Vec<PathBuf>, std::collections::HashSet<PathBuf>)> {
        if let Some(manifest) = read_pi_manifest(package_root)? {
            if let Some(entries) = manifest.entries_for(resource_type) {
                if entries.is_empty() {
                    return Ok((Vec::new(), std::collections::HashSet::new()));
                }
                let all_files =
                    collect_files_from_manifest_entries(&entries, package_root, resource_type);
                // Pattern-style entries act as an allowlist over the files.
                let patterns = entries
                    .iter()
                    .filter(|e| is_pattern(e))
                    .cloned()
                    .collect::<Vec<_>>();
                let enabled_by_manifest = if patterns.is_empty() {
                    all_files
                        .iter()
                        .cloned()
                        .collect::<std::collections::HashSet<_>>()
                } else {
                    apply_patterns(&all_files, &patterns, package_root)
                };
                // Sort for deterministic downstream iteration order.
                let mut enabled_vec = enabled_by_manifest.iter().cloned().collect::<Vec<_>>();
                enabled_vec.sort_by(|a, b| a.to_string_lossy().cmp(&b.to_string_lossy()));
                return Ok((enabled_vec, enabled_by_manifest));
            }
        }

        let convention_dir = package_root.join(resource_type.as_str());
        if !convention_dir.exists() {
            return Ok((Vec::new(), std::collections::HashSet::new()));
        }
        let all_files = collect_resource_files(&convention_dir, resource_type);
        let set = all_files.iter().cloned().collect();
        Ok((all_files, set))
    }
2193
2194 fn add_manifest_entries(
2195 entries: Option<&[String]>,
2196 root: &Path,
2197 resource_type: ResourceType,
2198 target: &mut ResourceList,
2199 metadata: &PathMetadata,
2200 ) {
2201 let Some(entries) = entries else {
2202 return;
2203 };
2204 if entries.is_empty() {
2205 return;
2206 }
2207
2208 let all_files = collect_files_from_manifest_entries(entries, root, resource_type);
2209 let patterns = entries
2210 .iter()
2211 .filter(|e| is_pattern(e))
2212 .cloned()
2213 .collect::<Vec<_>>();
2214 let enabled_paths = apply_patterns(&all_files, &patterns, root);
2215
2216 for f in all_files {
2217 if enabled_paths.contains(&f) {
2218 target.add(f, metadata, true);
2219 }
2220 }
2221 }
2222}
2223
/// The conventional auto-discovery directories under one settings base dir.
#[derive(Debug, Default)]
struct AutoDirs {
    extensions: PathBuf,
    skills: PathBuf,
    prompts: PathBuf,
    themes: PathBuf,
}
2231
2232impl AutoDirs {
2233 fn new(base_dir: &Path) -> Self {
2234 Self {
2235 extensions: base_dir.join("extensions"),
2236 skills: base_dir.join("skills"),
2237 prompts: base_dir.join("prompts"),
2238 themes: base_dir.join("themes"),
2239 }
2240 }
2241}
2242
/// The optional `pi` section of a package's `package.json`: per-resource
/// entry lists (paths and/or glob patterns). `None` means the field was
/// absent — distinct from an explicitly empty list.
#[derive(Debug, Clone, Default)]
struct PiManifest {
    extensions: Option<Vec<String>>,
    skills: Option<Vec<String>>,
    prompts: Option<Vec<String>>,
    themes: Option<Vec<String>>,
}
2250
impl PiManifest {
    /// Returns a clone of the entry list for `resource_type`; `None` when
    /// the manifest did not declare that field.
    fn entries_for(&self, resource_type: ResourceType) -> Option<Vec<String>> {
        match resource_type {
            ResourceType::Extensions => self.extensions.clone(),
            ResourceType::Skills => self.skills.clone(),
            ResourceType::Prompts => self.prompts.clone(),
            ResourceType::Themes => self.themes.clone(),
        }
    }
}
2261
2262fn parse_manifest_entries_field(
2263 obj: &serde_json::Map<String, Value>,
2264 key: &str,
2265 manifest_path: &Path,
2266 package_root: &Path,
2267) -> Result<Option<Vec<String>>> {
2268 let Some(value) = obj.get(key) else {
2269 return Ok(None);
2270 };
2271
2272 match value {
2273 Value::String(entry) => Ok(Some(vec![validate_manifest_entry(
2274 package_root,
2275 manifest_path,
2276 key,
2277 entry,
2278 )?])),
2279 Value::Array(arr) => {
2280 let mut out = Vec::with_capacity(arr.len());
2281 for entry in arr {
2282 let Some(entry) = entry.as_str() else {
2283 return Err(Error::config(format!(
2284 "Invalid package manifest {}: `pi.{key}` must be a string or array of strings",
2285 manifest_path.display()
2286 )));
2287 };
2288 out.push(validate_manifest_entry(
2289 package_root,
2290 manifest_path,
2291 key,
2292 entry,
2293 )?);
2294 }
2295
2296 Ok(Some(out))
2297 }
2298 _ => Err(Error::config(format!(
2299 "Invalid package manifest {}: `pi.{key}` must be a string or array of strings",
2300 manifest_path.display()
2301 ))),
2302 }
2303}
2304
/// Validates a single `pi.<field_name>` manifest entry and returns it
/// trimmed.
///
/// An entry may carry a leading `!`, `+`, or `-` pattern prefix; the path
/// part after the prefix must be non-empty, relative, and must not escape
/// the package root — neither lexically (tracking `..` depth) nor via
/// symlinks (checked by canonicalizing existing targets).
fn validate_manifest_entry(
    package_root: &Path,
    manifest_path: &Path,
    field_name: &str,
    entry: &str,
) -> Result<String> {
    let trimmed = entry.trim();
    if trimmed.is_empty() {
        return Err(Error::config(format!(
            "Invalid package manifest {}: `pi.{field_name}` entries must be non-empty paths",
            manifest_path.display()
        )));
    }

    // Strip a single pattern prefix (`!`, `+`, or `-`) before path checks.
    let path_part = trimmed
        .strip_prefix('!')
        .or_else(|| trimmed.strip_prefix('+'))
        .or_else(|| trimmed.strip_prefix('-'))
        .unwrap_or(trimmed);
    if path_part.is_empty() {
        return Err(Error::config(format!(
            "Invalid package manifest {}: `pi.{field_name}` entries must be non-empty paths",
            manifest_path.display()
        )));
    }

    let relative = Path::new(path_part);
    if relative.is_absolute() {
        return Err(Error::config(format!(
            "Invalid package manifest {}: `pi.{field_name}` paths must stay within the package root",
            manifest_path.display()
        )));
    }

    // Lexical escape check: `..` may only pop components gained earlier in
    // the same path, so depth never drops below zero.
    let mut depth = 0usize;
    for component in relative.components() {
        match component {
            std::path::Component::CurDir => {}
            std::path::Component::Normal(_) => depth = depth.saturating_add(1),
            std::path::Component::ParentDir => {
                if depth == 0 {
                    return Err(Error::config(format!(
                        "Invalid package manifest {}: `pi.{field_name}` paths must stay within the package root",
                        manifest_path.display()
                    )));
                }
                depth -= 1;
            }
            std::path::Component::RootDir | std::path::Component::Prefix(_) => {
                return Err(Error::config(format!(
                    "Invalid package manifest {}: `pi.{field_name}` paths must stay within the package root",
                    manifest_path.display()
                )));
            }
        }
    }

    // Symlink escape check: only possible for paths that already exist.
    let resolved = package_root.join(relative);
    if resolved.exists() && !manifest_path_within_root(&resolved, package_root) {
        return Err(Error::config(format!(
            "Invalid package manifest {}: `pi.{field_name}` paths must stay within the package root",
            manifest_path.display()
        )));
    }

    Ok(trimmed.to_string())
}
2372
/// Returns true when `target` canonicalizes to `root` or to a path below
/// it. Paths that cannot be canonicalized (e.g. they do not exist) count
/// as outside the root.
fn manifest_path_within_root(target: &Path, root: &Path) -> bool {
    match (root.canonicalize(), target.canonicalize()) {
        (Ok(root), Ok(target)) => target == root || target.starts_with(&root),
        _ => false,
    }
}
2382
/// Loads the optional `pi` section from `<package_root>/package.json`.
///
/// Returns `Ok(None)` when the manifest file or the `pi` key is absent.
/// Read/parse failures, a non-object `pi` value, or invalid entry lists
/// surface as config errors.
fn read_pi_manifest(package_root: &Path) -> Result<Option<PiManifest>> {
    let manifest_path = package_root.join("package.json");
    if !manifest_path.exists() {
        return Ok(None);
    }
    let raw = fs::read_to_string(&manifest_path).map_err(|err| {
        Error::config(format!(
            "Failed to read package manifest {}: {err}",
            manifest_path.display()
        ))
    })?;
    let json: Value = serde_json::from_str(&raw).map_err(|err| {
        Error::config(format!(
            "Failed to parse package manifest {}: {err}",
            manifest_path.display()
        ))
    })?;
    // A package.json without a `pi` key is an ordinary npm manifest.
    let Some(pi) = json.get("pi") else {
        return Ok(None);
    };
    let Some(obj) = pi.as_object() else {
        return Err(Error::config(format!(
            "Invalid package manifest {}: `pi` must be an object",
            manifest_path.display()
        )));
    };

    // Each resource list is independently optional and validated.
    Ok(Some(PiManifest {
        extensions: parse_manifest_entries_field(obj, "extensions", &manifest_path, package_root)?,
        skills: parse_manifest_entries_field(obj, "skills", &manifest_path, package_root)?,
        prompts: parse_manifest_entries_field(obj, "prompts", &manifest_path, package_root)?,
        themes: parse_manifest_entries_field(obj, "themes", &manifest_path, package_root)?,
    }))
}
2417
2418fn temporary_dir(prefix: &str, suffix: Option<&str>) -> PathBuf {
2419 let mut hasher = Sha256::new();
2420 hasher.update(format!("{prefix}-{}", suffix.unwrap_or("")));
2421 let digest = hasher.finalize();
2422 let short = hex_encode(&digest)[..8].to_string();
2423
2424 let mut dir = std::env::temp_dir()
2425 .join("pi-extensions")
2426 .join(prefix)
2427 .join(short);
2428 if let Some(suffix) = suffix {
2429 dir = dir.join(suffix);
2430 }
2431 dir
2432}
2433
/// Lowercase hexadecimal rendering of a byte slice.
fn hex_encode(bytes: &[u8]) -> String {
    const LUT: &[u8; 16] = b"0123456789abcdef";
    bytes
        .iter()
        .flat_map(|&b| {
            [
                LUT[usize::from(b >> 4)] as char,
                LUT[usize::from(b & 0x0f)] as char,
            ]
        })
        .collect()
}
2443
2444fn resolve_path_from_base(input: &str, base_dir: &Path) -> PathBuf {
2445 let trimmed = input.trim();
2446 if trimmed == "~" {
2447 return dirs::home_dir().unwrap_or_else(|| base_dir.to_path_buf());
2448 }
2449 if let Some(rest) = trimmed.strip_prefix("~/") {
2450 return dirs::home_dir()
2451 .unwrap_or_else(|| base_dir.to_path_buf())
2452 .join(rest);
2453 }
2454 if trimmed.starts_with('~') {
2455 return dirs::home_dir()
2456 .unwrap_or_else(|| base_dir.to_path_buf())
2457 .join(trimmed.trim_start_matches('~'));
2458 }
2459
2460 let p = Path::new(trimmed);
2461 if p.is_absolute() {
2462 return p.to_path_buf();
2463 }
2464 base_dir.join(p)
2465}
2466
/// A filter entry counts as a pattern when it carries an override prefix
/// (`!`, `+`, `-`) or contains a glob metacharacter.
fn is_pattern(s: &str) -> bool {
    matches!(s.as_bytes().first(), Some(b'!' | b'+' | b'-'))
        || s.contains('*')
        || s.contains('?')
}
2474
2475fn split_patterns(entries: &[String]) -> (Vec<String>, Vec<String>) {
2476 let mut plain = Vec::new();
2477 let mut patterns = Vec::new();
2478 for entry in entries {
2479 if is_pattern(entry) {
2480 patterns.push(entry.clone());
2481 } else {
2482 plain.push(entry.clone());
2483 }
2484 }
2485 (plain, patterns)
2486}
2487
/// Renders a path with forward slashes regardless of platform separator.
fn posix_string(path: &Path) -> String {
    path.to_string_lossy()
        .chars()
        .map(|c| if c == '\\' { '/' } else { c })
        .collect()
}
2491
2492fn relative_posix(base: &Path, path: &Path) -> String {
2493 let base_components = base.components().collect::<Vec<_>>();
2494 let path_components = path.components().collect::<Vec<_>>();
2495
2496 let mut i = 0usize;
2497 while i < base_components.len()
2498 && i < path_components.len()
2499 && base_components[i] == path_components[i]
2500 {
2501 i += 1;
2502 }
2503
2504 if i == 0 {
2505 return posix_string(path);
2506 }
2507
2508 let mut rel = PathBuf::new();
2509 for _ in i..base_components.len() {
2510 rel.push("..");
2511 }
2512 for comp in path_components.iter().skip(i) {
2513 rel.push(comp.as_os_str());
2514 }
2515 posix_string(&rel)
2516}
2517
/// Drops a leading `./` or `.\` so exact comparisons ignore an explicit
/// current-directory prefix.
fn normalize_exact_pattern(pattern: &str) -> &str {
    for prefix in ["./", ".\\"] {
        if let Some(rest) = pattern.strip_prefix(prefix) {
            return rest;
        }
    }
    pattern
}
2524
2525fn pattern_matches(pattern: &str, candidate: &str) -> bool {
2526 let normalized_pattern = pattern.replace('\\', "/");
2527 let candidate = candidate.replace('\\', "/");
2528 glob::Pattern::new(&normalized_pattern)
2529 .ok()
2530 .is_some_and(|p| p.matches(&candidate))
2531}
2532
/// Returns true when any pattern matches `file_path` under any of several
/// candidate spellings: the path relative to `base_dir`, the bare file
/// name, or the full path. For `SKILL.md` files the skill's parent
/// directory is additionally tried (relative, name-only, and full forms),
/// so a pattern naming the skill directory selects the skill file.
fn matches_any_pattern(file_path: &Path, patterns: &[String], base_dir: &Path) -> bool {
    let rel = relative_posix(base_dir, file_path);
    let name = file_path.file_name().and_then(|n| n.to_str()).unwrap_or("");
    let file_str = posix_string(file_path);

    // Skill files are addressed by their containing directory as well.
    let is_skill_file = name == "SKILL.md";
    let parent_dir = is_skill_file.then(|| file_path.parent().unwrap_or_else(|| Path::new(".")));
    let parent_dir_str = parent_dir.map(posix_string);
    let parent_rel = parent_dir.map(|p| relative_posix(base_dir, p));
    let parent_name = parent_dir
        .and_then(|p| p.file_name())
        .and_then(|n| n.to_str());

    for pattern in patterns {
        if pattern_matches(pattern, &rel)
            || pattern_matches(pattern, name)
            || pattern_matches(pattern, &file_str)
        {
            return true;
        }
        if !is_skill_file {
            continue;
        }
        // Skill-only fallbacks: match against the parent directory forms.
        if parent_rel
            .as_ref()
            .is_some_and(|s| pattern_matches(pattern, s))
        {
            return true;
        }
        if parent_name.is_some_and(|s| pattern_matches(pattern, s)) {
            return true;
        }
        if parent_dir_str
            .as_ref()
            .is_some_and(|s| pattern_matches(pattern, s))
        {
            return true;
        }
    }
    false
}
2574
/// Like [`matches_any_pattern`] but with exact string equality instead of
/// glob matching: a pattern (after `./`/`.\` normalization) must equal the
/// path relative to `base_dir` or the full path. `SKILL.md` files also
/// match when the pattern equals their parent directory's relative or
/// full form.
fn matches_any_exact_pattern(file_path: &Path, patterns: &[String], base_dir: &Path) -> bool {
    if patterns.is_empty() {
        return false;
    }

    let rel = relative_posix(base_dir, file_path);
    let file_str = posix_string(file_path);

    // Skill files are addressed by their containing directory as well.
    let name = file_path.file_name().and_then(|n| n.to_str()).unwrap_or("");
    let is_skill_file = name == "SKILL.md";
    let parent_dir = is_skill_file.then(|| file_path.parent().unwrap_or_else(|| Path::new(".")));
    let parent_dir_str = parent_dir.map(posix_string);
    let parent_rel = parent_dir.map(|p| relative_posix(base_dir, p));

    patterns.iter().any(|pattern| {
        let normalized = normalize_exact_pattern(pattern);
        if normalized == rel || normalized == file_str {
            return true;
        }
        if !is_skill_file {
            return false;
        }
        parent_rel.as_ref().is_some_and(|p| normalized == p)
            || parent_dir_str.as_ref().is_some_and(|p| normalized == p)
    })
}
2601
/// Keeps only override entries: exclusions (`!`), force-includes (`+`),
/// and force-excludes (`-`).
fn get_override_patterns(entries: &[String]) -> Vec<String> {
    entries
        .iter()
        .filter(|entry| matches!(entry.as_bytes().first(), Some(b'!' | b'+' | b'-')))
        .cloned()
        .collect()
}
2609
/// Decides whether `path` is enabled given override patterns, with
/// precedence: force-exclude (`-`, exact match) beats force-include (`+`,
/// exact match), which beats plain excludes (`!`, glob match). With no
/// matching override the path stays enabled.
fn is_enabled_by_overrides(path: &Path, patterns: &[String], base_dir: &Path) -> bool {
    let overrides = get_override_patterns(patterns);
    // Partition the overrides by prefix; the prefix itself is stripped.
    let excludes = overrides
        .iter()
        .filter_map(|p| p.strip_prefix('!').map(str::to_string))
        .collect::<Vec<_>>();
    let force_includes = overrides
        .iter()
        .filter_map(|p| p.strip_prefix('+').map(str::to_string))
        .collect::<Vec<_>>();
    let force_excludes = overrides
        .iter()
        .filter_map(|p| p.strip_prefix('-').map(str::to_string))
        .collect::<Vec<_>>();

    if !force_excludes.is_empty() && matches_any_exact_pattern(path, &force_excludes, base_dir) {
        false
    } else if !force_includes.is_empty()
        && matches_any_exact_pattern(path, &force_includes, base_dir)
    {
        true
    } else {
        excludes.is_empty() || !matches_any_pattern(path, &excludes, base_dir)
    }
}
2636
/// Filters `all_paths` through four pattern groups in a fixed order:
/// plain includes narrow the candidate set (no includes = everything),
/// `!` excludes (glob) remove matches, `+` force-includes (exact) re-add
/// paths from the full set, and `-` force-excludes (exact) win last.
/// Returns the surviving paths as a set.
fn apply_patterns(
    all_paths: &[PathBuf],
    patterns: &[String],
    base_dir: &Path,
) -> std::collections::HashSet<PathBuf> {
    let mut includes = Vec::new();
    let mut excludes = Vec::new();
    let mut force_includes = Vec::new();
    let mut force_excludes = Vec::new();

    // Bucket each pattern by its override prefix (stripped); anything
    // without a prefix is a plain include.
    for p in patterns {
        if let Some(rest) = p.strip_prefix('+') {
            force_includes.push(rest.to_string());
        } else if let Some(rest) = p.strip_prefix('-') {
            force_excludes.push(rest.to_string());
        } else if let Some(rest) = p.strip_prefix('!') {
            excludes.push(rest.to_string());
        } else {
            includes.push(p.clone());
        }
    }

    // Phase 1: includes select the base set (default: all paths).
    let mut result: Vec<PathBuf> = if includes.is_empty() {
        all_paths.to_vec()
    } else {
        all_paths
            .iter()
            .filter(|p| matches_any_pattern(p, &includes, base_dir))
            .cloned()
            .collect()
    };

    // Phase 2: glob excludes.
    if !excludes.is_empty() {
        result.retain(|p| !matches_any_pattern(p, &excludes, base_dir));
    }

    // Phase 3: exact force-includes restore paths from the full list.
    if !force_includes.is_empty() {
        for p in all_paths {
            if !result.contains(p) && matches_any_exact_pattern(p, &force_includes, base_dir) {
                result.push(p.clone());
            }
        }
    }

    // Phase 4: exact force-excludes always win.
    if !force_excludes.is_empty() {
        result.retain(|p| !matches_any_exact_pattern(p, &force_excludes, base_dir));
    }

    result.into_iter().collect()
}
2687
2688fn collect_resource_files(dir: &Path, resource_type: ResourceType) -> Vec<PathBuf> {
2689 match resource_type {
2690 ResourceType::Skills => collect_skill_entries(dir),
2691 ResourceType::Extensions => collect_auto_extension_entries(dir),
2692 ResourceType::Prompts => collect_files_recursive(dir, "md"),
2693 ResourceType::Themes => collect_files_recursive(dir, "json"),
2694 }
2695}
2696
2697fn collect_files_from_paths(paths: &[PathBuf], resource_type: ResourceType) -> Vec<PathBuf> {
2698 let mut out = Vec::new();
2699 for p in paths {
2700 if !p.exists() {
2701 continue;
2702 }
2703 let Ok(stats) = fs::metadata(p) else {
2704 continue;
2705 };
2706 if stats.is_file() {
2707 if resource_type == ResourceType::Extensions && !is_supported_extension_file(p) {
2708 warn!(
2709 path = %p.display(),
2710 "Ignoring unsupported extension file entry; use extension.json, JS/TS entrypoints, *.native.json, or *.wasm"
2711 );
2712 continue;
2713 }
2714 out.push(p.clone());
2715 } else if stats.is_dir() {
2716 out.extend(collect_resource_files(p, resource_type));
2717 }
2718 }
2719 out
2720}
2721
2722fn collect_files_from_manifest_entries(
2723 entries: &[String],
2724 root: &Path,
2725 resource_type: ResourceType,
2726) -> Vec<PathBuf> {
2727 if resource_type == ResourceType::Extensions {
2728 return collect_extension_manifest_entries(entries, root);
2729 }
2730
2731 let plain = entries
2732 .iter()
2733 .filter(|e| !is_pattern(e))
2734 .cloned()
2735 .collect::<Vec<_>>();
2736 let resolved = plain
2737 .iter()
2738 .map(|entry| {
2739 let p = Path::new(entry);
2740 if p.is_absolute() {
2741 p.to_path_buf()
2742 } else {
2743 root.join(entry)
2744 }
2745 })
2746 .collect::<Vec<_>>();
2747
2748 collect_files_from_paths(&resolved, resource_type)
2749}
2750
/// Expands the non-pattern `pi.extensions` entries of a package into
/// extension entrypoints. Plain files must be a supported entrypoint
/// kind; a directory that resolves to the package root itself is kept as
/// a single entry (the package IS the extension), while other
/// directories are scanned for nested extensions.
fn collect_extension_manifest_entries(entries: &[String], root: &Path) -> Vec<PathBuf> {
    let plain = entries
        .iter()
        .filter(|entry| !is_pattern(entry))
        .map(|entry| root.join(entry))
        .collect::<Vec<_>>();

    let mut out = Vec::new();
    // Canonical root identity lets us detect entries like "." that point
    // back at the package root.
    let root_identity = canonical_identity_path(root);
    for path in plain {
        if !path.exists() {
            continue;
        }

        let Ok(stats) = fs::metadata(&path) else {
            continue;
        };
        if stats.is_file() {
            if !is_supported_extension_file(&path) {
                warn!(
                    path = %path.display(),
                    "Ignoring unsupported package.json#pi.extensions entry; use extension.json, JS/TS entrypoints, *.native.json, or *.wasm"
                );
                continue;
            }
            out.push(path);
            continue;
        }

        if !stats.is_dir() {
            // Neither file nor directory (e.g. special file): skip.
            continue;
        }

        if canonical_identity_path(&path) == root_identity {
            // The entry is the package root itself — treat the package as
            // one extension rather than walking it.
            out.push(path);
            continue;
        }

        out.extend(collect_auto_extension_entries(&path));
    }

    out
}
2794
2795fn collect_files_recursive(dir: &Path, ext: &str) -> Vec<PathBuf> {
2796 if !dir.exists() {
2797 return Vec::new();
2798 }
2799
2800 let mut builder = ignore::WalkBuilder::new(dir);
2801 builder
2802 .hidden(true)
2803 .follow_links(true)
2804 .git_global(false)
2805 .git_exclude(false)
2806 .add_custom_ignore_filename(".fdignore")
2807 .filter_entry(|e| e.file_name() != std::ffi::OsStr::new("node_modules"));
2808
2809 let mut out = Vec::new();
2810 for entry in builder.build().filter_map(std::result::Result::ok) {
2811 let path = entry.path();
2812 if path.is_file()
2813 && path
2814 .extension()
2815 .and_then(|e| e.to_str())
2816 .is_some_and(|e| e.eq_ignore_ascii_case(ext))
2817 {
2818 out.push(path.to_path_buf());
2819 }
2820 }
2821 out
2822}
2823
/// Canonicalizes a path for identity comparisons, falling back to the
/// path itself when canonicalization fails (e.g. the path is missing).
fn canonical_identity_path(path: &Path) -> PathBuf {
    match fs::canonicalize(path) {
        Ok(canonical) => canonical,
        Err(_) => path.to_path_buf(),
    }
}
2827
/// Discovers skill files under `dir`: top-level `*.md` files plus nested
/// `SKILL.md` files at any depth. Symlinks are followed, so both the
/// visited directories and the emitted files are deduplicated by their
/// canonical path to avoid cycles and duplicates.
fn collect_skill_entries(dir: &Path) -> Vec<PathBuf> {
    if !dir.exists() {
        return Vec::new();
    }

    // Mutex because the walker's filter_entry closure must be shareable;
    // a poisoned lock is recovered rather than panicking.
    let visited_dirs = std::sync::Mutex::new(std::collections::HashSet::new());
    let mut out = Vec::new();
    let mut seen_files = std::collections::HashSet::new();

    let mut builder = ignore::WalkBuilder::new(dir);
    builder
        .hidden(true)
        .follow_links(true)
        .git_global(false)
        .git_exclude(false)
        .add_custom_ignore_filename(".fdignore")
        .filter_entry(move |entry| {
            let name = entry.file_name().to_string_lossy();
            if name == "node_modules" {
                return false;
            }

            if !entry.path().is_dir() {
                return true;
            }

            // Descend into a directory only the first time its canonical
            // path is seen — this breaks symlink cycles.
            let canonical_dir = canonical_identity_path(entry.path());
            visited_dirs
                .lock()
                .unwrap_or_else(std::sync::PoisonError::into_inner)
                .insert(canonical_dir)
        });

    for entry in builder.build().filter_map(std::result::Result::ok) {
        let path = entry.path();
        if !path.is_file() {
            continue;
        }

        // Depth is measured relative to `dir`: 1 = directly inside it.
        let rel = path.strip_prefix(dir).unwrap_or(path);
        let depth = rel.components().count();
        let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
        let is_top_level_md = depth == 1 && path.extension().and_then(|e| e.to_str()) == Some("md");
        let is_nested_skill = depth > 1 && name == "SKILL.md";
        if !is_top_level_md && !is_nested_skill {
            continue;
        }

        // Deduplicate files reachable via multiple (symlinked) paths.
        let canonical_file = canonical_identity_path(path);
        if seen_files.insert(canonical_file) {
            out.push(path.to_path_buf());
        }
    }

    out
}
2884
/// Alias for [`collect_skill_entries`], kept for naming symmetry with the
/// other `collect_auto_*` discovery helpers.
fn collect_auto_skill_entries(dir: &Path) -> Vec<PathBuf> {
    collect_skill_entries(dir)
}
2888
/// Collects top-level `*.md` prompt files directly inside `dir` (no
/// recursion), skipping dotfiles and `node_modules`, sorted by path.
fn collect_auto_prompt_entries(dir: &Path) -> Vec<PathBuf> {
    let Ok(entries) = fs::read_dir(dir) else {
        // Missing or unreadable directory yields no prompts.
        return Vec::new();
    };

    let mut out: Vec<PathBuf> = entries
        .flatten()
        .filter(|entry| {
            let name = entry.file_name();
            let name = name.to_string_lossy();
            !name.starts_with('.') && name != "node_modules"
        })
        .map(|entry| entry.path())
        .filter(|path| {
            fs::metadata(path).is_ok_and(|stats| stats.is_file())
                && path.extension().and_then(|e| e.to_str()) == Some("md")
        })
        .collect();
    out.sort();
    out
}
2914
/// Collects top-level `*.json` theme files directly inside `dir` (no
/// recursion), skipping dotfiles and `node_modules`, sorted by path.
fn collect_auto_theme_entries(dir: &Path) -> Vec<PathBuf> {
    let Ok(entries) = fs::read_dir(dir) else {
        // Missing or unreadable directory yields no themes.
        return Vec::new();
    };

    let mut out: Vec<PathBuf> = entries
        .flatten()
        .filter(|entry| {
            let name = entry.file_name();
            let name = name.to_string_lossy();
            !name.starts_with('.') && name != "node_modules"
        })
        .map(|entry| entry.path())
        .filter(|path| {
            fs::metadata(path).is_ok_and(|stats| stats.is_file())
                && path.extension().and_then(|e| e.to_str()) == Some("json")
        })
        .collect();
    out.sort();
    out
}
2940
/// Accepts the file kinds loadable as extensions: an `extension.json`
/// manifest, `*.native.json`, `*.wasm`, or a JS/TS entrypoint.
fn is_supported_extension_file(path: &Path) -> bool {
    const SCRIPT_EXTS: [&str; 7] = ["ts", "tsx", "js", "mjs", "cjs", "mts", "cts"];

    let Some(name) = path.file_name().and_then(OsStr::to_str) else {
        return false;
    };
    if name.eq_ignore_ascii_case("extension.json") || name.ends_with(".native.json") {
        return true;
    }

    path.extension()
        .and_then(OsStr::to_str)
        .is_some_and(|ext| {
            ext.eq_ignore_ascii_case("wasm")
                || SCRIPT_EXTS.iter().any(|s| ext.eq_ignore_ascii_case(s))
        })
}
2962
/// Resolves the extension entrypoint(s) for a single directory, trying in
/// order: an `extension.json` manifest (the directory itself is the
/// extension), a `package.json` with `pi.extensions` (entries expanded
/// and pattern-filtered), an `index.native.json`, then conventional
/// `index.*` JS/TS entrypoints. Returns `None` when nothing matches or a
/// manifest is invalid (which is logged).
fn resolve_extension_entries(dir: &Path) -> Option<Vec<PathBuf>> {
    match load_extension_manifest(dir) {
        Ok(Some(_)) => {
            // The directory carries its own extension manifest.
            return Some(vec![dir.to_path_buf()]);
        }
        Ok(None) => {}
        Err(err) => {
            warn!(path = %dir.display(), "Invalid extension manifest: {err}");
            return None;
        }
    }

    let package_json_path = dir.join("package.json");
    if package_json_path.exists() {
        match read_pi_manifest(dir) {
            Ok(Some(manifest)) => {
                if let Some(exts) = manifest.extensions {
                    // Plain entries become concrete files/dirs; pattern
                    // entries then filter that set.
                    let all_files = collect_extension_manifest_entries(&exts, dir);
                    let patterns = exts
                        .iter()
                        .filter(|entry| is_pattern(entry))
                        .cloned()
                        .collect::<Vec<_>>();
                    let mut entries = if patterns.is_empty() {
                        all_files
                    } else {
                        let enabled = apply_patterns(&all_files, &patterns, dir);
                        enabled.into_iter().collect::<Vec<_>>()
                    };
                    entries.sort();
                    entries.dedup();
                    return Some(entries);
                }
            }
            Ok(None) => {}
            Err(err) => {
                warn!(path = %package_json_path.display(), "Invalid package manifest: {err}");
                return None;
            }
        }
    }

    // Fallbacks: conventional entrypoint files in priority order.
    let index_native = dir.join("index.native.json");
    if index_native.exists() {
        return Some(vec![index_native]);
    }

    for index_name in [
        "index.ts",
        "index.tsx",
        "index.js",
        "index.mjs",
        "index.cjs",
        "index.mts",
        "index.cts",
    ] {
        let candidate = dir.join(index_name);
        if candidate.exists() {
            return Some(vec![candidate]);
        }
    }

    None
}
3027
3028fn suppress_root_extension_walk(dir: &Path) -> bool {
3029 match load_extension_manifest(dir) {
3030 Ok(Some(_)) | Err(_) => return true,
3031 Ok(None) => {}
3032 }
3033
3034 let package_json_path = dir.join("package.json");
3035 if !package_json_path.exists() {
3036 return false;
3037 }
3038
3039 match read_pi_manifest(dir) {
3040 Ok(Some(manifest)) => manifest.extensions.is_some(),
3041 Ok(None) => false,
3042 Err(_) => true,
3043 }
3044}
3045
/// Discovers extension entrypoints under `dir`. First resolves `dir`
/// itself; when the directory is a self-contained package (see
/// [`suppress_root_extension_walk`]) the result stops there. Otherwise a
/// depth-1 walk picks up loose supported files and resolves each child
/// directory as a potential extension. Output is sorted and deduplicated.
fn collect_auto_extension_entries(dir: &Path) -> Vec<PathBuf> {
    if !dir.exists() {
        return Vec::new();
    }

    let mut out = Vec::new();
    let suppress_root_walk = suppress_root_extension_walk(dir);
    if let Some(entries) = resolve_extension_entries(dir) {
        out.extend(entries);
    }

    if suppress_root_walk {
        out.sort();
        out.dedup();
        return out;
    }

    let mut builder = ignore::WalkBuilder::new(dir);
    builder
        .hidden(true)
        .follow_links(true)
        .max_depth(Some(1))
        .git_ignore(false)
        .git_global(false)
        .git_exclude(false)
        .add_custom_ignore_filename(".fdignore")
        .filter_entry(|e| e.file_name() != std::ffi::OsStr::new("node_modules"));

    // skip(1) drops the walk's first yield, which is `dir` itself —
    // already handled by resolve_extension_entries above.
    for entry in builder.build().skip(1).filter_map(std::result::Result::ok) {
        let path = entry.path().to_path_buf();
        let Ok(stats) = fs::metadata(&path) else {
            continue;
        };
        if stats.is_file() {
            if is_supported_extension_file(&path) {
                out.push(path);
            }
            continue;
        }
        if stats.is_dir() {
            if let Some(entries) = resolve_extension_entries(&path) {
                out.extend(entries);
            }
        }
    }
    out.sort();
    out.dedup();
    out
}
3095
3096fn read_installed_npm_version(installed_path: &Path) -> Option<String> {
3097 let package_json = installed_path.join("package.json");
3098 let raw = fs::read_to_string(package_json).ok()?;
3099 let json: Value = serde_json::from_str(&raw).ok()?;
3100 json.get("version")
3101 .and_then(Value::as_str)
3102 .map(str::to_string)
3103}
3104
/// Queries the npm registry for the latest published version of the
/// package named in `spec` (any pinned version in the spec is ignored).
/// `installed_path` is only used for error messages. Returns a tool error
/// on network failure, a non-2xx status, or a malformed registry
/// response.
async fn get_latest_npm_version(installed_path: &Path, spec: &str) -> Result<String> {
    let (name, _) = parse_npm_spec(spec);
    let url = format!("https://registry.npmjs.org/{name}/latest");
    let client = crate::http::client::Client::new();
    // Box::pin keeps the request future's size off this future's stack.
    let response = Box::pin(client.get(&url).send()).await.map_err(|e| {
        Error::tool(
            "npm",
            format!(
                "Failed to fetch npm registry for {}: {e}",
                installed_path.display()
            ),
        )
    })?;

    let status = response.status();
    let body = response.text().await.map_err(|e| {
        Error::tool(
            "npm",
            format!(
                "Failed to read npm registry response for {}: {e}",
                installed_path.display()
            ),
        )
    })?;

    // Any non-2xx status is an error; include the body for diagnostics.
    if !(200..300).contains(&status) {
        return Err(Error::tool(
            "npm",
            format!("npm registry error (HTTP {status}): {body}"),
        ));
    }

    let data: Value = serde_json::from_str(&body).map_err(|e| {
        Error::tool(
            "npm",
            format!(
                "Failed to parse npm registry response for {}: {e}",
                installed_path.display()
            ),
        )
    })?;
    data.get("version")
        .and_then(Value::as_str)
        .map(str::to_string)
        .ok_or_else(|| Error::tool("npm", "Registry response missing version"))
}
3151
/// A package source specification after parsing, classified by origin.
#[derive(Debug, Clone)]
enum ParsedSource {
    /// An npm package (`npm:` prefix or resolved alias).
    Npm {
        /// The full npm spec as given (may include a version).
        spec: String,
        /// The bare package name without any version suffix.
        name: String,
        /// Whether the spec carried an explicit version.
        pinned: bool,
    },
    /// A git repository (remote URL, known git host, or local repo path).
    Git {
        /// The argument handed to `git clone`.
        clone_source: String,
        /// Normalized repo identity (`host/path`, or a local path).
        repo: String,
        /// Host component ("local" for on-disk repositories).
        host: String,
        /// Path component (a hash-derived key for local repositories).
        path: String,
        /// Explicit ref (branch/tag/commit) parsed from an `@` suffix.
        r#ref: Option<String>,
        /// Whether an explicit ref was given.
        pinned: bool,
    },
    /// A plain local filesystem path.
    Local {
        path: PathBuf,
    },
}
3171
/// Classifies a raw source string as npm, git, or local.
///
/// Checks run in priority order: explicit `npm:`/`git:` prefixes, then
/// git-looking URLs, then an extension-index alias lookup (whose result
/// is re-parsed), and finally a local path relative to `cwd`.
fn parse_source(source: &str, cwd: &Path) -> ParsedSource {
    let source = source.trim();
    if let Some(rest) = source.strip_prefix("npm:") {
        let spec = rest.trim().to_string();
        let (name, version) = parse_npm_spec(&spec);
        return ParsedSource::Npm {
            spec,
            name,
            // A spec with an explicit version is considered pinned.
            pinned: version.is_some(),
        };
    }

    if let Some(rest) = source.strip_prefix("git:") {
        return parse_git_source(rest.trim(), cwd);
    }

    if looks_like_git_url(source) || source.starts_with("https://") || source.starts_with("http://")
    {
        return parse_git_source(source, cwd);
    }

    // An alias may expand to any of the above forms; recurse on it.
    if let Some(resolved) = resolve_install_source_alias(source, cwd) {
        return parse_source(&resolved, cwd);
    }

    ParsedSource::Local {
        path: resolve_local_path(source, cwd),
    }
}
3201
3202fn validate_non_empty_source<'a>(source: &'a str, label: &str) -> Result<&'a str> {
3203 let trimmed = source.trim();
3204 if trimmed.is_empty() {
3205 return Err(Error::config(format!("{label} must be non-empty")));
3206 }
3207 Ok(trimmed)
3208}
3209
/// Attempts to expand `source` via the extension index's alias table.
/// Returns `None` for local-looking paths, for sources that already exist
/// on disk relative to `cwd`, when the index has no (different) mapping,
/// or when the lookup itself fails (logged at debug level).
fn resolve_install_source_alias(source: &str, cwd: &Path) -> Option<String> {
    if source.is_empty() || looks_like_local_path(source) {
        return None;
    }

    // An existing path wins over any alias of the same name.
    if resolve_local_path(source, cwd).exists() {
        return None;
    }

    match ExtensionIndexStore::default_store().resolve_install_source(source) {
        Ok(Some(resolved)) if resolved != source => Some(resolved),
        Ok(_) => None,
        Err(err) => {
            // Best-effort lookup: fall back to the literal source.
            tracing::debug!(
                "failed to resolve install source alias via extension index (using source as-is): {err}"
            );
            None
        }
    }
}
3231
3232fn git_clone_source(source: &str, cwd: &Path) -> String {
3233 let spec = source.trim().strip_prefix("git:").unwrap_or(source).trim();
3234 let (repo_raw, _) = split_git_spec_ref(spec);
3235 if looks_like_local_path(repo_raw) {
3236 local_path_from_spec(repo_raw, cwd)
3237 .to_string_lossy()
3238 .to_string()
3239 } else {
3240 repo_raw.to_string()
3241 }
3242}
3243
/// Splits a git spec into (repo, optional ref), where the ref is attached
/// with `@`. Local paths split at the first `@`; remote specs scan for
/// the last `@` that is not part of the URL scheme/authority or an
/// scp-style `user@host:` prefix, so `git@host:path@tag` yields
/// (`git@host:path`, `tag`).
fn split_git_spec_ref(spec: &str) -> (&str, Option<&str>) {
    let spec = spec.trim();
    if spec.is_empty() {
        return ("", None);
    }

    if looks_like_local_path(spec) {
        // Local paths cannot contain a credentials `@`, so split at the
        // first one; an empty ref part is ignored.
        let mut parts = spec.splitn(2, '@');
        let repo = parts.next().unwrap_or("").trim();
        let r#ref = parts.next().map(str::trim).filter(|part| !part.is_empty());
        return (repo, r#ref);
    }

    // `@` signs before this offset belong to the URL itself (scheme,
    // userinfo, scp-style host) and can never start a ref.
    let reserved_prefix_end = git_ref_reserved_prefix_end(spec);
    // Scan right-to-left so the ref is the last eligible `@` segment.
    for (idx, _) in spec.match_indices('@').rev() {
        if idx < reserved_prefix_end {
            continue;
        }
        let repo = spec[..idx].trim();
        let r#ref = spec[idx + 1..].trim();
        if !repo.is_empty() && !r#ref.is_empty() {
            return (repo, Some(r#ref));
        }
    }

    (spec, None)
}
3273
/// Returns the byte offset in `spec` before which an `@` can never start
/// a ref suffix: for URL specs this covers the scheme and authority; for
/// scp-like `user@host:path` specs it covers everything up to the host
/// colon. Plain specs have no reserved prefix.
fn git_ref_reserved_prefix_end(spec: &str) -> usize {
    // URL form: the authority ends at the first `/` after `://`.
    if let Some(scheme_idx) = spec.find("://") {
        let authority_start = scheme_idx + 3;
        let authority = &spec[authority_start..];
        return match authority.find('/') {
            Some(idx) => authority_start + idx,
            None => spec.len(),
        };
    }

    // scp-like form: reserve `user@host:` when the colon precedes any `/`.
    if let Some(at_idx) = spec.find('@') {
        let tail = &spec[at_idx + 1..];
        if let Some(colon_idx) = tail.find(':') {
            if tail.find('/').is_none_or(|slash| colon_idx < slash) {
                return at_idx + 1 + colon_idx;
            }
        }
    }

    0
}
3294
/// Normalizes a remote git repo spec into `(repo, host, path)`, where
/// `repo` is `host/path` (or just `host` when the path is empty) with any
/// trailing `.git` removed. Three strategies are tried in order: a proper
/// URL parse, an scp-style `user@host:path` parse, and a last-resort
/// slash-segment split.
fn normalize_remote_git_repo(repo_raw: &str) -> (String, String, String) {
    let repo_raw = repo_raw.trim();

    // Strategy 1: a well-formed URL (https://host/owner/repo.git).
    if let Ok(url) = url::Url::parse(repo_raw) {
        let host = url.host_str().unwrap_or("").to_string();
        let path = url
            .path()
            .trim_matches('/')
            .trim_end_matches(".git")
            .to_string();
        let repo = if path.is_empty() {
            host.clone()
        } else {
            format!("{host}/{path}")
        };
        return (repo, host, path);
    }

    // Strategy 2: scp-style user@host:path — identified by a colon that
    // precedes any slash and an `@` in the part before the colon.
    if !repo_raw.contains("://") {
        let first_slash = repo_raw.find('/');
        if let Some(colon_idx) = repo_raw.find(':') {
            if first_slash.is_none_or(|slash| colon_idx < slash) {
                let host_part = &repo_raw[..colon_idx];
                if let Some(at_idx) = host_part.rfind('@') {
                    let host = host_part[at_idx + 1..].trim().to_string();
                    // Drop empty / `.` / `..` segments to keep the key clean.
                    let path = repo_raw[colon_idx + 1..]
                        .trim()
                        .trim_matches('/')
                        .trim_end_matches(".git")
                        .split('/')
                        .filter(|s| !s.is_empty() && *s != "." && *s != "..")
                        .collect::<Vec<_>>()
                        .join("/");
                    let repo = if path.is_empty() {
                        host.clone()
                    } else {
                        format!("{host}/{path}")
                    };
                    return (repo, host, path);
                }
            }
        }
    }

    // Strategy 3: fallback — strip any http(s) scheme and split on `/`,
    // taking the first segment (minus userinfo) as the host.
    let normalized = repo_raw
        .trim_start_matches("https://")
        .trim_start_matches("http://")
        .trim_end_matches(".git")
        .to_string();

    let segments = normalized
        .split('/')
        .filter(|segment| !segment.is_empty() && *segment != "." && *segment != "..")
        .collect::<Vec<_>>();

    let host = segments
        .first()
        .copied()
        .unwrap_or("")
        .rsplit('@')
        .next()
        .unwrap_or("")
        .to_string();
    let path = if segments.len() >= 2 {
        segments[1..].join("/")
    } else {
        String::new()
    };
    let repo = if path.is_empty() {
        host.clone()
    } else {
        format!("{host}/{path}")
    };

    (repo, host, path)
}
3373
3374fn normalized_git_repo_key(spec: &str) -> String {
3375 let (repo_raw, _) = split_git_spec_ref(spec);
3376 if looks_like_local_path(repo_raw) {
3377 repo_raw.to_string()
3378 } else {
3379 let (repo, _, _) = normalize_remote_git_repo(repo_raw);
3380 repo
3381 }
3382}
3383
/// Parses a git spec into [`ParsedSource::Git`]. Remote specs are
/// normalized to `(repo, host, path)`; local repositories get
/// `host = "local"` and a SHA-256-derived 16-hex-char path key so that
/// distinct on-disk repos map to distinct cache locations.
fn parse_git_source(spec: &str, cwd: &Path) -> ParsedSource {
    let (repo_raw, parsed_ref) = split_git_spec_ref(spec);
    let r#ref = parsed_ref.map(str::to_string);
    // An explicit ref pins the source to that branch/tag/commit.
    let pinned = r#ref.is_some();
    let clone_source = git_clone_source(spec, cwd);

    let (repo, host, path) = if looks_like_local_path(repo_raw) {
        let repo_path = local_path_from_spec(repo_raw, cwd);

        // Hash the resolved path so the identity key is filesystem-safe
        // and unique per local repository.
        let mut hasher = Sha256::new();
        hasher.update(repo_path.to_string_lossy().as_bytes());
        let digest = hasher.finalize();
        let key = hex_encode(&digest)[..16].to_string();

        (
            repo_path.to_string_lossy().to_string(),
            "local".to_string(),
            key,
        )
    } else {
        normalize_remote_git_repo(repo_raw)
    };

    ParsedSource::Git {
        clone_source,
        repo,
        host,
        path,
        r#ref,
        pinned,
    }
}
3420
/// Heuristic: the source names a well-known git host followed by a path
/// (with or without an http(s) scheme), e.g. `github.com/owner/repo`.
///
/// Uses `strip_prefix` + a one-char check instead of building a
/// `format!("{host}/")` string per host, avoiding an allocation on every
/// probe of this hot classification path. Behavior is unchanged: the
/// host must be followed by `/` (so `github.company.com/x` does not
/// match).
fn looks_like_git_url(source: &str) -> bool {
    const HOSTS: [&str; 4] = ["github.com", "gitlab.com", "bitbucket.org", "codeberg.org"];
    let normalized = source
        .trim_start_matches("https://")
        .trim_start_matches("http://");
    HOSTS.iter().any(|host| {
        normalized
            .strip_prefix(host)
            .is_some_and(|rest| rest.starts_with('/'))
    })
}
3430
/// Matches `X:/…` or `X:\…` — a Windows drive-letter absolute path.
fn looks_like_windows_drive_absolute_path(spec: &str) -> bool {
    matches!(
        spec.as_bytes(),
        [drive, b':', b'/' | b'\\', ..] if drive.is_ascii_alphabetic()
    )
}
3438
3439fn looks_like_local_path(spec: &str) -> bool {
3440 let spec = spec.trim();
3441 spec == "."
3442 || spec == ".."
3443 || spec.starts_with("file://")
3444 || spec.starts_with("\\\\")
3445 || spec.starts_with('/')
3446 || spec.starts_with(".\\")
3447 || spec.starts_with("..\\")
3448 || spec.starts_with("./")
3449 || spec.starts_with("../")
3450 || spec.starts_with('~')
3451 || looks_like_windows_drive_absolute_path(spec)
3452}
3453
3454fn local_path_from_spec(spec: &str, cwd: &Path) -> PathBuf {
3455 let spec = spec.trim();
3458 if let Some(rest) = spec.strip_prefix("file://") {
3459 return resolve_local_path(&file_url_local_path(rest), cwd);
3461 }
3462 resolve_local_path(spec, cwd)
3463}
3464
/// Converts the remainder of a `file://` URL (everything after the
/// scheme) into a local path string, handling Windows drive letters, a
/// `localhost` authority, and UNC-style hosts.
fn file_url_local_path(path: &str) -> String {
    let path = path.trim();

    // `file://C:/…` — drive letter directly after the scheme.
    if looks_like_windows_drive_absolute_path(path) {
        return path.to_string();
    }

    // `file:///C:/…` — drive letter after an empty authority.
    if let Some(stripped) = path
        .strip_prefix('/')
        .filter(|stripped| looks_like_windows_drive_absolute_path(stripped))
    {
        return stripped.to_string();
    }

    // `file://localhost` with no path means the filesystem root.
    if path.eq_ignore_ascii_case("localhost") {
        return "/".to_string();
    }
    // `file://localhost/…` — strip the authority, keeping drive-letter
    // forms bare and making everything else absolute.
    if let Some((host, stripped)) = path.split_once('/') {
        if host.eq_ignore_ascii_case("localhost") {
            if looks_like_windows_drive_absolute_path(stripped) {
                return stripped.to_string();
            }
            if let Some(drive_path) = stripped
                .strip_prefix('/')
                .filter(|drive_path| looks_like_windows_drive_absolute_path(drive_path))
            {
                return drive_path.to_string();
            }
            return format!("/{stripped}");
        }
    }

    // Any other non-empty host becomes a UNC-style `//host/...` path.
    if !path.is_empty() && !path.starts_with('/') {
        return format!("//{path}");
    }

    path.to_string()
}
3503
3504fn resolve_local_path(input: &str, cwd: &Path) -> PathBuf {
3505 let trimmed = input.trim();
3506 if spec_is_platform_absolute(trimmed) {
3507 return PathBuf::from(trimmed);
3508 }
3509 if trimmed == "~" {
3510 return normalize_dot_segments(&dirs::home_dir().unwrap_or_else(|| cwd.to_path_buf()));
3511 }
3512 if let Some(rest) = trimmed.strip_prefix("~/") {
3513 return normalize_dot_segments(
3514 &dirs::home_dir()
3515 .unwrap_or_else(|| cwd.to_path_buf())
3516 .join(rest),
3517 );
3518 }
3519 if trimmed.starts_with('~') {
3520 return normalize_dot_segments(
3521 &dirs::home_dir()
3522 .unwrap_or_else(|| cwd.to_path_buf())
3523 .join(trimmed.trim_start_matches('~')),
3524 );
3525 }
3526 normalize_dot_segments(&cwd.join(trimmed))
3527}
3528
3529fn spec_is_platform_absolute(spec: &str) -> bool {
3530 Path::new(spec).is_absolute()
3531 || spec.starts_with("\\\\")
3532 || looks_like_windows_drive_absolute_path(spec)
3533}
3534
/// Lexically normalizes `path`: removes `.` segments and resolves `..`
/// against preceding normal segments. Leading `..` segments are kept for
/// relative paths but dropped once a root/prefix anchors the path. No
/// filesystem access (symlinks are not resolved).
fn normalize_dot_segments(path: &Path) -> PathBuf {
    use std::ffi::{OsStr, OsString};
    use std::path::Component;

    let mut anchored = PathBuf::new();
    let mut is_anchored = false;
    let mut stack: Vec<OsString> = Vec::new();

    for piece in path.components() {
        match piece {
            Component::Prefix(prefix) => {
                anchored.push(prefix.as_os_str());
                is_anchored = true;
            }
            Component::RootDir => {
                anchored.push(piece.as_os_str());
                is_anchored = true;
            }
            Component::CurDir => {}
            Component::ParentDir => {
                // Pop a real segment when one is available; otherwise the
                // `..` either survives (relative path) or is discarded
                // (already anchored at a root/prefix).
                let can_pop =
                    matches!(stack.last(), Some(last) if last.as_os_str() != OsStr::new(".."));
                if can_pop {
                    stack.pop();
                } else if !is_anchored {
                    stack.push(OsString::from(".."));
                }
            }
            Component::Normal(segment) => stack.push(segment.to_os_string()),
        }
    }

    anchored.extend(stack);
    anchored
}
3575
/// Splits an npm spec into `(name, version)`. Scoped names (`@scope/name`)
/// keep their leading `@`; the version is whatever follows the last relevant
/// `@`. A trailing `@` with nothing after it is treated as part of the name.
fn parse_npm_spec(spec: &str) -> (String, Option<String>) {
    let spec = spec.trim();
    if spec.is_empty() {
        return (String::new(), None);
    }

    // For scoped packages the version separator is the last `@` after the
    // scope marker; otherwise it is the first `@`.
    let split_at = if let Some(rest) = spec.strip_prefix('@') {
        rest.rfind('@').map(|idx| idx + 1)
    } else {
        spec.find('@')
    };

    if let Some(pos) = split_at {
        if pos + 1 < spec.len() {
            return (spec[..pos].to_string(), Some(spec[pos + 1..].to_string()));
        }
    }
    (spec.to_string(), None)
}
3593
3594fn ensure_npm_project(root: &Path) -> Result<()> {
3595 fs::create_dir_all(root)?;
3596 ensure_git_ignore(root)?;
3597 let package_json = root.join("package.json");
3598 if !package_json.exists() {
3599 let value = serde_json::json!({ "name": "pi-packages", "private": true });
3600 fs::write(&package_json, serde_json::to_string_pretty(&value)?)?;
3601 }
3602 Ok(())
3603}
3604
3605fn ensure_git_ignore(dir: &Path) -> Result<()> {
3606 fs::create_dir_all(dir)?;
3607 let ignore_path = dir.join(".gitignore");
3608 if !ignore_path.exists() {
3609 fs::write(ignore_path, "*\n!.gitignore\n")?;
3610 }
3611 Ok(())
3612}
3613
/// Walks upward from `target_dir`'s parent, removing now-empty directories,
/// and stops at `root` (which is never removed), at the first non-empty
/// directory, or when a path escapes `root`. Removal failures are ignored.
fn prune_empty_git_parents(target_dir: &Path, root: &Path) {
    let Ok(root) = root.canonicalize() else {
        return;
    };

    let mut cursor = target_dir.parent().map(Path::to_path_buf);
    while let Some(dir) = cursor {
        let Ok(canon) = dir.canonicalize() else {
            return;
        };
        // Never delete the root itself, and never follow paths outside it.
        if canon == root || !canon.starts_with(&root) {
            return;
        }
        let Ok(mut entries) = fs::read_dir(&dir) else {
            return;
        };
        if entries.next().is_some() {
            return; // first non-empty ancestor ends the pruning walk
        }
        let _ = fs::remove_dir(&dir);
        cursor = dir.parent().map(Path::to_path_buf);
    }
}
3635
3636fn run_command<I, S>(program: &str, args: I, cwd: Option<&Path>) -> Result<()>
3637where
3638 I: IntoIterator<Item = S>,
3639 S: AsRef<OsStr>,
3640{
3641 let mut cmd = Command::new(program);
3642 cmd.args(args)
3643 .stdin(Stdio::null())
3644 .stdout(Stdio::piped())
3645 .stderr(Stdio::piped());
3646 if let Some(cwd) = cwd {
3647 cmd.current_dir(cwd);
3648 }
3649
3650 let output = cmd
3651 .output()
3652 .map_err(|e| Error::tool(program, format!("Failed to spawn {program}: {e}")))?;
3653
3654 if !output.status.success() {
3655 let stdout = String::from_utf8_lossy(&output.stdout);
3656 let stderr = String::from_utf8_lossy(&output.stderr);
3657 let mut msg = format!("Command failed: {program}");
3658 if let Some(code) = output.status.code() {
3659 let _ = write!(msg, " (exit {code})");
3660 }
3661 if !stdout.trim().is_empty() {
3662 let _ = write!(msg, "\nstdout:\n{stdout}");
3663 }
3664 if !stderr.trim().is_empty() {
3665 let _ = write!(msg, "\nstderr:\n{stderr}");
3666 }
3667 return Err(Error::tool(program, msg));
3668 }
3669
3670 Ok(())
3671}
3672
/// Runs `program` with `args` (optionally in `cwd`) and returns its trimmed
/// stdout on success.
///
/// stdin is closed and both stdout/stderr are captured. On a non-zero exit
/// status, returns a tool error whose message includes the exit code (when
/// available) and any non-empty stdout/stderr from the command.
fn run_command_capture<I, S>(program: &str, args: I, cwd: Option<&Path>) -> Result<String>
where
    I: IntoIterator<Item = S>,
    S: AsRef<OsStr>,
{
    let mut cmd = Command::new(program);
    cmd.args(args)
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped());
    if let Some(cwd) = cwd {
        cmd.current_dir(cwd);
    }

    // Spawn failure (e.g. binary not found) is reported separately from a
    // non-zero exit.
    let output = cmd
        .output()
        .map_err(|e| Error::tool(program, format!("Failed to spawn {program}: {e}")))?;

    if !output.status.success() {
        let stdout = String::from_utf8_lossy(&output.stdout);
        let stderr = String::from_utf8_lossy(&output.stderr);
        let mut msg = format!("Command failed: {program}");
        // Exit code may be absent (e.g. killed by a signal on Unix).
        if let Some(code) = output.status.code() {
            let _ = write!(msg, " (exit {code})");
        }
        if !stdout.trim().is_empty() {
            let _ = write!(msg, "\nstdout:\n{stdout}");
        }
        if !stderr.trim().is_empty() {
            let _ = write!(msg, "\nstderr:\n{stderr}");
        }
        return Err(Error::tool(program, msg));
    }

    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}
3709
3710const fn scope_label(scope: PackageScope) -> &'static str {
3711 match scope {
3712 PackageScope::User => "user",
3713 PackageScope::Project => "project",
3714 PackageScope::Temporary => "temporary",
3715 }
3716}
3717
3718const fn trust_state_label(state: PackageEntryTrustState) -> &'static str {
3719 match state {
3720 PackageEntryTrustState::Trusted => "trusted",
3721 PackageEntryTrustState::Rejected => "rejected",
3722 }
3723}
3724
3725fn verification_error(code: &str, reason: &str, remediation: &str) -> Error {
3726 Error::tool(
3727 "package_manager",
3728 format!(
3729 "Package lock/provenance verification failed [{code}]: {reason}\nRemediation: {remediation}"
3730 ),
3731 )
3732}
3733
/// Decides whether `candidate` may replace `existing` in the package lockfile.
///
/// Returns a `LockTransitionPlan` (trust-state transition plus reason codes)
/// when the change is permitted for `action`, or a `PackageLockMismatch`
/// describing why it is rejected. A missing `existing` entry is always
/// accepted as `first_seen`.
pub fn evaluate_lock_transition(
    existing: Option<&PackageLockEntry>,
    candidate: &PackageLockEntry,
    action: PackageLockAction,
) -> std::result::Result<LockTransitionPlan, PackageLockMismatch> {
    // First sighting: nothing to verify against, trust immediately.
    let Some(existing) = existing else {
        return Ok(LockTransitionPlan {
            reason_codes: vec!["first_seen".to_string()],
            from_state: "untracked".to_string(),
            to_state: "trusted".to_string(),
        });
    };

    // Whether this action is allowed to rewrite the entry (see
    // `allow_lock_entry_update`): only updates of unpinned npm/git sources.
    let allow_mutation = allow_lock_entry_update(candidate, action);

    // 1) Source identity (kind + spec string) must be stable.
    if (existing.source_kind != candidate.source_kind || existing.source != candidate.source)
        && !allow_mutation
    {
        return Err(PackageLockMismatch {
            code: "provenance_mismatch",
            reason: format!(
                "source identity changed for {}: previous='{}' ({:?}), current='{}' ({:?})",
                candidate.identity,
                existing.source,
                existing.source_kind,
                candidate.source,
                candidate.source_kind
            ),
            remediation: format!(
                "Review the source change, then run `pi remove {}` and `pi install {}` to re-establish trust.",
                candidate.source, candidate.source
            ),
        });
    }

    // 2) Resolved provenance (e.g. the concrete version/commit) must be stable.
    if existing.resolved != candidate.resolved && !allow_mutation {
        return Err(PackageLockMismatch {
            code: "provenance_mismatch",
            reason: format!(
                "resolved provenance changed for {} while source is immutable in this operation",
                candidate.identity
            ),
            remediation: format!(
                "Use `pi update {}` for unpinned sources, or reinstall after intentional provenance changes.",
                candidate.source
            ),
        });
    }

    // 3) Content digest must be stable.
    if existing.digest_sha256 != candidate.digest_sha256 && !allow_mutation {
        return Err(PackageLockMismatch {
            code: "digest_mismatch",
            reason: format!(
                "digest changed for {}: expected {}, got {}",
                candidate.identity, existing.digest_sha256, candidate.digest_sha256
            ),
            remediation: format!(
                "Inspect upstream changes. If expected, run `pi remove {}` then `pi install {}` to trust the new digest.",
                candidate.source, candidate.source
            ),
        });
    }

    // Accepted: record what (if anything) changed for audit purposes.
    let mut reason_codes = Vec::new();
    if existing.resolved != candidate.resolved {
        reason_codes.push("provenance_changed".to_string());
    }
    if existing.digest_sha256 != candidate.digest_sha256 {
        reason_codes.push("digest_changed".to_string());
    }
    if reason_codes.is_empty() {
        reason_codes.push("verified".to_string());
    }

    Ok(LockTransitionPlan {
        reason_codes,
        from_state: trust_state_label(existing.trust_state).to_string(),
        to_state: "trusted".to_string(),
    })
}
3814
3815const fn allow_lock_entry_update(candidate: &PackageLockEntry, action: PackageLockAction) -> bool {
3816 match action {
3817 PackageLockAction::Install => false,
3818 PackageLockAction::Update => match candidate.resolved {
3819 PackageResolvedProvenance::Npm { pinned, .. }
3820 | PackageResolvedProvenance::Git { pinned, .. } => !pinned,
3821 PackageResolvedProvenance::Local { .. } => false,
3822 },
3823 }
3824}
3825
3826pub fn sort_lock_entries(entries: &mut [PackageLockEntry]) {
3827 entries.sort_by(|left, right| {
3828 left.identity
3829 .cmp(&right.identity)
3830 .then_with(|| left.source.cmp(&right.source))
3831 });
3832}
3833
3834pub fn read_package_lockfile(path: &Path) -> Result<PackageLockfile> {
3835 if !path.exists() {
3836 return Ok(PackageLockfile::default());
3837 }
3838
3839 let content = fs::read_to_string(path)?;
3840 let mut lockfile: PackageLockfile = serde_json::from_str(&content).map_err(|err| {
3841 Error::config(format!(
3842 "Invalid package lockfile JSON in {}: {err}",
3843 path.display()
3844 ))
3845 })?;
3846 if lockfile.schema.trim().is_empty() {
3847 lockfile.schema = PACKAGE_LOCK_SCHEMA.to_string();
3848 }
3849 sort_lock_entries(&mut lockfile.entries);
3850 Ok(lockfile)
3851}
3852
3853pub fn write_package_lockfile_atomic(path: &Path, lockfile: &PackageLockfile) -> Result<()> {
3854 let value = serde_json::to_value(lockfile)?;
3855 write_settings_json_atomic(path, &value)
3856}
3857
/// True when `value` looks like an exact npm version rather than a range:
/// non-empty and free of range operators, wildcards, and whitespace.
fn is_exact_npm_version(value: &str) -> bool {
    const RANGE_MARKERS: &[char] = &['^', '~', '>', '<', '=', '*', 'x', 'X', '|', ' ', '\t'];
    !value.is_empty() && !value.contains(RANGE_MARKERS)
}
3867
3868pub fn digest_package_path(path: &Path) -> Result<String> {
3869 if path.is_file() {
3870 let mut hasher = Sha256::new();
3871 hasher.update(b"file\0");
3872 let file_name = path
3873 .file_name()
3874 .and_then(|name| name.to_str())
3875 .unwrap_or("entry");
3876 hasher.update(file_name.as_bytes());
3877 hasher.update(b"\0");
3878 let bytes = fs::read(path)?
3879 .into_iter()
3880 .filter(|byte| *byte != b'\r')
3881 .collect::<Vec<_>>();
3882 hasher.update(&bytes);
3883 hasher.update(b"\0");
3884 return Ok(hex_encode(hasher.finalize().as_slice()));
3885 }
3886
3887 if !path.is_dir() {
3888 return Err(Error::tool(
3889 "package_manager",
3890 format!(
3891 "Cannot compute digest for non-file/non-directory path: {}",
3892 path.display()
3893 ),
3894 ));
3895 }
3896
3897 let mut files = Vec::new();
3898 collect_digest_files_recursive(path, path, &mut files)?;
3899 files.sort_by_key(|(_, relative)| relative.clone());
3900
3901 let mut hasher = Sha256::new();
3902 for (full_path, relative) in files {
3903 hasher.update(b"file\0");
3904 hasher.update(relative.as_bytes());
3905 hasher.update(b"\0");
3906 let bytes = fs::read(full_path)?
3907 .into_iter()
3908 .filter(|byte| *byte != b'\r')
3909 .collect::<Vec<_>>();
3910 hasher.update(&bytes);
3911 hasher.update(b"\0");
3912 }
3913
3914 Ok(hex_encode(hasher.finalize().as_slice()))
3915}
3916
3917fn collect_digest_files_recursive(
3918 root: &Path,
3919 dir: &Path,
3920 out: &mut Vec<(PathBuf, String)>,
3921) -> Result<()> {
3922 for entry in fs::read_dir(dir)? {
3923 let entry = entry?;
3924 let path = entry.path();
3925 let file_type = entry.file_type()?;
3926 let name = entry.file_name();
3927 if name == OsStr::new(".git") {
3928 continue;
3929 }
3930
3931 if file_type.is_dir() {
3932 collect_digest_files_recursive(root, &path, out)?;
3933 continue;
3934 }
3935
3936 if !file_type.is_file() {
3937 continue;
3938 }
3939
3940 out.push((path.clone(), relative_posix(root, &path)));
3941 }
3942 Ok(())
3943}
3944
3945fn global_settings_path(cwd: &Path) -> PathBuf {
3946 Config::config_path_override_from_env(cwd)
3947 .unwrap_or_else(|| Config::global_dir().join("settings.json"))
3948}
3949
3950fn project_settings_path(cwd: &Path) -> PathBuf {
3951 cwd.join(Config::project_dir()).join("settings.json")
3952}
3953
/// Mutation to apply to the `packages` array of a settings file.
#[derive(Debug, Clone, Copy)]
enum UpdateAction {
    // Append the source if no matching entry already exists.
    Add,
    // Drop every entry whose source matches.
    Remove,
}
3959
3960fn list_packages_in_settings(path: &Path) -> Result<Vec<PackageEntry>> {
3961 let value = read_settings_json(path)?;
3962 let packages = value
3963 .get("packages")
3964 .and_then(Value::as_array)
3965 .cloned()
3966 .unwrap_or_default();
3967
3968 let mut out = Vec::new();
3969 for pkg in packages {
3970 if let Some(spec) = extract_package_spec(&pkg) {
3971 out.push(PackageEntry {
3972 scope: PackageScope::User, source: spec.source,
3974 filter: spec.filter,
3975 });
3976 }
3977 }
3978 Ok(out)
3979}
3980
/// Adds or removes `source` in the `packages` array of the settings file at
/// `path`, then writes the file back atomically.
///
/// Matching uses normalized identities (see `sources_match_in_dir`) resolved
/// against `cwd`, so cosmetic differences (version suffixes, credentials in a
/// git URL) do not produce duplicates. Errors on an empty source or when the
/// settings file cannot be read/parsed.
fn update_package_sources(
    path: &Path,
    source: &str,
    action: UpdateAction,
    cwd: &Path,
) -> Result<()> {
    let source = source.trim();
    if source.is_empty() {
        return Err(Error::Config(
            "settings package source cannot be empty".to_string(),
        ));
    }

    // Coerce a non-object root (e.g. a file parsed to null/array) to `{}`.
    let mut root = read_settings_json(path)?;
    if !root.is_object() {
        root = serde_json::json!({});
    }

    // Ensure `packages` exists and is an array before taking a mutable view.
    if !matches!(root.get("packages"), Some(Value::Array(_))) {
        root["packages"] = Value::Array(Vec::new());
    }

    let packages = root
        .get_mut("packages")
        .and_then(Value::as_array_mut)
        .ok_or_else(|| {
            Error::Config("failed to initialize settings 'packages' as an array".to_string())
        })?;

    match action {
        UpdateAction::Add => {
            // De-duplicate by normalized identity before appending.
            let exists = packages.iter().any(|existing| {
                extract_package_source(existing)
                    .is_some_and(|(s, _)| sources_match_in_dir(&s, source, cwd))
            });
            if !exists {
                packages.push(Value::String(source.to_string()));
            }
        }
        UpdateAction::Remove => {
            // Keep only entries whose normalized identity does not match.
            packages.retain(|existing| {
                !extract_package_source(existing)
                    .is_some_and(|(s, _)| sources_match_in_dir(&s, source, cwd))
            });
        }
    }

    write_settings_json_atomic(path, &root)
}
4030
4031fn extract_package_source(value: &Value) -> Option<(String, bool)> {
4032 if let Some(s) = value.as_str() {
4033 return Some((s.to_string(), false));
4034 }
4035 let obj = value.as_object()?;
4036 let source = obj.get("source")?.as_str()?.to_string();
4037 Some((source, true))
4038}
4039
/// Coarse classification of a package source, used for identity comparison.
#[derive(Debug, Clone, PartialEq, Eq)]
enum NormalizedKind {
    // npm registry package
    Npm,
    // git-hosted repository
    Git,
    // local filesystem path
    Local,
}
4046
/// Canonical identity of a package source: its kind plus a normalized key
/// (npm package name, git repo key, or local path string). Two sources are
/// "the same package" when their `NormalizedSource` values are equal.
#[derive(Debug, Clone, PartialEq, Eq)]
struct NormalizedSource {
    kind: NormalizedKind,
    key: String,
}
4052
4053fn sources_match(a: &str, b: &str) -> bool {
4054 normalize_source(a).is_some_and(|left| normalize_source(b).is_some_and(|right| left == right))
4055}
4056
4057fn sources_match_in_dir(a: &str, b: &str, cwd: &Path) -> bool {
4058 normalize_source_in_dir(a, cwd)
4059 .is_some_and(|left| normalize_source_in_dir(b, cwd).is_some_and(|right| left == right))
4060}
4061
4062fn normalize_source(source: &str) -> Option<NormalizedSource> {
4063 let source = source.trim();
4064 if source.is_empty() {
4065 return None;
4066 }
4067 if let Some(rest) = source.strip_prefix("npm:") {
4068 let spec = rest.trim();
4069 let (name, _) = parse_npm_spec(spec);
4070 return Some(NormalizedSource {
4071 kind: NormalizedKind::Npm,
4072 key: name,
4073 });
4074 }
4075 if let Some(rest) = source.strip_prefix("git:") {
4076 return Some(NormalizedSource {
4077 kind: NormalizedKind::Git,
4078 key: normalized_git_repo_key(rest.trim()),
4079 });
4080 }
4081 if looks_like_git_url(source) || source.starts_with("https://") || source.starts_with("http://")
4082 {
4083 return Some(NormalizedSource {
4084 kind: NormalizedKind::Git,
4085 key: normalized_git_repo_key(source),
4086 });
4087 }
4088 Some(NormalizedSource {
4089 kind: NormalizedKind::Local,
4090 key: source.to_string(),
4091 })
4092}
4093
4094fn normalize_source_in_dir(source: &str, cwd: &Path) -> Option<NormalizedSource> {
4095 let source = source.trim();
4096 if source.is_empty() {
4097 return None;
4098 }
4099
4100 match parse_source(source, cwd) {
4103 ParsedSource::Npm { name, .. } => Some(NormalizedSource {
4104 kind: NormalizedKind::Npm,
4105 key: name,
4106 }),
4107 ParsedSource::Git { repo, .. } => Some(NormalizedSource {
4108 kind: NormalizedKind::Git,
4109 key: repo,
4110 }),
4111 ParsedSource::Local { path } => Some(NormalizedSource {
4112 kind: NormalizedKind::Local,
4113 key: path.to_string_lossy().to_string(),
4114 }),
4115 }
4116}
4117
4118fn read_settings_json(path: &Path) -> Result<Value> {
4119 if !path.exists() {
4120 return Ok(serde_json::json!({}));
4121 }
4122 let content = fs::read_to_string(path)?;
4123 serde_json::from_str(&content).map_err(|e| {
4124 Error::config(format!(
4125 "Invalid JSON in settings file {}: {e}",
4126 path.display()
4127 ))
4128 })
4129}
4130
4131fn write_settings_json_atomic(path: &Path, value: &Value) -> Result<()> {
4132 let data = serde_json::to_string_pretty(value)?;
4133 let parent = path.parent().unwrap_or_else(|| Path::new("."));
4134 fs::create_dir_all(parent)?;
4135
4136 let tmp = tempfile::NamedTempFile::new_in(parent)?;
4137 fs::write(tmp.path(), data)?;
4138 let tmp_path = tmp.into_temp_path();
4139 tmp_path
4140 .persist(path)
4141 .map_err(|e| Error::Io(Box::new(e.error)))?;
4142 Ok(())
4143}
4144
/// True when the `PI_EXT_COMPAT_SCAN` env var holds a truthy value
/// (`1`/`true`/`yes`/`on`, case-insensitive, whitespace ignored). An unset or
/// unreadable variable counts as disabled.
fn compat_scan_enabled() -> bool {
    std::env::var("PI_EXT_COMPAT_SCAN")
        .map(|raw| {
            let flag = raw.trim().to_ascii_lowercase();
            flag == "1" || flag == "true" || flag == "yes" || flag == "on"
        })
        .unwrap_or(false)
}
4152
/// Emits a compatibility-scan ledger (as a structured `tracing` event) for
/// each enabled extension.
///
/// No-op unless the `PI_EXT_COMPAT_SCAN` flag is enabled. Scan or
/// serialization failures are logged as warnings and never abort the loop;
/// empty ledgers are skipped.
fn maybe_emit_compat_ledgers(extensions: &[ResolvedResource]) {
    if !compat_scan_enabled() {
        return;
    }

    // Sort by path so log output is deterministic across runs.
    let mut enabled = extensions.iter().filter(|r| r.enabled).collect::<Vec<_>>();
    enabled.sort_by(|left, right| left.path.cmp(&right.path));

    for resource in enabled {
        // Scan root is the extension directory itself, or the containing
        // directory when the resource is a single file.
        let root = if resource.path.is_dir() {
            resource.path.clone()
        } else {
            resource
                .path
                .parent()
                .map_or_else(|| resource.path.clone(), Path::to_path_buf)
        };
        let scanner = CompatibilityScanner::new(root);
        let ledger = match scanner.scan_path(&resource.path) {
            Ok(ledger) => ledger,
            Err(err) => {
                // Best-effort: log and move on to the next extension.
                warn!(event = "ext.compat_ledger_error", error = %err);
                continue;
            }
        };

        if ledger.is_empty() {
            continue;
        }

        match serde_json::to_string(&ledger) {
            Ok(ledger_json) => {
                info!(
                    event = "ext.compat_ledger",
                    schema = %ledger.schema,
                    ledger = %ledger_json
                );
            }
            Err(err) => {
                warn!(event = "ext.compat_ledger_serialize_error", error = %err);
            }
        }
    }
}
4197
4198#[cfg(test)]
4199mod tests {
4200 use super::*;
4201 use asupersync::runtime::RuntimeBuilder;
4202 use serde_json::json;
4203 use std::fs;
4204 use std::future::Future;
4205
    // Drives `future` to completion on a fresh single-threaded test runtime.
    fn run_async<T>(future: impl Future<Output = T>) -> T {
        let runtime = RuntimeBuilder::current_thread()
            .build()
            .expect("build runtime");
        runtime.block_on(future)
    }
4212
    // A panicking worker thread must re-raise its panic instead of being
    // reported as a benign cancellation.
    #[test]
    fn test_finish_package_task_propagates_panic_before_cancellation() {
        let handle = std::thread::spawn(|| -> () {
            panic!("package manager worker panic");
        });

        let panic = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
            let _: Result<()> = finish_package_task(handle, Err(()), "Install task cancelled");
        }));

        assert!(
            panic.is_err(),
            "worker panic should not be masked as cancellation"
        );
    }
4228
    // A dropped/failed result channel with a cleanly-finished worker maps to
    // the provided cancellation tool error.
    #[test]
    fn test_finish_package_task_maps_nonpanic_cancellation_to_tool_error() {
        let handle = std::thread::spawn(|| {});

        let err = finish_package_task::<(), _>(handle, Err(()), "Install task cancelled")
            .expect_err("error");

        assert!(
            err.to_string().contains("Install task cancelled"),
            "unexpected error: {err}"
        );
    }
4241
    // The successful payload passes through unchanged.
    #[test]
    fn test_finish_package_task_returns_success_payload() {
        let handle = std::thread::spawn(|| {});

        let value =
            finish_package_task::<usize, ()>(handle, Ok(Ok(7usize)), "task cancelled").unwrap();

        assert_eq!(value, 7);
    }
4251
    // npm spec parsing: plain names, versioned names, and scoped packages
    // (whose leading `@` must not be treated as a version separator).
    #[test]
    fn test_parse_npm_spec_scoped_and_unscoped() {
        assert_eq!(parse_npm_spec("foo"), ("foo".to_string(), None));
        assert_eq!(
            parse_npm_spec("foo@1.2.3"),
            ("foo".to_string(), Some("1.2.3".to_string()))
        );
        assert_eq!(
            parse_npm_spec("@scope/name@1.2.3"),
            ("@scope/name".to_string(), Some("1.2.3".to_string()))
        );
        assert_eq!(
            parse_npm_spec("@scope/name"),
            ("@scope/name".to_string(), None)
        );
    }
4268
    // Source identity ignores versions, `.git` suffixes, URL schemes, and
    // embedded credentials, but distinguishes different packages/repos.
    #[test]
    fn test_sources_match_normalization() {
        assert!(sources_match("npm:foo@1", "npm:foo@2"));
        assert!(sources_match(
            "git:github.com/a/b@v1",
            "git:github.com/a/b@v2"
        ));
        assert!(sources_match(
            "https://github.com/a/b.git@v1",
            "github.com/a/b"
        ));
        assert!(sources_match(
            "git:https://token-a@github.com/a/b.git@v1",
            "git:https://token-b@github.com/a/b.git@v2"
        ));
        assert!(!sources_match("npm:foo", "npm:bar"));
        assert!(!sources_match("git:github.com/a/b", "git:github.com/a/c"));
    }
4287
    // Package identities drop versions/refs and credentials; local paths are
    // resolved (including `..` normalization) against the manager's cwd.
    #[test]
    #[cfg(unix)]
    fn test_package_identity_matches_pi_mono() {
        let dir = tempfile::tempdir().expect("tempdir");
        let manager = PackageManager::new(dir.path().to_path_buf());

        assert_eq!(
            manager.package_identity("npm:@scope/name@1.2.3"),
            "npm:@scope/name"
        );
        assert_eq!(
            manager.package_identity("git:https://github.com/a/b.git@v1"),
            "git:github.com/a/b"
        );
        assert_eq!(
            manager.package_identity("git:https://token@github.com/a/b.git@v1"),
            "git:github.com/a/b"
        );

        let identity = manager.package_identity("./foo/../bar");
        let expected_suffix = format!("{}/bar", dir.path().display());
        assert!(identity.ends_with(&expected_suffix), "{identity}");
    }
4311
    // A bare name that exists on disk parses as a local path, not as an
    // extension-index alias.
    #[test]
    fn parse_source_prefers_existing_local_paths_over_index_aliases() {
        let dir = tempfile::tempdir().expect("tempdir");
        let local = dir.path().join("checkpoint-pi");
        fs::create_dir_all(&local).expect("create local path");

        match parse_source("checkpoint-pi", dir.path()) {
            ParsedSource::Local { path } => assert_eq!(path, local),
            other => panic!("Unexpected parsed source: {:?}", other),
        }
    }
4323
    // npm and git sources map to deterministic project-scope install
    // directories; credentials embedded in a git URL must not change the path.
    #[test]
    fn test_installed_path_project_scope() {
        let dir = tempfile::tempdir().expect("tempdir");
        let manager = PackageManager::new(dir.path().to_path_buf());

        let npm = manager
            .installed_path_sync("npm:foo@1.2.3", PackageScope::Project)
            .expect("installed_path")
            .expect("path");
        assert_eq!(
            npm,
            dir.path()
                .join(Config::project_dir())
                .join("npm")
                .join("node_modules")
                .join("foo")
        );

        let git = manager
            .installed_path_sync("git:github.com/user/repo@v1", PackageScope::Project)
            .expect("installed_path")
            .expect("path");
        assert_eq!(
            git,
            dir.path()
                .join(Config::project_dir())
                .join("git")
                .join("github.com")
                .join("user/repo")
        );

        let git_with_auth = manager
            .installed_path_sync(
                "git:https://token@github.com/user/repo.git@main",
                PackageScope::Project,
            )
            .expect("installed_path")
            .expect("path");
        assert_eq!(git_with_auth, git);
    }
4369
    // A local git source installs under git/local/<first-16-hex-of-sha256>
    // of its absolute path, giving a stable per-path bucket.
    #[test]
    fn test_installed_path_project_scope_local_git_hashes_absolute_path() {
        let dir = tempfile::tempdir().expect("tempdir");
        let manager = PackageManager::new(dir.path().to_path_buf());

        let repo_path = dir.path().join("repo");
        fs::create_dir_all(&repo_path).expect("create local repo dir");

        // Recompute the expected bucket key the same way the manager does.
        let mut hasher = Sha256::new();
        hasher.update(repo_path.to_string_lossy().as_bytes());
        let digest = hasher.finalize();
        let key = hex_encode(&digest)[..16].to_string();

        let local = manager
            .installed_path_sync("git:./repo", PackageScope::Project)
            .expect("installed_path")
            .expect("path");
        assert_eq!(
            local,
            dir.path()
                .join(Config::project_dir())
                .join("git")
                .join("local")
                .join(key),
            "local git sources should map to a stable hashed install directory",
        );
    }
4397
    // A whitespace-only source is rejected with a descriptive error.
    #[test]
    fn installed_path_sync_rejects_blank_source() {
        let dir = tempfile::tempdir().expect("tempdir");
        let manager = PackageManager::new(dir.path().to_path_buf());

        let err = manager
            .installed_path_sync(" ", PackageScope::Project)
            .expect_err("blank package source should fail");
        assert!(
            err.to_string().contains("Package source must be non-empty"),
            "unexpected error: {err}"
        );
    }
4411
    // Removing a host-only git source (no repo path) must error out and must
    // not delete the shared per-host bucket or any repos nested inside it.
    #[test]
    fn remove_sync_rejects_git_host_only_source_without_deleting_host_bucket() {
        let dir = tempfile::tempdir().expect("tempdir");
        let manager = PackageManager::new(dir.path().to_path_buf());
        let host_bucket = dir
            .path()
            .join(Config::project_dir())
            .join("git")
            .join("github.com");
        let repo_dir = host_bucket.join("user").join("repo");
        fs::create_dir_all(&repo_dir).expect("create repo dir");
        fs::write(repo_dir.join("package.json"), "{}").expect("write sentinel");

        let err = manager
            .remove_sync("git:github.com", PackageScope::Project)
            .expect_err("host-only git source should be rejected");

        assert!(
            err.to_string().contains("Invalid git package source"),
            "unexpected error: {err}"
        );
        assert!(host_bucket.exists(), "host bucket should be preserved");
        assert!(repo_dir.exists(), "nested repo should be preserved");
    }
4436
    // When global and project settings reference the same package with
    // different extension filters, the project filter wins: only its selected
    // extension is enabled and all entries are tagged with project scope.
    #[test]
    fn test_project_settings_override_global_package_filters() {
        run_async(async {
            let temp_dir = tempfile::tempdir().expect("tempdir");
            let project_root = temp_dir.path().join("project");
            fs::create_dir_all(project_root.join(".pi")).expect("create project settings dir");

            let package_root = temp_dir.path().join("pkg");
            fs::create_dir_all(package_root.join("extensions")).expect("create extensions dir");
            fs::write(package_root.join("extensions/a.native.json"), "{}")
                .expect("write a.native.json");
            fs::write(package_root.join("extensions/b.native.json"), "{}")
                .expect("write b.native.json");

            let global_settings_path = temp_dir.path().join("global-settings.json");
            let project_settings_path = project_root.join(".pi/settings.json");

            // Global settings select extension `a` from the shared package.
            let global_settings = json!({
                "packages": [{
                    "source": package_root.to_string_lossy(),
                    "extensions": ["extensions/a.native.json"]
                }]
            });
            fs::write(
                &global_settings_path,
                serde_json::to_string_pretty(&global_settings).expect("serialize global settings"),
            )
            .expect("write global settings");

            // Project settings select extension `b` from the same package.
            let project_settings = json!({
                "packages": [{
                    "source": package_root.to_string_lossy(),
                    "extensions": ["extensions/b.native.json"]
                }]
            });
            fs::write(
                &project_settings_path,
                serde_json::to_string_pretty(&project_settings)
                    .expect("serialize project settings"),
            )
            .expect("write project settings");

            let roots = ResolveRoots {
                global_settings_path: global_settings_path.clone(),
                project_settings_path: project_settings_path.clone(),
                global_base_dir: temp_dir.path().join("global-base"),
                project_base_dir: project_root.join(".pi"),
                project_settings_enabled: true,
            };
            fs::create_dir_all(&roots.global_base_dir).expect("create global base dir");

            let manager = PackageManager::new(project_root);
            let resolved = manager.resolve_with_roots(&roots).await.expect("resolve");

            // Only the project-selected extension `b` should be enabled.
            let enabled_extensions = resolved
                .extensions
                .iter()
                .filter(|entry| entry.enabled)
                .collect::<Vec<_>>();
            assert_eq!(enabled_extensions.len(), 1);
            let expected_path = package_root.join("extensions/b.native.json");
            assert_eq!(enabled_extensions[0].path, expected_path);
            assert_eq!(enabled_extensions[0].metadata.scope, PackageScope::Project);

            // `a` is still present but disabled, and everything carries
            // project scope.
            let disabled = resolved
                .extensions
                .iter()
                .find(|entry| entry.path == package_root.join("extensions/a.native.json"))
                .expect("a.native.json entry");
            assert!(!disabled.enabled);
            assert!(
                resolved
                    .extensions
                    .iter()
                    .all(|entry| entry.metadata.scope == PackageScope::Project)
            );
        });
    }
4515
    // With a full config override, listing packages must read only the
    // override file: project settings are disabled and do not leak through.
    #[test]
    fn test_list_packages_with_override_roots_ignores_project_settings() {
        let temp_dir = tempfile::tempdir().expect("tempdir");
        let project_root = temp_dir.path().join("project");
        fs::create_dir_all(project_root.join(".pi")).expect("create project settings dir");

        let override_settings_path = temp_dir.path().join("override-settings.json");
        let project_settings_path = project_root.join(".pi/settings.json");

        fs::write(
            &override_settings_path,
            serde_json::to_string_pretty(&json!({
                "packages": ["npm:override-only"]
            }))
            .expect("serialize override settings"),
        )
        .expect("write override settings");
        // This project-level package must NOT appear in the listing.
        fs::write(
            &project_settings_path,
            serde_json::to_string_pretty(&json!({
                "packages": ["npm:project-leak"]
            }))
            .expect("serialize project settings"),
        )
        .expect("write project settings");

        let roots = ResolveRoots::from_override(&project_root, Some(&override_settings_path));
        let manager = PackageManager::new(project_root);
        let packages = PackageManager::list_packages_with_roots(&roots).expect("list packages");

        assert_eq!(packages.len(), 1);
        assert_eq!(packages[0].source, "npm:override-only");
        assert_eq!(packages[0].scope, PackageScope::User);
        assert!(
            !roots.project_settings_enabled,
            "full config override should disable project settings"
        );
    }
4554
    /// End-to-end check that a full config override (roots built with
    /// `project_settings_enabled: false`) suppresses every project-scoped
    /// resource: filtered package extensions from project settings, explicit
    /// project settings entries, and auto-discovered `.pi/extensions` files.
    #[test]
    fn test_config_override_roots_ignore_project_package_filters() {
        run_async(async {
            let temp_dir = tempfile::tempdir().expect("tempdir");
            let project_root = temp_dir.path().join("project");
            fs::create_dir_all(project_root.join(".pi")).expect("create project settings dir");
            fs::create_dir_all(project_root.join(".pi/extensions"))
                .expect("create project extension dir");

            // Package referenced by the override settings.
            let package_root = temp_dir.path().join("pkg");
            fs::create_dir_all(package_root.join("extensions")).expect("create extensions dir");
            fs::write(package_root.join("extensions/a.native.json"), "{}")
                .expect("write a.native.json");

            // Package referenced only by the (disabled) project settings.
            let package_root2 = temp_dir.path().join("pkg2");
            fs::create_dir_all(package_root2.join("extensions")).expect("create extensions dir");
            fs::write(package_root2.join("extensions/b.native.json"), "{}")
                .expect("write b.native.json");
            // Project-local files: one listed in project settings, one that
            // would be picked up by auto-discovery.
            let project_local_extension =
                project_root.join(".pi/extensions/project-local.native.json");
            let project_auto_extension =
                project_root.join(".pi/extensions/project-auto.native.json");
            fs::write(&project_local_extension, "{}").expect("write project local extension");
            fs::write(&project_auto_extension, "{}").expect("write project auto extension");

            let override_settings_path = temp_dir.path().join("override-settings.json");
            let project_settings_path = project_root.join(".pi/settings.json");

            // Override settings select only `a.native.json` from `pkg`.
            let override_settings = json!({
                "packages": [{
                    "source": package_root.to_string_lossy(),
                    "extensions": ["extensions/a.native.json"]
                }]
            });
            fs::write(
                &override_settings_path,
                serde_json::to_string_pretty(&override_settings)
                    .expect("serialize override settings"),
            )
            .expect("write override settings");

            // Project settings reference `pkg2` and a project-local extension;
            // none of this should be honored while the override is active.
            let project_settings = json!({
                "extensions": ["extensions/project-local.native.json"],
                "packages": [{
                    "source": package_root2.to_string_lossy(),
                    "extensions": ["extensions/b.native.json"]
                }]
            });
            fs::write(
                &project_settings_path,
                serde_json::to_string_pretty(&project_settings)
                    .expect("serialize project settings"),
            )
            .expect("write project settings");

            // Hand-built roots mirroring what a full config override produces:
            // project settings present on disk but explicitly disabled.
            let roots = ResolveRoots {
                global_settings_path: override_settings_path.clone(),
                project_settings_path: project_settings_path.clone(),
                global_base_dir: temp_dir.path().join("global-base"),
                project_base_dir: project_root.join(".pi"),
                project_settings_enabled: false,
            };
            fs::create_dir_all(&roots.global_base_dir).expect("create global base dir");

            let manager = PackageManager::new(project_root);
            let resolved = manager.resolve_with_roots(&roots).await.expect("resolve");

            // Only the override-selected extension may end up enabled.
            let enabled_extensions = resolved
                .extensions
                .iter()
                .filter(|entry| entry.enabled)
                .collect::<Vec<_>>();
            assert_eq!(enabled_extensions.len(), 1);
            assert_eq!(
                enabled_extensions[0].path,
                package_root.join("extensions/a.native.json")
            );
            assert_eq!(enabled_extensions[0].metadata.scope, PackageScope::User);
            // Nothing project-scoped may appear in the resolved set at all,
            // not even as a disabled entry.
            assert!(
                resolved
                    .extensions
                    .iter()
                    .all(|entry| entry.path != package_root2.join("extensions/b.native.json")),
                "project package resources should be ignored when a full config override is active"
            );
            assert!(
                resolved
                    .extensions
                    .iter()
                    .all(|entry| entry.path != project_local_extension),
                "project local settings entries should be ignored when a full config override is active"
            );
            assert!(
                resolved
                    .extensions
                    .iter()
                    .all(|entry| entry.path != project_auto_extension),
                "project auto-discovered resources should be ignored when a full config override is active"
            );
        });
    }
4656
4657 #[test]
4658 fn test_resolve_extension_sources_uses_temporary_scope() {
4659 run_async(async {
4660 let temp_dir = tempfile::tempdir().expect("tempdir");
4661 let extension_path = temp_dir.path().join("ext.native.json");
4662 fs::write(&extension_path, "{}").expect("write extension");
4663
4664 let manager = PackageManager::new(temp_dir.path().to_path_buf());
4665 let sources = vec![extension_path.to_string_lossy().to_string()];
4666 let resolved = manager
4667 .resolve_extension_sources(
4668 &sources,
4669 ResolveExtensionSourcesOptions {
4670 local: false,
4671 temporary: true,
4672 },
4673 )
4674 .await
4675 .expect("resolve extension sources");
4676
4677 assert_eq!(resolved.extensions.len(), 1);
4678 let entry = &resolved.extensions[0];
4679 assert!(entry.enabled);
4680 assert_eq!(entry.path, extension_path);
4681 assert_eq!(entry.metadata.scope, PackageScope::Temporary);
4682 assert_eq!(entry.metadata.origin, ResourceOrigin::Package);
4683 assert_eq!(entry.metadata.source, sources[0]);
4684 });
4685 }
4686
4687 #[test]
4688 fn test_resolve_extension_sources_rejects_missing_local_path() {
4689 run_async(async {
4690 let temp_dir = tempfile::tempdir().expect("tempdir");
4691 let missing_path = temp_dir.path().join("missing.native.json");
4692
4693 let manager = PackageManager::new(temp_dir.path().to_path_buf());
4694 let err = manager
4695 .resolve_extension_sources(
4696 &[missing_path.to_string_lossy().to_string()],
4697 ResolveExtensionSourcesOptions {
4698 local: false,
4699 temporary: true,
4700 },
4701 )
4702 .await
4703 .expect_err("missing CLI extension path should fail");
4704
4705 assert!(
4706 err.to_string().contains("does not exist"),
4707 "unexpected error: {err}"
4708 );
4709 });
4710 }
4711
4712 #[test]
4713 fn test_resolve_extension_sources_rejects_unsupported_local_file() {
4714 run_async(async {
4715 let temp_dir = tempfile::tempdir().expect("tempdir");
4716 let unsupported_path = temp_dir.path().join("notes.txt");
4717 fs::write(&unsupported_path, "not an extension").expect("write unsupported file");
4718
4719 let manager = PackageManager::new(temp_dir.path().to_path_buf());
4720 let err = manager
4721 .resolve_extension_sources(
4722 &[unsupported_path.to_string_lossy().to_string()],
4723 ResolveExtensionSourcesOptions {
4724 local: false,
4725 temporary: true,
4726 },
4727 )
4728 .await
4729 .expect_err("unsupported CLI extension file should fail");
4730
4731 assert!(
4732 err.to_string()
4733 .contains("Unsupported extension source file"),
4734 "unexpected error: {err}"
4735 );
4736 });
4737 }
4738
4739 #[test]
4740 fn test_resolve_local_path_normalizes_dot_segments() {
4741 let temp_dir = tempfile::tempdir().expect("tempdir");
4742 let resolved = resolve_local_path("./foo/../bar", temp_dir.path());
4743 assert_eq!(resolved, temp_dir.path().join("bar"));
4744 }
4745
4746 #[cfg(unix)]
4747 #[test]
4748 fn test_resolve_local_extension_source_accepts_symlink() {
4749 let temp_dir = tempfile::tempdir().expect("tempdir");
4750 let extension_path = temp_dir.path().join("ext.native.json");
4751 fs::write(&extension_path, "{}").expect("write extension");
4752
4753 let symlink_path = temp_dir.path().join("ext-link.native.json");
4754 std::os::unix::fs::symlink(&extension_path, &symlink_path).expect("create symlink");
4755
4756 let mut accumulator = ResourceAccumulator::new();
4757 let mut metadata = PathMetadata {
4758 source: symlink_path.to_string_lossy().to_string(),
4759 scope: PackageScope::Temporary,
4760 origin: ResourceOrigin::Package,
4761 base_dir: None,
4762 };
4763
4764 PackageManager::resolve_local_extension_source(
4765 &symlink_path,
4766 &mut accumulator,
4767 None,
4768 &mut metadata,
4769 true,
4770 )
4771 .expect("resolve symlink extension source");
4772
4773 assert_eq!(accumulator.extensions.items.len(), 1);
4774 assert_eq!(accumulator.extensions.items[0].path, symlink_path);
4775 }
4776
4777 #[test]
4778 fn test_resolve_extension_sources_rejects_blank_source() {
4779 run_async(async {
4780 let temp_dir = tempfile::tempdir().expect("tempdir");
4781 let manager = PackageManager::new(temp_dir.path().to_path_buf());
4782 let err = manager
4783 .resolve_extension_sources(
4784 &[" ".to_string()],
4785 ResolveExtensionSourcesOptions {
4786 local: false,
4787 temporary: true,
4788 },
4789 )
4790 .await
4791 .expect_err("blank extension source should fail");
4792
4793 assert!(
4794 err.to_string()
4795 .contains("Extension source must be non-empty"),
4796 "unexpected error: {err}"
4797 );
4798 });
4799 }
4800
4801 #[test]
4802 fn test_manifest_extensions_resolve_with_patterns() {
4803 run_async(async {
4804 let temp_dir = tempfile::tempdir().expect("tempdir");
4805 let package_root = temp_dir.path().join("pkg");
4806 let extensions_dir = package_root.join("extensions");
4807 fs::create_dir_all(&extensions_dir).expect("create extensions dir");
4808 fs::write(extensions_dir.join("a.native.json"), "{}").expect("write a.native.json");
4809 fs::write(extensions_dir.join("b.native.json"), "{}").expect("write b.native.json");
4810
4811 let manifest = json!({
4812 "name": "pkg",
4813 "version": "1.0.0",
4814 "pi": {
4815 "extensions": ["extensions", "!extensions/b.native.json"]
4816 }
4817 });
4818 fs::write(
4819 package_root.join("package.json"),
4820 serde_json::to_string_pretty(&manifest).expect("serialize manifest"),
4821 )
4822 .expect("write manifest");
4823
4824 let manager = PackageManager::new(temp_dir.path().to_path_buf());
4825 let sources = vec![package_root.to_string_lossy().to_string()];
4826 let resolved = manager
4827 .resolve_extension_sources(
4828 &sources,
4829 ResolveExtensionSourcesOptions {
4830 local: false,
4831 temporary: true,
4832 },
4833 )
4834 .await
4835 .expect("resolve extension sources");
4836
4837 let paths = resolved
4838 .extensions
4839 .iter()
4840 .map(|entry| entry.path.clone())
4841 .collect::<Vec<_>>();
4842 assert!(paths.contains(&package_root.join("extensions/a.native.json")));
4843 assert!(!paths.contains(&package_root.join("extensions/b.native.json")));
4844 });
4845 }
4846
4847 #[test]
4848 fn test_resolve_extension_sources_accepts_single_string_manifest_extension() {
4849 run_async(async {
4850 let temp_dir = tempfile::tempdir().expect("tempdir");
4851 let package_root = temp_dir.path().join("pkg");
4852 let entry_path = package_root.join("extensions").join("index.ts");
4853 fs::create_dir_all(entry_path.parent().expect("entry parent")).expect("create dir");
4854 fs::write(&entry_path, "export default {}").expect("write index.ts");
4855 fs::write(
4856 package_root.join("package.json"),
4857 serde_json::to_string_pretty(&json!({
4858 "name": "pkg",
4859 "version": "1.0.0",
4860 "pi": {
4861 "extensions": "extensions/index.ts"
4862 }
4863 }))
4864 .expect("serialize manifest"),
4865 )
4866 .expect("write manifest");
4867
4868 let manager = PackageManager::new(temp_dir.path().to_path_buf());
4869 let sources = vec![package_root.to_string_lossy().to_string()];
4870 let resolved = manager
4871 .resolve_extension_sources(
4872 &sources,
4873 ResolveExtensionSourcesOptions {
4874 local: false,
4875 temporary: true,
4876 },
4877 )
4878 .await
4879 .expect("resolve extension sources");
4880
4881 assert_eq!(resolved.extensions.len(), 1);
4882 let entry = &resolved.extensions[0];
4883 assert_eq!(entry.path, entry_path);
4884 assert!(entry.enabled);
4885 assert_eq!(entry.metadata.source, sources[0]);
4886 });
4887 }
4888
4889 #[test]
4890 fn test_resolve_extension_sources_errors_on_malformed_package_manifest() {
4891 run_async(async {
4892 let temp_dir = tempfile::tempdir().expect("tempdir");
4893 let package_root = temp_dir.path().join("pkg");
4894 let extensions_dir = package_root.join("extensions");
4895 fs::create_dir_all(&extensions_dir).expect("create extensions dir");
4896 fs::write(extensions_dir.join("a.native.json"), "{}").expect("write extension");
4897 fs::write(package_root.join("package.json"), "{ not valid json")
4898 .expect("write malformed package.json");
4899
4900 let manager = PackageManager::new(temp_dir.path().to_path_buf());
4901 let err = manager
4902 .resolve_extension_sources(
4903 &[package_root.to_string_lossy().to_string()],
4904 ResolveExtensionSourcesOptions {
4905 local: false,
4906 temporary: true,
4907 },
4908 )
4909 .await
4910 .expect_err("malformed package manifest must fail closed");
4911
4912 let message = err.to_string();
4913 assert!(message.contains("Failed to parse package manifest"));
4914 assert!(message.contains(&package_root.join("package.json").display().to_string()));
4915 });
4916 }
4917
4918 #[test]
4919 fn test_resolve_extension_sources_errors_on_outside_root_manifest_path() {
4920 run_async(async {
4921 let temp_dir = tempfile::tempdir().expect("tempdir");
4922 let package_root = temp_dir.path().join("pkg");
4923 let escaped_dir = temp_dir.path().join("escaped");
4924 fs::create_dir_all(&package_root).expect("create package root");
4925 fs::create_dir_all(&escaped_dir).expect("create escaped dir");
4926 fs::write(escaped_dir.join("index.ts"), "export default {};")
4927 .expect("write escaped extension");
4928 fs::write(
4929 package_root.join("package.json"),
4930 serde_json::to_string_pretty(&json!({
4931 "name": "pkg",
4932 "pi": {
4933 "extensions": ["../escaped/index.ts"]
4934 }
4935 }))
4936 .expect("serialize package.json"),
4937 )
4938 .expect("write package.json");
4939
4940 let manager = PackageManager::new(temp_dir.path().to_path_buf());
4941 let err = manager
4942 .resolve_extension_sources(
4943 &[package_root.to_string_lossy().to_string()],
4944 ResolveExtensionSourcesOptions {
4945 local: false,
4946 temporary: true,
4947 },
4948 )
4949 .await
4950 .expect_err("outside-root manifest entries must fail closed");
4951
4952 let message = err.to_string();
4953 assert!(message.contains("`pi.extensions` paths must stay within the package root"));
4954 assert!(message.contains(&package_root.join("package.json").display().to_string()));
4955 });
4956 }
4957
4958 #[test]
4959 fn test_extension_manifest_directory_detected() {
4960 let temp_dir = tempfile::tempdir().expect("tempdir");
4961 let extension_dir = temp_dir.path().join("ext");
4962 fs::create_dir_all(&extension_dir).expect("create extension dir");
4963 fs::write(
4964 extension_dir.join("extension.json"),
4965 serde_json::to_string_pretty(&json!({
4966 "schema": "pi.ext.manifest.v1",
4967 "extension_id": "test.ext",
4968 "name": "Test Extension",
4969 "version": "0.1.0",
4970 "api_version": "1.0",
4971 "runtime": "native-rust",
4972 "entrypoint": "index.native.json",
4973 "capabilities": []
4974 }))
4975 .expect("serialize extension manifest"),
4976 )
4977 .expect("write extension manifest");
4978 fs::write(extension_dir.join("index.native.json"), "{}").expect("write extension entry");
4979
4980 let entries = resolve_extension_entries(&extension_dir).expect("entries");
4981 assert_eq!(entries, vec![extension_dir]);
4982 }
4983
4984 #[test]
4985 fn test_resolve_extension_entries_skips_invalid_package_manifest() {
4986 let temp_dir = tempfile::tempdir().expect("tempdir");
4987 let extension_dir = temp_dir.path().join("ext");
4988 fs::create_dir_all(&extension_dir).expect("create extension dir");
4989 fs::write(extension_dir.join("package.json"), "{ not valid json")
4990 .expect("write malformed package.json");
4991 fs::write(extension_dir.join("index.ts"), "export {};\n").expect("write fallback entry");
4992
4993 assert!(
4994 resolve_extension_entries(&extension_dir).is_none(),
4995 "invalid package.json should not fall back to index.* entrypoints"
4996 );
4997 }
4998
4999 #[test]
5000 fn test_resolve_extension_entries_skips_invalid_extension_manifest() {
5001 let temp_dir = tempfile::tempdir().expect("tempdir");
5002 let extension_dir = temp_dir.path().join("ext");
5003 fs::create_dir_all(&extension_dir).expect("create extension dir");
5004 fs::write(extension_dir.join("extension.json"), "{ not valid json")
5005 .expect("write malformed extension.json");
5006 fs::write(extension_dir.join("index.ts"), "export {};\n").expect("write fallback entry");
5007
5008 assert!(
5009 resolve_extension_entries(&extension_dir).is_none(),
5010 "invalid extension.json should not fall back to index.* entrypoints"
5011 );
5012 }
5013
5014 #[test]
5019 fn is_pattern_detects_all_prefix_operators() {
5020 assert!(is_pattern("!exclude_me"));
5021 assert!(is_pattern("+force_include"));
5022 assert!(is_pattern("-force_exclude"));
5023 assert!(is_pattern("*.js"));
5024 assert!(is_pattern("foo?bar"));
5025 assert!(!is_pattern("plain_entry"));
5026 assert!(!is_pattern("extensions/a.js"));
5027 assert!(!is_pattern(""));
5028 }
5029
5030 #[test]
5031 fn split_patterns_separates_plain_from_operators() {
5032 let entries = vec![
5033 "a.js".to_string(),
5034 "!b.js".to_string(),
5035 "c.js".to_string(),
5036 "+d.js".to_string(),
5037 "-e.js".to_string(),
5038 "*.ts".to_string(),
5039 ];
5040 let (plain, patterns) = split_patterns(&entries);
5041 assert_eq!(plain, vec!["a.js", "c.js"]);
5042 assert_eq!(patterns, vec!["!b.js", "+d.js", "-e.js", "*.ts"]);
5043 }
5044
5045 #[test]
5046 fn split_patterns_empty_input() {
5047 let (plain, patterns) = split_patterns(&[]);
5048 assert!(plain.is_empty());
5049 assert!(patterns.is_empty());
5050 }
5051
5052 #[test]
5057 fn posix_string_normalizes_separators() {
5058 assert_eq!(posix_string(Path::new("a/b/c")), "a/b/c");
5059 assert_eq!(posix_string(Path::new("/abs/path")), "/abs/path");
5060 }
5061
5062 #[test]
5063 fn relative_posix_computes_relative_path() {
5064 let base = Path::new("/home/user/project");
5065 let path = Path::new("/home/user/project/src/main.rs");
5066 assert_eq!(relative_posix(base, path), "src/main.rs");
5067 }
5068
5069 #[test]
5070 fn relative_posix_with_parent_traversal() {
5071 let base = Path::new("/home/user/project/src");
5072 let path = Path::new("/home/user/project/tests/foo.rs");
5073 assert_eq!(relative_posix(base, path), "../tests/foo.rs");
5074 }
5075
5076 #[test]
5077 fn relative_posix_no_common_prefix() {
5078 let base = Path::new("/a/b");
5079 let path = Path::new("/c/d");
5080 let result = relative_posix(base, path);
5081 assert_eq!(result, "../../c/d");
5082 }
5083
5084 #[test]
5089 fn normalize_exact_pattern_strips_dot_slash() {
5090 assert_eq!(normalize_exact_pattern("./foo.js"), "foo.js");
5091 assert_eq!(normalize_exact_pattern("foo.js"), "foo.js");
5092 assert_eq!(normalize_exact_pattern(""), "");
5093 }
5094
5095 #[test]
5100 fn pattern_matches_simple_glob() {
5101 assert!(pattern_matches("*.js", "foo.js"));
5102 assert!(pattern_matches("*.js", "bar.js"));
5103 assert!(!pattern_matches("*.js", "foo.ts"));
5104 }
5105
5106 #[test]
5107 fn pattern_matches_exact() {
5108 assert!(pattern_matches("foo.js", "foo.js"));
5109 assert!(!pattern_matches("foo.js", "bar.js"));
5110 }
5111
5112 #[test]
5113 fn pattern_matches_question_mark() {
5114 assert!(pattern_matches("?.js", "a.js"));
5115 assert!(!pattern_matches("?.js", "ab.js"));
5116 }
5117
5118 #[test]
5123 fn looks_like_git_url_recognizes_known_hosts() {
5124 assert!(looks_like_git_url("github.com/user/repo"));
5125 assert!(looks_like_git_url("https://github.com/user/repo"));
5126 assert!(looks_like_git_url("gitlab.com/user/repo"));
5127 assert!(looks_like_git_url("bitbucket.org/user/repo"));
5128 assert!(looks_like_git_url("codeberg.org/user/repo"));
5129 assert!(!looks_like_git_url("example.com/user/repo"));
5130 assert!(!looks_like_git_url("npm:foo"));
5131 assert!(!looks_like_git_url("./local"));
5132 }
5133
5134 #[test]
5135 fn looks_like_local_path_various_forms() {
5136 assert!(looks_like_local_path("."));
5137 assert!(looks_like_local_path(".."));
5138 assert!(looks_like_local_path("./relative"));
5139 assert!(looks_like_local_path("../parent"));
5140 assert!(looks_like_local_path("/absolute"));
5141 assert!(looks_like_local_path("C:/absolute"));
5142 assert!(looks_like_local_path("C:\\absolute"));
5143 assert!(looks_like_local_path("\\\\server\\share"));
5144 assert!(looks_like_local_path("~/home_relative"));
5145 assert!(looks_like_local_path("file:///abs/path"));
5146 assert!(!looks_like_local_path("npm:foo"));
5147 assert!(!looks_like_local_path("github.com/user/repo"));
5148 }
5149
5150 #[test]
5155 fn hex_encode_correctness() {
5156 assert_eq!(hex_encode(&[0x00, 0xff, 0xab, 0x12]), "00ffab12");
5157 assert_eq!(hex_encode(&[]), "");
5158 assert_eq!(hex_encode(&[0xde, 0xad, 0xbe, 0xef]), "deadbeef");
5159 }
5160
5161 #[test]
5166 fn normalize_dot_segments_removes_current_dir() {
5167 let result = normalize_dot_segments(Path::new("/a/./b/./c"));
5168 assert_eq!(result, PathBuf::from("/a/b/c"));
5169 }
5170
5171 #[test]
5172 fn normalize_dot_segments_resolves_parent_dir() {
5173 let result = normalize_dot_segments(Path::new("/a/b/../c"));
5174 assert_eq!(result, PathBuf::from("/a/c"));
5175 }
5176
5177 #[test]
5178 fn normalize_dot_segments_multiple_parents() {
5179 let result = normalize_dot_segments(Path::new("/a/b/c/../../d"));
5180 assert_eq!(result, PathBuf::from("/a/d"));
5181 }
5182
5183 #[test]
5184 fn normalize_dot_segments_cannot_go_above_root() {
5185 let result = normalize_dot_segments(Path::new("/a/../.."));
5186 assert_eq!(result, PathBuf::from("/"));
5187 }
5188
5189 #[test]
5190 fn normalize_dot_segments_relative_path_keeps_parents() {
5191 let result = normalize_dot_segments(Path::new("a/../../b"));
5192 assert_eq!(result, PathBuf::from("../b"));
5193 }
5194
5195 #[test]
5200 fn resolve_path_from_base_absolute_path() {
5201 let result = resolve_path_from_base("/abs/path", Path::new("/base"));
5202 assert_eq!(result, PathBuf::from("/abs/path"));
5203 }
5204
5205 #[test]
5206 fn resolve_path_from_base_relative_path() {
5207 let result = resolve_path_from_base("foo/bar", Path::new("/base"));
5208 assert_eq!(result, PathBuf::from("/base/foo/bar"));
5209 }
5210
5211 #[test]
5212 fn resolve_path_from_base_tilde_expansion() {
5213 let result = resolve_path_from_base("~/docs", Path::new("/base"));
5214 let home = dirs::home_dir().unwrap_or_else(|| PathBuf::from("/base"));
5215 assert_eq!(result, home.join("docs"));
5216 }
5217
5218 #[test]
5219 fn resolve_path_from_base_bare_tilde() {
5220 let result = resolve_path_from_base("~", Path::new("/base"));
5221 let home = dirs::home_dir().unwrap_or_else(|| PathBuf::from("/base"));
5222 assert_eq!(result, home);
5223 }
5224
5225 #[test]
5230 fn extract_string_array_from_string() {
5231 let val = json!("single");
5232 let result = extract_string_array(Some(&val));
5233 assert_eq!(result, vec!["single"]);
5234 }
5235
5236 #[test]
5237 fn extract_string_array_from_array() {
5238 let val = json!(["a", "b", "c"]);
5239 let result = extract_string_array(Some(&val));
5240 assert_eq!(result, vec!["a", "b", "c"]);
5241 }
5242
5243 #[test]
5244 fn extract_string_array_from_null() {
5245 let result = extract_string_array(None);
5246 assert!(result.is_empty());
5247 }
5248
5249 #[test]
5250 fn extract_string_array_filters_non_strings() {
5251 let val = json!(["a", 42, "b", null, "c"]);
5252 let result = extract_string_array(Some(&val));
5253 assert_eq!(result, vec!["a", "b", "c"]);
5254 }
5255
5256 #[test]
5261 fn extract_package_spec_from_string() {
5262 let spec = extract_package_spec(&json!("npm:foo@1.0"));
5263 assert!(spec.is_some());
5264 let spec = spec.unwrap();
5265 assert_eq!(spec.source, "npm:foo@1.0");
5266 assert!(spec.filter.is_none());
5267 }
5268
5269 #[test]
5270 fn extract_package_spec_from_object() {
5271 let val = json!({
5272 "source": "npm:bar",
5273 "extensions": ["a.js", "b.js"],
5274 "skills": "my-skill"
5275 });
5276 let spec = extract_package_spec(&val);
5277 assert!(spec.is_some());
5278 let spec = spec.unwrap();
5279 assert_eq!(spec.source, "npm:bar");
5280 let filter = spec.filter.unwrap();
5281 assert_eq!(
5282 filter.extensions,
5283 Some(vec!["a.js".to_string(), "b.js".to_string()])
5284 );
5285 assert_eq!(filter.skills, Some(vec!["my-skill".to_string()]));
5286 assert!(filter.prompts.is_none());
5287 assert!(filter.themes.is_none());
5288 }
5289
5290 #[test]
5291 fn extract_package_spec_from_object_missing_source() {
5292 let val = json!({"extensions": ["a.js"]});
5293 assert!(extract_package_spec(&val).is_none());
5294 }
5295
5296 #[test]
5297 fn extract_package_spec_from_non_string_non_object() {
5298 assert!(extract_package_spec(&json!(42)).is_none());
5299 assert!(extract_package_spec(&json!(null)).is_none());
5300 assert!(extract_package_spec(&json!(true)).is_none());
5301 }
5302
5303 #[test]
5304 fn extract_package_spec_rejects_blank_sources() {
5305 assert!(extract_package_spec(&json!(" ")).is_none());
5306 assert!(extract_package_spec(&json!({"source": " \t "})).is_none());
5307 }
5308
5309 #[test]
5310 fn extract_filter_field_absent_key() {
5311 let obj = serde_json::Map::new();
5312 assert!(extract_filter_field(&obj, "extensions").is_none());
5313 }
5314
5315 #[test]
5316 fn extract_filter_field_string_value() {
5317 let mut obj = serde_json::Map::new();
5318 obj.insert("skills".to_string(), json!("my-skill"));
5319 let result = extract_filter_field(&obj, "skills");
5320 assert_eq!(result, Some(vec!["my-skill".to_string()]));
5321 }
5322
5323 #[test]
5324 fn extract_filter_field_array_value() {
5325 let mut obj = serde_json::Map::new();
5326 obj.insert("themes".to_string(), json!(["dark", "light"]));
5327 let result = extract_filter_field(&obj, "themes");
5328 assert_eq!(result, Some(vec!["dark".to_string(), "light".to_string()]));
5329 }
5330
5331 #[test]
5332 fn extract_filter_field_non_string_array_or_null() {
5333 let mut obj = serde_json::Map::new();
5334 obj.insert("prompts".to_string(), json!(42));
5335 let result = extract_filter_field(&obj, "prompts");
5336 assert_eq!(result, Some(Vec::<String>::new()));
5337 }
5338
5339 #[test]
5344 fn is_enabled_by_overrides_no_overrides_enables_all() {
5345 let path = Path::new("/base/extensions/foo.js");
5346 let patterns: Vec<String> = vec!["extensions/foo.js".to_string()];
5347 assert!(is_enabled_by_overrides(path, &patterns, Path::new("/base")));
5348 }
5349
5350 #[test]
5351 fn is_enabled_by_overrides_exclude_disables() {
5352 let path = Path::new("/base/extensions/foo.js");
5353 let patterns = vec!["!*.js".to_string()];
5354 assert!(!is_enabled_by_overrides(
5355 path,
5356 &patterns,
5357 Path::new("/base")
5358 ));
5359 }
5360
5361 #[test]
5362 fn is_enabled_by_overrides_force_include_overrides_exclude() {
5363 let path = Path::new("/base/extensions/foo.js");
5364 let patterns = vec!["!*.js".to_string(), "+extensions/foo.js".to_string()];
5365 assert!(is_enabled_by_overrides(path, &patterns, Path::new("/base")));
5366 }
5367
5368 #[test]
5369 fn is_enabled_by_overrides_force_exclude_overrides_force_include() {
5370 let path = Path::new("/base/extensions/foo.js");
5371 let patterns = vec![
5372 "+extensions/foo.js".to_string(),
5373 "-extensions/foo.js".to_string(),
5374 ];
5375 assert!(!is_enabled_by_overrides(
5376 path,
5377 &patterns,
5378 Path::new("/base")
5379 ));
5380 }
5381
5382 #[test]
5387 fn apply_patterns_include_glob() {
5388 let base = Path::new("/base");
5389 let paths = vec![
5390 PathBuf::from("/base/a.js"),
5391 PathBuf::from("/base/b.ts"),
5392 PathBuf::from("/base/c.js"),
5393 ];
5394 let patterns = vec!["*.js".to_string()];
5395 let result = apply_patterns(&paths, &patterns, base);
5396 assert!(result.contains(&PathBuf::from("/base/a.js")));
5397 assert!(result.contains(&PathBuf::from("/base/c.js")));
5398 assert!(!result.contains(&PathBuf::from("/base/b.ts")));
5399 }
5400
5401 #[test]
5402 fn apply_patterns_exclude_removes_from_includes() {
5403 let base = Path::new("/base");
5404 let paths = vec![
5405 PathBuf::from("/base/a.js"),
5406 PathBuf::from("/base/b.js"),
5407 PathBuf::from("/base/c.js"),
5408 ];
5409 let patterns = vec!["*.js".to_string(), "!b.js".to_string()];
5410 let result = apply_patterns(&paths, &patterns, base);
5411 assert!(result.contains(&PathBuf::from("/base/a.js")));
5412 assert!(!result.contains(&PathBuf::from("/base/b.js")));
5413 assert!(result.contains(&PathBuf::from("/base/c.js")));
5414 }
5415
5416 #[test]
5417 fn apply_patterns_no_patterns_returns_all() {
5418 let base = Path::new("/base");
5419 let paths = vec![PathBuf::from("/base/a.js"), PathBuf::from("/base/b.js")];
5420 let result = apply_patterns(&paths, &[], base);
5421 assert_eq!(result.len(), 2);
5422 }
5423
5424 #[test]
5425 fn apply_patterns_force_include_adds_excluded() {
5426 let base = Path::new("/base");
5427 let paths = vec![PathBuf::from("/base/a.js"), PathBuf::from("/base/b.js")];
5428 let patterns = vec!["a.js".to_string(), "+b.js".to_string()];
5429 let result = apply_patterns(&paths, &patterns, base);
5430 assert!(result.contains(&PathBuf::from("/base/a.js")));
5431 assert!(result.contains(&PathBuf::from("/base/b.js")));
5432 }
5433
5434 #[test]
5435 fn apply_patterns_force_exclude_removes_everything() {
5436 let base = Path::new("/base");
5437 let paths = vec![PathBuf::from("/base/a.js"), PathBuf::from("/base/b.js")];
5438 let patterns = vec!["-a.js".to_string()];
5439 let result = apply_patterns(&paths, &patterns, base);
5440 assert!(!result.contains(&PathBuf::from("/base/a.js")));
5441 assert!(result.contains(&PathBuf::from("/base/b.js")));
5442 }
5443
5444 #[test]
5449 fn normalize_source_empty_returns_none() {
5450 assert!(normalize_source("").is_none());
5451 assert!(normalize_source(" ").is_none());
5452 }
5453
5454 #[test]
5455 fn normalize_source_npm() {
5456 let result = normalize_source("npm:@scope/pkg@2.0.0").unwrap();
5457 assert_eq!(result.kind, NormalizedKind::Npm);
5458 assert_eq!(result.key, "@scope/pkg");
5459 }
5460
5461 #[test]
5462 fn normalize_source_git() {
5463 let result = normalize_source("git:github.com/user/repo@v1").unwrap();
5464 assert_eq!(result.kind, NormalizedKind::Git);
5465 assert_eq!(result.key, "github.com/user/repo");
5466 }
5467
5468 #[test]
5469 fn normalize_source_https_git_url() {
5470 let result = normalize_source("https://github.com/user/repo.git@v2").unwrap();
5471 assert_eq!(result.kind, NormalizedKind::Git);
5472 assert_eq!(result.key, "github.com/user/repo");
5473 }
5474
5475 #[test]
5476 fn normalize_source_git_https_with_userinfo_and_ref() {
5477 let result = normalize_source("git:https://token@github.com/user/repo.git@v2").unwrap();
5478 assert_eq!(result.kind, NormalizedKind::Git);
5479 assert_eq!(result.key, "github.com/user/repo");
5480 }
5481
5482 #[test]
5483 fn normalize_source_local() {
5484 let result = normalize_source("my-local-package").unwrap();
5485 assert_eq!(result.kind, NormalizedKind::Local);
5486 assert_eq!(result.key, "my-local-package");
5487 }
5488
5489 #[test]
5494 fn parse_npm_spec_empty() {
5495 assert_eq!(parse_npm_spec(""), (String::new(), None));
5496 }
5497
5498 #[test]
5499 fn parse_npm_spec_whitespace() {
5500 assert_eq!(parse_npm_spec(" foo "), ("foo".to_string(), None));
5501 }
5502
5503 #[test]
5504 fn parse_npm_spec_scoped_with_version() {
5505 let (name, version) = parse_npm_spec("@org/pkg@^3.0.0");
5506 assert_eq!(name, "@org/pkg");
5507 assert_eq!(version, Some("^3.0.0".to_string()));
5508 }
5509
5510 #[test]
5511 fn parse_npm_spec_trailing_at() {
5512 let (name, version) = parse_npm_spec("foo@");
5513 assert_eq!(name, "foo@");
5514 assert!(version.is_none());
5515 }
5516
5517 #[test]
5522 fn resource_list_deduplicates_by_path() {
5523 let mut list = ResourceList::default();
5524 let metadata = PathMetadata {
5525 source: "test".to_string(),
5526 scope: PackageScope::User,
5527 origin: ResourceOrigin::Package,
5528 base_dir: None,
5529 };
5530 list.add(PathBuf::from("/a"), &metadata, true);
5531 list.add(PathBuf::from("/a"), &metadata, true);
5532 list.add(PathBuf::from("/b"), &metadata, false);
5533 assert_eq!(list.items.len(), 2);
5534 assert_eq!(list.items[0].path, PathBuf::from("/a"));
5535 assert_eq!(list.items[1].path, PathBuf::from("/b"));
5536 }
5537
#[test]
fn resource_accumulator_sorts_by_path() {
    let meta = PathMetadata {
        source: "test".to_string(),
        scope: PackageScope::User,
        origin: ResourceOrigin::Package,
        base_dir: None,
    };
    let mut acc = ResourceAccumulator::new();
    // Insert out of order; resolution must sort each list by path.
    acc.extensions.add(PathBuf::from("/z/ext"), &meta, true);
    acc.extensions.add(PathBuf::from("/a/ext"), &meta, true);
    acc.skills.add(PathBuf::from("/z/skill"), &meta, true);
    acc.skills.add(PathBuf::from("/a/skill"), &meta, true);

    let resolved = acc.into_resolved_paths();
    let ext_paths: Vec<PathBuf> = resolved.extensions.iter().map(|r| r.path.clone()).collect();
    let skill_paths: Vec<PathBuf> = resolved.skills.iter().map(|r| r.path.clone()).collect();
    assert_eq!(ext_paths, vec![PathBuf::from("/a/ext"), PathBuf::from("/z/ext")]);
    assert_eq!(
        skill_paths,
        vec![PathBuf::from("/a/skill"), PathBuf::from("/z/skill")]
    );
}
5562
#[test]
fn settings_snapshot_entries_for_returns_correct_type() {
    let snapshot = SettingsSnapshot {
        packages: vec![],
        extensions: vec!["ext".to_string()],
        skills: vec!["skill".to_string()],
        prompts: vec!["prompt".to_string()],
        themes: vec!["theme".to_string()],
    };
    // Each resource type maps to its own field and no other.
    let cases = [
        (ResourceType::Extensions, "ext"),
        (ResourceType::Skills, "skill"),
        (ResourceType::Prompts, "prompt"),
        (ResourceType::Themes, "theme"),
    ];
    for (resource_type, expected) in cases {
        assert_eq!(snapshot.entries_for(resource_type), &[expected]);
    }
}
5581
#[test]
fn read_settings_json_missing_file_returns_empty_object() {
    // A missing settings file is not an error; it reads as `{}`.
    let value = read_settings_json(Path::new("/nonexistent/path/settings.json"))
        .expect("missing file should read ok");
    assert_eq!(value, json!({}));
}
5592
#[test]
fn read_settings_json_valid_file() {
    // A well-formed settings file parses into its JSON value.
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("settings.json");
    fs::write(&path, r#"{"foo": "bar"}"#).expect("write");
    assert_eq!(read_settings_json(&path).expect("read"), json!({"foo": "bar"}));
}
5601
#[test]
fn read_settings_json_invalid_json() {
    // Unparseable content surfaces as an error rather than a default value.
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("settings.json");
    fs::write(&path, "not json").expect("write");
    let _ = read_settings_json(&path).expect_err("invalid JSON must fail");
}
5609
#[test]
fn read_settings_snapshot_with_packages_and_entries() {
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("settings.json");
    // Mix string and object package entries plus string/array resource lists.
    let settings = json!({
        "packages": ["npm:foo", {"source": "npm:bar", "extensions": ["a.js"]}],
        "extensions": ["ext1.js"],
        "skills": "my-skill",
        "themes": ["dark.json", "light.json"]
    });
    fs::write(&path, serde_json::to_string(&settings).unwrap()).expect("write");

    let snapshot = read_settings_snapshot(&path).expect("read");
    let sources: Vec<&str> = snapshot.packages.iter().map(|p| p.source.as_str()).collect();
    assert_eq!(sources, vec!["npm:foo", "npm:bar"]);
    // Only the object-form entry carries a filter.
    assert!(snapshot.packages[1].filter.is_some());
    assert_eq!(snapshot.extensions, vec!["ext1.js"]);
    assert_eq!(snapshot.skills, vec!["my-skill"]);
    assert_eq!(snapshot.themes, vec!["dark.json", "light.json"]);
    assert!(snapshot.prompts.is_empty());
}
5631
#[test]
fn write_settings_json_atomic_creates_file() {
    // Parent directories are created as needed and the value round-trips.
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("sub/settings.json");
    write_settings_json_atomic(&path, &json!({"test": true})).expect("write");

    let round_tripped: Value =
        serde_json::from_str(&fs::read_to_string(&path).expect("read")).expect("parse");
    assert_eq!(round_tripped, json!({"test": true}));
}
5646
#[test]
fn update_package_sources_add_and_remove() {
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("settings.json");
    fs::write(&path, "{}").expect("write initial");
    // Helper: re-read the settings file and return its `packages` array.
    let current_packages = || -> Vec<Value> {
        read_settings_json(&path).expect("read")["packages"]
            .as_array()
            .expect("packages array")
            .clone()
    };

    update_package_sources(&path, "npm:foo", UpdateAction::Add, dir.path()).expect("add");
    assert_eq!(current_packages(), vec![json!("npm:foo")]);

    // A versioned spec for the same package must not create a second entry.
    update_package_sources(&path, "npm:foo@2.0", UpdateAction::Add, dir.path())
        .expect("add again");
    assert_eq!(
        current_packages().len(),
        1,
        "duplicate source should not be added"
    );

    update_package_sources(&path, "npm:foo", UpdateAction::Remove, dir.path()).expect("remove");
    assert!(current_packages().is_empty());
}
5671
#[test]
fn update_package_sources_normalizes_non_array_packages() {
    // A legacy object-valued `packages` entry is replaced with a proper array.
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("settings.json");
    let malformed = json!({ "packages": { "source": "npm:legacy" } });
    fs::write(
        &path,
        serde_json::to_string(&malformed).expect("serialize malformed settings"),
    )
    .expect("write malformed settings");

    update_package_sources(&path, "npm:foo", UpdateAction::Add, dir.path()).expect("add");
    let settings = read_settings_json(&path).expect("read");
    assert_eq!(
        settings["packages"].as_array().expect("packages array"),
        &vec![json!("npm:foo")]
    );
}
5690
#[test]
fn update_package_sources_normalizes_non_object_root() {
    // An array root document is rewritten into an object before updating.
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("settings.json");
    fs::write(&path, "[]").expect("write malformed root");

    update_package_sources(&path, "npm:foo", UpdateAction::Add, dir.path()).expect("add");
    let settings = read_settings_json(&path).expect("read");
    assert_eq!(
        settings["packages"].as_array().expect("packages array"),
        &vec![json!("npm:foo")]
    );
}
5702
#[test]
fn update_package_sources_rejects_empty_source() {
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("settings.json");
    fs::write(&path, "{}").expect("write initial");

    // A whitespace-only source is invalid and must not be persisted.
    let err = update_package_sources(&path, " ", UpdateAction::Add, dir.path())
        .expect_err("must fail");
    let message = err.to_string();
    assert!(
        message.contains("settings package source cannot be empty"),
        "unexpected error: {err}"
    );

    // The failed update must leave the settings document untouched.
    let settings = read_settings_json(&path).expect("read");
    assert!(
        settings.get("packages").is_none(),
        "failed update must not mutate packages"
    );
}
5723
#[test]
fn update_package_sources_deduplicates_equivalent_local_paths() {
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("settings.json");
    fs::write(&path, "{}").expect("write initial");

    // `./foo/../bar` and `./bar` normalize to the same location.
    update_package_sources(&path, "./foo/../bar", UpdateAction::Add, dir.path()).expect("add");
    update_package_sources(&path, "./bar", UpdateAction::Add, dir.path()).expect("add again");

    let packages = read_settings_json(&path).expect("read")["packages"]
        .as_array()
        .expect("packages array")
        .clone();
    assert_eq!(
        packages.len(),
        1,
        "equivalent local paths should deduplicate"
    );
    // The first add's original spelling is what stays recorded.
    assert_eq!(packages[0], json!("./foo/../bar"));
}
5742
#[test]
fn update_package_sources_remove_matches_equivalent_local_paths() {
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("settings.json");
    fs::write(&path, "{}").expect("write initial");

    // Removal with a differently spelled but equivalent path must match.
    update_package_sources(&path, "./foo/../bar", UpdateAction::Add, dir.path()).expect("add");
    update_package_sources(&path, "./bar", UpdateAction::Remove, dir.path()).expect("remove");

    let remaining = read_settings_json(&path).expect("read")["packages"]
        .as_array()
        .expect("packages array")
        .clone();
    assert!(remaining.is_empty(), "equivalent local paths should remove");
}
5756
#[test]
fn list_packages_in_settings_reads_all_formats() {
    // Both bare-string and object package entries are listed.
    let dir = tempfile::tempdir().expect("tempdir");
    let path = dir.path().join("settings.json");
    let settings = json!({
        "packages": [
            "npm:foo",
            {"source": "git:github.com/user/repo", "extensions": ["a.js"]}
        ]
    });
    fs::write(&path, serde_json::to_string(&settings).unwrap()).expect("write");

    let packages = list_packages_in_settings(&path).expect("list");
    assert_eq!(packages.len(), 2);
    let (bare, object) = (&packages[0], &packages[1]);
    assert_eq!(bare.source, "npm:foo");
    assert!(bare.filter.is_none());
    assert_eq!(object.source, "git:github.com/user/repo");
    assert!(object.filter.is_some());
}
5779
#[test]
fn read_pi_manifest_with_pi_field() {
    let dir = tempfile::tempdir().expect("tempdir");
    let manifest = json!({
        "name": "test-pkg",
        "version": "1.0.0",
        "pi": {
            "extensions": ["ext/a.js", "ext/b.js"],
            "skills": ["skills/foo.md"]
        }
    });
    fs::write(
        dir.path().join("package.json"),
        serde_json::to_string(&manifest).unwrap(),
    )
    .expect("write");

    // Only the listed resource kinds are populated; the rest stay `None`.
    let parsed = read_pi_manifest(dir.path())
        .expect("read manifest")
        .expect("manifest should be present");
    assert_eq!(
        parsed.extensions,
        Some(vec!["ext/a.js".to_string(), "ext/b.js".to_string()])
    );
    assert_eq!(parsed.skills, Some(vec!["skills/foo.md".to_string()]));
    assert!(parsed.prompts.is_none());
    assert!(parsed.themes.is_none());
}
5811
#[test]
fn read_pi_manifest_accepts_single_string_resource_entries() {
    let dir = tempfile::tempdir().expect("tempdir");
    // Each resource kind may be given as a bare string instead of a list.
    let manifest = json!({
        "name": "test-pkg",
        "version": "1.0.0",
        "pi": {
            "extensions": "ext/a.js",
            "skills": "skills/foo.md",
            "prompts": "prompts/intro.md",
            "themes": "themes/default.json"
        }
    });
    fs::write(
        dir.path().join("package.json"),
        serde_json::to_string(&manifest).unwrap(),
    )
    .expect("write");

    let parsed = read_pi_manifest(dir.path())
        .expect("read manifest")
        .expect("manifest should be present");
    // Each bare string is promoted to a one-element list.
    assert_eq!(parsed.extensions, Some(vec!["ext/a.js".to_string()]));
    assert_eq!(parsed.skills, Some(vec!["skills/foo.md".to_string()]));
    assert_eq!(parsed.prompts, Some(vec!["prompts/intro.md".to_string()]));
    assert_eq!(parsed.themes, Some(vec!["themes/default.json".to_string()]));
}
5839
#[test]
fn read_pi_manifest_no_pi_field() {
    // A package.json without a `pi` field yields no manifest.
    let dir = tempfile::tempdir().expect("tempdir");
    fs::write(
        dir.path().join("package.json"),
        r#"{"name": "test", "version": "1.0.0"}"#,
    )
    .expect("write");

    let manifest = read_pi_manifest(dir.path()).expect("read manifest");
    assert!(manifest.is_none());
}
5854
#[test]
fn read_pi_manifest_no_package_json() {
    // A directory without package.json yields no manifest and no error.
    let dir = tempfile::tempdir().expect("tempdir");
    let manifest = read_pi_manifest(dir.path()).expect("read manifest");
    assert!(manifest.is_none());
}
5864
#[test]
fn read_pi_manifest_errors_on_malformed_package_json() {
    let dir = tempfile::tempdir().expect("tempdir");
    let manifest_path = dir.path().join("package.json");
    fs::write(&manifest_path, "{ not valid json").expect("write malformed package.json");

    // The error must name both the failure and the offending file.
    let message = read_pi_manifest(dir.path())
        .expect_err("malformed package.json must error")
        .to_string();
    assert!(message.contains("Failed to parse package manifest"));
    assert!(message.contains(&manifest_path.display().to_string()));
}
5876
#[test]
fn read_pi_manifest_errors_when_pi_field_is_not_object() {
    let dir = tempfile::tempdir().expect("tempdir");
    let manifest_path = dir.path().join("package.json");
    fs::write(&manifest_path, r#"{"name":"pkg","pi":"not-an-object"}"#)
        .expect("write invalid pi manifest");

    // The error explains the constraint and points at the file.
    let message = read_pi_manifest(dir.path())
        .expect_err("non-object `pi` field must error")
        .to_string();
    assert!(message.contains("Invalid package manifest"));
    assert!(message.contains("`pi` must be an object"));
    assert!(message.contains(&manifest_path.display().to_string()));
}
5890
#[test]
fn read_pi_manifest_errors_when_resource_entries_are_not_string_lists() {
    let dir = tempfile::tempdir().expect("tempdir");
    let manifest_path = dir.path().join("package.json");
    fs::write(
        &manifest_path,
        r#"{"name":"pkg","pi":{"extensions":["ok",7]}}"#,
    )
    .expect("write invalid pi manifest");

    // A single non-string element poisons the whole list.
    let message = read_pi_manifest(dir.path())
        .expect_err("non-string manifest entries must error")
        .to_string();
    assert!(message.contains("Invalid package manifest"));
    assert!(message.contains("`pi.extensions` must be a string or array of strings"));
    assert!(message.contains(&manifest_path.display().to_string()));
}
5907
#[test]
fn read_pi_manifest_errors_when_resource_entry_escapes_package_root() {
    let dir = tempfile::tempdir().expect("tempdir");
    let manifest_path = dir.path().join("package.json");
    fs::write(
        &manifest_path,
        r#"{"name":"pkg","pi":{"extensions":["../outside/index.ts"]}}"#,
    )
    .expect("write invalid pi manifest");

    // `..` traversal out of the package root is rejected.
    let message = read_pi_manifest(dir.path())
        .expect_err("outside-root manifest entries must error")
        .to_string();
    assert!(message.contains("Invalid package manifest"));
    assert!(message.contains("`pi.extensions` paths must stay within the package root"));
    assert!(message.contains(&manifest_path.display().to_string()));
}
5925
#[test]
fn read_pi_manifest_errors_when_resource_pattern_escapes_package_root() {
    let dir = tempfile::tempdir().expect("tempdir");
    let manifest_path = dir.path().join("package.json");
    fs::write(
        &manifest_path,
        r#"{"name":"pkg","pi":{"extensions":["extensions","+../outside/index.ts"]}}"#,
    )
    .expect("write invalid pi manifest");

    // Override patterns (`+...`) are held to the same containment rule.
    let message = read_pi_manifest(dir.path())
        .expect_err("outside-root override patterns must error")
        .to_string();
    assert!(message.contains("Invalid package manifest"));
    assert!(message.contains("`pi.extensions` paths must stay within the package root"));
    assert!(message.contains(&manifest_path.display().to_string()));
}
5943
#[test]
fn temporary_dir_stable_hash() {
    // Identical inputs hash to the same directory across calls.
    assert_eq!(
        temporary_dir("npm", None),
        temporary_dir("npm", None),
        "same inputs should produce same path"
    );
    // A different suffix changes the hash input and therefore the path.
    assert_ne!(
        temporary_dir("npm", None),
        temporary_dir("npm", Some("foo")),
        "different suffix should produce different path"
    );
}
5957
#[test]
fn temporary_dir_includes_prefix() {
    // The rendered directory name embeds the tool marker and the given prefix.
    let rendered = temporary_dir("git-github.com", Some("user/repo"))
        .to_string_lossy()
        .into_owned();
    assert!(rendered.contains("pi-extensions"));
    assert!(rendered.contains("git-github.com"));
}
5965
#[test]
fn npm_prefix_root_matches_scope() {
    let dir = tempfile::tempdir().expect("tempdir");
    let manager = PackageManager::new(dir.path().to_path_buf());

    // Project scope installs under the project dir, temporary scope under the
    // hashed temp dir, and user scope has no npm prefix root at all.
    assert_eq!(
        manager.npm_prefix_root(PackageScope::Project),
        Some(dir.path().join(Config::project_dir()).join("npm"))
    );
    assert_eq!(
        manager.npm_prefix_root(PackageScope::Temporary),
        Some(temporary_dir("npm", None))
    );
    assert!(manager.npm_prefix_root(PackageScope::User).is_none());
}
5981
#[test]
fn compat_scan_enabled_recognizes_truthy_values() {
    // Mirrors the env-var parsing rule: trim, lowercase, then accept only the
    // four canonical truthy spellings.
    let is_truthy = |raw: &str| {
        let lower = raw.trim().to_ascii_lowercase();
        matches!(lower.as_str(), "1" | "true" | "yes" | "on")
    };

    for val in ["1", "true", "yes", "on", "TRUE", "Yes", "ON"] {
        assert!(is_truthy(val), "{val} should be truthy");
    }
    for val in ["0", "false", "no", "off", "", "random"] {
        assert!(!is_truthy(val), "{val} should be falsy");
    }
}
6006
#[test]
fn parse_source_npm_prefix() {
    // `npm:` specs keep the scope, split off the version, and count as pinned.
    let dir = tempfile::tempdir().expect("tempdir");
    let parsed = parse_source("npm:@scope/pkg@1.0", dir.path());
    match parsed {
        ParsedSource::Npm { spec, name, pinned } => {
            assert_eq!(spec, "@scope/pkg@1.0");
            assert_eq!(name, "@scope/pkg");
            assert!(pinned, "explicit version should pin the package");
        }
        other => panic!("Unexpected parsed source: {:?}", other),
    }
}
6023
#[test]
fn parse_source_npm_unpinned() {
    // No version suffix means the package is not pinned.
    let dir = tempfile::tempdir().expect("tempdir");
    match parse_source("npm:express", dir.path()) {
        ParsedSource::Npm { pinned, .. } => assert!(!pinned),
        other => panic!("Unexpected parsed source: {:?}", other),
    }
}
6034
#[test]
fn parse_source_git_prefix() {
    // `git:` specs split host/path and treat an explicit ref as pinned.
    let dir = tempfile::tempdir().expect("tempdir");
    let parsed = parse_source("git:github.com/user/repo@v2", dir.path());
    match parsed {
        ParsedSource::Git {
            clone_source,
            repo,
            host,
            path,
            r#ref,
            pinned,
        } => {
            assert_eq!(clone_source, "github.com/user/repo");
            assert_eq!(repo, "github.com/user/repo");
            assert_eq!(host, "github.com");
            assert_eq!(path, "user/repo");
            assert_eq!(r#ref.as_deref(), Some("v2"));
            assert!(pinned, "explicit ref should pin the source");
        }
        other => panic!("Unexpected parsed source: {:?}", other),
    }
}
6057
#[test]
fn parse_source_https_github_url() {
    // Bare HTTPS GitHub URLs are recognized as git sources; the `.git`
    // suffix is stripped when deriving the repo identifier.
    let dir = tempfile::tempdir().expect("tempdir");
    match parse_source("https://github.com/user/repo.git", dir.path()) {
        ParsedSource::Git { repo, host, .. } => {
            assert_eq!(repo, "github.com/user/repo");
            assert_eq!(host, "github.com");
        }
        // Include the unexpected variant in the panic so failures are
        // diagnosable (the original bare `panic!()` dropped it and left the
        // `other` binding unused).
        other => panic!("Unexpected parsed source: {other:?}"),
    }
}
6069
#[test]
fn parse_source_https_github_url_with_userinfo_and_ref() {
    // Userinfo stays in the clone URL but is stripped from the repo identity;
    // the trailing `@main` is parsed as a pinned ref.
    let dir = tempfile::tempdir().expect("tempdir");
    let parsed = parse_source(
        "git:https://token@github.com/user/repo.git@main",
        dir.path(),
    );
    match parsed {
        ParsedSource::Git {
            clone_source,
            repo,
            host,
            path,
            r#ref,
            pinned,
        } => {
            assert_eq!(clone_source, "https://token@github.com/user/repo.git");
            assert_eq!(repo, "github.com/user/repo");
            assert_eq!(host, "github.com");
            assert_eq!(path, "user/repo");
            assert_eq!(r#ref.as_deref(), Some("main"));
            assert!(pinned);
        }
        other => panic!("Unexpected parsed source: {:?}", other),
    }
}
6095
#[test]
fn parse_source_local_relative() {
    // A `./` source resolves relative to the provided working directory.
    let dir = tempfile::tempdir().expect("tempdir");
    match parse_source("./my-ext", dir.path()) {
        ParsedSource::Local { path } => assert_eq!(path, dir.path().join("my-ext")),
        // Report the unexpected variant (the original used a bare `panic!()`
        // with an unused `other` binding, hiding what was actually parsed).
        other => panic!("Unexpected parsed source: {other:?}"),
    }
}
6106
#[test]
#[cfg(unix)]
fn parse_source_local_absolute() {
    // An absolute path is kept as-is, ignoring the working directory.
    let dir = tempfile::tempdir().expect("tempdir");
    match parse_source("/abs/my-ext", dir.path()) {
        ParsedSource::Local { path } => assert_eq!(path, PathBuf::from("/abs/my-ext")),
        // Report the unexpected variant (the original used a bare `panic!()`
        // with an unused `other` binding, hiding what was actually parsed).
        other => panic!("Unexpected parsed source: {other:?}"),
    }
}
6118
#[test]
fn parse_source_windows_drive_absolute_stays_absolute() {
    let dir = tempfile::tempdir().expect("tempdir");
    // Both backslash and forward-slash drive paths parse as absolute locals,
    // untouched by the working directory.
    for spec in ["C:\\packages\\demo", "C:/packages/demo"] {
        match parse_source(spec, dir.path()) {
            ParsedSource::Local { path } => assert_eq!(path, PathBuf::from(spec)),
            other => panic!("Unexpected parsed source: {other:?}"),
        }
    }
}
6136
#[test]
fn parse_git_source_windows_drive_path_treated_as_local_repo() {
    // A drive-letter repo is cloned from disk: host becomes "local" and the
    // path component is a fixed-width hash of the source.
    let dir = tempfile::tempdir().expect("tempdir");
    let parsed = parse_source("git:C:\\packages\\demo@main", dir.path());
    match parsed {
        ParsedSource::Git {
            clone_source,
            repo,
            host,
            path,
            r#ref,
            pinned,
        } => {
            assert_eq!(clone_source, "C:\\packages\\demo");
            assert_eq!(repo, "C:\\packages\\demo");
            assert_eq!(host, "local");
            assert_eq!(path.len(), 16);
            assert_eq!(r#ref.as_deref(), Some("main"));
            assert!(pinned);
        }
        other => panic!("Unexpected parsed source: {other:?}"),
    }
}
6159
#[test]
fn local_path_from_file_url_keeps_windows_drive_root() {
    let dir = tempfile::tempdir().expect("tempdir");
    // file:// URLs with a drive letter resolve to the drive path, for both
    // slash styles and for the two- and three-slash forms alike.
    let cases = [
        ("file:///C:/packages/demo", "C:/packages/demo"),
        ("file:///C:\\packages\\demo", "C:\\packages\\demo"),
        ("file://C:/packages/demo", "C:/packages/demo"),
        ("file://C:\\packages\\demo", "C:\\packages\\demo"),
    ];
    for (spec, expected) in cases {
        assert_eq!(local_path_from_spec(spec, dir.path()), PathBuf::from(expected));
    }
}
6180
#[test]
fn local_path_from_file_url_localhost_keeps_absolute_root() {
    let cwd = Path::new("/home/user/project");
    // A `localhost` authority is transparent: the URL path is taken as-is,
    // whether it is a POSIX root, a bare host, or a Windows drive path.
    let cases = [
        ("file://localhost/tmp/repo", "/tmp/repo"),
        ("file://localhost", "/"),
        ("file://localhost/C:/packages/demo", "C:/packages/demo"),
        ("file://localhost/C:\\packages\\demo", "C:\\packages\\demo"),
    ];
    for (spec, expected) in cases {
        assert_eq!(local_path_from_spec(spec, cwd), PathBuf::from(expected));
    }
}
6201
#[test]
fn local_path_from_file_url_network_share_keeps_network_root() {
    // A non-localhost authority becomes a UNC-style `//server/...` root.
    let cwd = Path::new("/home/user/project");
    let resolved = local_path_from_spec("file://server/share/repo", cwd);
    assert_eq!(resolved, PathBuf::from("//server/share/repo"));
}
6210
#[test]
fn parse_git_source_local_path_hashes_deterministically() {
    // Parsing the same relative source twice must resolve to the same absolute
    // clone source and the same hashed path component.
    let dir = tempfile::tempdir().expect("tempdir");
    let result1 = parse_git_source("./local-repo", dir.path());
    let result2 = parse_git_source("./local-repo", dir.path());
    match (&result1, &result2) {
        (
            ParsedSource::Git {
                clone_source: clone_a,
                path: p1,
                ..
            },
            ParsedSource::Git {
                clone_source: clone_b,
                path: p2,
                ..
            },
        ) => {
            // Hoisted: both parses should resolve to the same absolute path.
            let expected = dir.path().join("local-repo").to_string_lossy().into_owned();
            assert_eq!(clone_a, &expected);
            assert_eq!(clone_b, &expected);
            assert_eq!(p1, p2, "same local source should produce same hash");
        }
        // Name the actual variants in the panic: the original `_ => panic!()`
        // gave no diagnostic when parsing produced a non-Git source.
        other => panic!("Unexpected parsed sources: {other:?}"),
    }
}
6240
#[test]
fn dedupe_packages_project_wins_over_user() {
    // Builds a ScopedPackage with no filter for the given source and scope.
    fn entry(source: &str, scope: PackageScope) -> ScopedPackage {
        ScopedPackage {
            pkg: PackageSpec {
                source: source.to_string(),
                filter: None,
            },
            scope,
        }
    }
    let dir = tempfile::tempdir().expect("tempdir");
    let manager = PackageManager::new(dir.path().to_path_buf());

    // Same package name at both scopes: the project-scoped entry survives.
    let deduped = manager.dedupe_packages(vec![
        entry("npm:foo@1.0", PackageScope::User),
        entry("npm:foo@2.0", PackageScope::Project),
    ]);
    assert_eq!(deduped.len(), 1);
    assert_eq!(deduped[0].scope, PackageScope::Project);
    assert_eq!(deduped[0].pkg.source, "npm:foo@2.0");
}
6272
#[test]
fn dedupe_packages_user_does_not_override_project() {
    // Builds a ScopedPackage with no filter for the given source and scope.
    fn entry(source: &str, scope: PackageScope) -> ScopedPackage {
        ScopedPackage {
            pkg: PackageSpec {
                source: source.to_string(),
                filter: None,
            },
            scope,
        }
    }
    let dir = tempfile::tempdir().expect("tempdir");
    let manager = PackageManager::new(dir.path().to_path_buf());

    // The project entry comes first here; a later user entry must not replace it.
    let deduped = manager.dedupe_packages(vec![
        entry("npm:bar@1.0", PackageScope::Project),
        entry("npm:bar@2.0", PackageScope::User),
    ]);
    assert_eq!(deduped.len(), 1);
    assert_eq!(deduped[0].scope, PackageScope::Project);
    assert_eq!(deduped[0].pkg.source, "npm:bar@1.0");
}
6300
#[test]
fn dedupe_packages_skips_blank_sources() {
    // Builds a ScopedPackage with no filter for the given source and scope.
    fn entry(source: &str, scope: PackageScope) -> ScopedPackage {
        ScopedPackage {
            pkg: PackageSpec {
                source: source.to_string(),
                filter: None,
            },
            scope,
        }
    }
    let dir = tempfile::tempdir().expect("tempdir");
    let manager = PackageManager::new(dir.path().to_path_buf());

    // The whitespace-only source is dropped entirely.
    let deduped = manager.dedupe_packages(vec![
        entry(" ", PackageScope::User),
        entry("npm:bar@1.0", PackageScope::Project),
    ]);
    assert_eq!(deduped.len(), 1);
    assert_eq!(deduped[0].pkg.source, "npm:bar@1.0");
}
6327
#[test]
fn dedupe_packages_different_names_preserved() {
    // Builds a ScopedPackage with no filter for the given source and scope.
    fn entry(source: &str, scope: PackageScope) -> ScopedPackage {
        ScopedPackage {
            pkg: PackageSpec {
                source: source.to_string(),
                filter: None,
            },
            scope,
        }
    }
    let dir = tempfile::tempdir().expect("tempdir");
    let manager = PackageManager::new(dir.path().to_path_buf());

    // Distinct package names never collapse, even within one scope.
    let deduped = manager.dedupe_packages(vec![
        entry("npm:foo", PackageScope::User),
        entry("npm:bar", PackageScope::User),
    ]);
    assert_eq!(deduped.len(), 2);
}
6353
#[test]
fn ensure_packages_installed_propagates_install_errors() {
    run_async(async {
        let dir = tempfile::tempdir().expect("tempdir");
        let manager = PackageManager::new(dir.path().to_path_buf());
        // Point at a local path that was never created.
        let absent = dir.path().join("missing-package");

        let entries = vec![PackageEntry {
            scope: PackageScope::Project,
            source: absent.to_string_lossy().into_owned(),
            filter: None,
        }];
        let err = manager
            .ensure_package_entries_installed(entries)
            .await
            .expect_err("missing package install should fail");

        assert!(
            err.to_string().contains("does not exist"),
            "unexpected error: {err}"
        );
    });
}
6376
#[test]
fn collect_auto_prompt_entries_finds_md_files() {
    let dir = tempfile::tempdir().expect("tempdir");
    let prompts_dir = dir.path().join("prompts");
    fs::create_dir_all(&prompts_dir).expect("create dir");
    // Two discoverable prompts, plus a non-markdown file and a hidden file
    // that must both be skipped.
    for (name, body) in [
        ("hello.md", "# Hello"),
        ("world.md", "# World"),
        ("notmd.txt", "text"),
        (".hidden.md", "hidden"),
    ] {
        fs::write(prompts_dir.join(name), body).expect("write");
    }

    let entries = collect_auto_prompt_entries(&prompts_dir);
    assert_eq!(entries.len(), 2);
    assert!(entries.iter().all(|p| p.extension().unwrap() == "md"));
}
6395
#[test]
fn collect_auto_prompt_entries_nonexistent_dir() {
    // A missing prompts directory simply means there are no prompts.
    assert!(collect_auto_prompt_entries(Path::new("/nonexistent")).is_empty());
}
6401
#[test]
fn collect_auto_theme_entries_finds_json_files() {
    let dir = tempfile::tempdir().expect("tempdir");
    let themes_dir = dir.path().join("themes");
    fs::create_dir_all(&themes_dir).expect("create dir");
    // Only .json files count as themes; the markdown file is ignored.
    for (name, body) in [("dark.json", "{}"), ("light.json", "{}"), ("readme.md", "text")] {
        fs::write(themes_dir.join(name), body).expect("write");
    }

    let entries = collect_auto_theme_entries(&themes_dir);
    assert_eq!(entries.len(), 2);
    assert!(entries.iter().all(|p| p.extension().unwrap() == "json"));
}
6415
#[test]
fn collect_auto_extension_entries_finds_native_descriptors() {
    let dir = tempfile::tempdir().expect("tempdir");
    let ext_dir = dir.path().join("extensions");
    fs::create_dir_all(&ext_dir).expect("create dir");
    fs::write(ext_dir.join("a.native.json"), "{}").expect("write");
    fs::write(ext_dir.join("b.native.json"), "{}").expect("write");
    fs::write(ext_dir.join("c.md"), "c").expect("write");

    let entries = collect_auto_extension_entries(&ext_dir);
    assert!(entries.len() >= 2);
    // Both native descriptors are discovered; the markdown file is not.
    let found = |name: &str| entries.iter().any(|p| p.file_name().unwrap() == name);
    assert!(found("a.native.json"), "should find native descriptor files");
    assert!(found("b.native.json"), "should find native descriptor files");
    assert!(!found("c.md"), "should not find .md files");
}
6442
#[test]
fn collect_auto_extension_entries_finds_root_js_entry_file() {
    // A lone TypeScript file at the extensions root counts as an entry point.
    let dir = tempfile::tempdir().expect("tempdir");
    let ext_dir = dir.path().join("extensions");
    fs::create_dir_all(&ext_dir).expect("create dir");
    fs::write(ext_dir.join("my_extension.ts"), "export default {}").expect("write");

    let found = collect_auto_extension_entries(&ext_dir)
        .iter()
        .any(|p| p.file_name().unwrap() == "my_extension.ts");
    assert!(found);
}
6457
#[test]
fn collect_auto_extension_entries_deduplicates_index_entry() {
    // index.ts is both a conventional entrypoint and a root file; it must
    // still appear only once in the collected entries.
    let dir = tempfile::tempdir().expect("tempdir");
    let ext_dir = dir.path().join("extensions");
    fs::create_dir_all(&ext_dir).expect("create dir");
    fs::write(ext_dir.join("index.ts"), "export default {}").expect("write");

    let occurrences = collect_auto_extension_entries(&ext_dir)
        .iter()
        .filter(|p| p.file_name().unwrap() == "index.ts")
        .count();
    assert_eq!(occurrences, 1, "index.ts should only be present once");
}
6472
#[test]
fn collect_auto_extension_entries_fail_closed_on_malformed_root_package_manifest() {
    // When the root package.json cannot be parsed, discovery must yield
    // nothing rather than fall back to conventional entrypoints.
    let dir = tempfile::tempdir().expect("tempdir");
    let ext_dir = dir.path().join("extensions");
    fs::create_dir_all(&ext_dir).expect("create dir");
    fs::write(ext_dir.join("package.json"), "{ not valid json")
        .expect("write malformed package.json");
    fs::write(ext_dir.join("index.ts"), "export default {}").expect("write index.ts");

    assert!(
        collect_auto_extension_entries(&ext_dir).is_empty(),
        "malformed root package.json must not fall back to conventional entrypoints"
    );
}
6488
#[test]
fn collect_auto_extension_entries_fail_closed_on_malformed_root_extension_manifest() {
    // A broken extension.json likewise suppresses conventional discovery.
    let dir = tempfile::tempdir().expect("tempdir");
    let ext_dir = dir.path().join("extensions");
    fs::create_dir_all(&ext_dir).expect("create dir");
    fs::write(ext_dir.join("extension.json"), "{ not valid json")
        .expect("write malformed extension.json");
    fs::write(ext_dir.join("index.ts"), "export default {}").expect("write index.ts");

    assert!(
        collect_auto_extension_entries(&ext_dir).is_empty(),
        "malformed root extension.json must not fall back to conventional entrypoints"
    );
}
6504
#[test]
fn collect_auto_extension_entries_respects_empty_root_manifest_extensions() {
    // An explicit empty `pi.extensions` list opts out of discovery entirely,
    // even when a conventional index.ts exists next to it.
    let dir = tempfile::tempdir().expect("tempdir");
    let ext_dir = dir.path().join("extensions");
    fs::create_dir_all(&ext_dir).expect("create dir");
    let manifest = json!({
        "name": "test-pkg",
        "pi": {
            "extensions": []
        }
    });
    fs::write(
        ext_dir.join("package.json"),
        serde_json::to_string_pretty(&manifest).expect("serialize package.json"),
    )
    .expect("write package.json");
    fs::write(ext_dir.join("index.ts"), "export default {}").expect("write index.ts");

    assert!(
        collect_auto_extension_entries(&ext_dir).is_empty(),
        "explicit empty root package.json#pi.extensions must disable conventional fallback"
    );
}
6529
#[test]
fn collect_auto_extension_entries_respects_missing_root_manifest_targets() {
    // A manifest that names only nonexistent targets yields no entries and
    // must not fall back to the conventional index.ts.
    let dir = tempfile::tempdir().expect("tempdir");
    let ext_dir = dir.path().join("extensions");
    fs::create_dir_all(&ext_dir).expect("create dir");
    let manifest = json!({
        "name": "test-pkg",
        "pi": {
            "extensions": ["missing/index.ts"]
        }
    });
    fs::write(
        ext_dir.join("package.json"),
        serde_json::to_string_pretty(&manifest).expect("serialize package.json"),
    )
    .expect("write package.json");
    fs::write(ext_dir.join("index.ts"), "export default {}").expect("write index.ts");

    assert!(
        collect_auto_extension_entries(&ext_dir).is_empty(),
        "missing root package.json#pi.extensions targets must disable conventional fallback"
    );
}
6554
6555 #[test]
6560 fn resolve_extension_entries_finds_index_native_json() {
6561 let dir = tempfile::tempdir().expect("tempdir");
6562 let ext_dir = dir.path().join("ext");
6563 fs::create_dir_all(&ext_dir).expect("create dir");
6564 fs::write(ext_dir.join("index.native.json"), "{}").expect("write");
6565
6566 let entries = resolve_extension_entries(&ext_dir).expect("entries");
6567 assert_eq!(entries, vec![ext_dir.join("index.native.json")]);
6568 }
6569
6570 #[test]
6571 fn resolve_extension_entries_finds_index_ts() {
6572 let dir = tempfile::tempdir().expect("tempdir");
6573 let ext_dir = dir.path().join("ext");
6574 fs::create_dir_all(&ext_dir).expect("create dir");
6575 fs::write(ext_dir.join("index.ts"), "export default {}").expect("write");
6576
6577 let entries = resolve_extension_entries(&ext_dir).expect("entries");
6578 assert_eq!(entries, vec![ext_dir.join("index.ts")]);
6579 }
6580
6581 #[test]
6582 fn resolve_extension_entries_finds_index_js() {
6583 let dir = tempfile::tempdir().expect("tempdir");
6584 let ext_dir = dir.path().join("ext");
6585 fs::create_dir_all(&ext_dir).expect("create dir");
6586 fs::write(ext_dir.join("index.js"), "export default {}").expect("write");
6587
6588 let entries = resolve_extension_entries(&ext_dir).expect("entries");
6589 assert_eq!(entries, vec![ext_dir.join("index.js")]);
6590 }
6591
    // A full extension.json manifest takes precedence over conventional
    // index.* discovery; the manifest-bearing directory itself is the entry.
    #[test]
    fn resolve_extension_entries_prefers_manifest_over_index() {
        let dir = tempfile::tempdir().expect("tempdir");
        let ext_dir = dir.path().join("ext");
        fs::create_dir_all(&ext_dir).expect("create dir");
        fs::write(
            ext_dir.join("extension.json"),
            serde_json::to_string_pretty(&json!({
                "schema": "pi.ext.manifest.v1",
                "extension_id": "test.ext",
                "name": "Test",
                "version": "0.1.0",
                "api_version": "1.0",
                "runtime": "native-rust",
                "entrypoint": "main.native.json",
                "capabilities": []
            }))
            .unwrap(),
        )
        .expect("write manifest");
        fs::write(ext_dir.join("main.native.json"), "{}").expect("write main");
        // Decoy conventional entrypoint that must lose to the manifest.
        fs::write(ext_dir.join("index.ts"), "index").expect("write index");

        let entries = resolve_extension_entries(&ext_dir).expect("entries");
        // The directory is returned (manifest mode), not an individual file.
        assert_eq!(entries, vec![ext_dir]);
    }
6619
    // Same opt-out rule as the collect_* variant, exercised at the
    // resolve_extension_entries level: empty pi.extensions means "no entries".
    #[test]
    fn resolve_extension_entries_empty_manifest_extensions_fail_closed_without_index_fallback() {
        let dir = tempfile::tempdir().expect("tempdir");
        let ext_dir = dir.path().join("ext");
        fs::create_dir_all(&ext_dir).expect("create dir");
        fs::write(
            ext_dir.join("package.json"),
            serde_json::to_string_pretty(&json!({
                "name": "test-pkg",
                "pi": {
                    "extensions": []
                }
            }))
            .expect("serialize package.json"),
        )
        .expect("write package.json");
        fs::write(ext_dir.join("index.ts"), "export default {}").expect("write index.ts");

        let entries = resolve_extension_entries(&ext_dir).expect("entries");
        assert!(
            entries.is_empty(),
            "explicit empty package.json#pi.extensions must not fall back to index.*"
        );
    }
6644
    // Manifest entries pointing at nonexistent files resolve to nothing;
    // discovery must not quietly recover via the conventional index.* file.
    #[test]
    fn resolve_extension_entries_missing_manifest_targets_fail_closed_without_index_fallback() {
        let dir = tempfile::tempdir().expect("tempdir");
        let ext_dir = dir.path().join("ext");
        fs::create_dir_all(&ext_dir).expect("create dir");
        fs::write(
            ext_dir.join("package.json"),
            serde_json::to_string_pretty(&json!({
                "name": "test-pkg",
                "pi": {
                    "extensions": ["extensions/missing.js"]
                }
            }))
            .expect("serialize package.json"),
        )
        .expect("write package.json");
        fs::write(ext_dir.join("index.ts"), "export default {}").expect("write index.ts");

        let entries = resolve_extension_entries(&ext_dir).expect("entries");
        assert!(
            entries.is_empty(),
            "missing package.json#pi.extensions targets must not fall back to index.*"
        );
    }
6669
    // Negative ("!") patterns in pi.extensions subtract files from a directory
    // pulled in by a positive entry.
    #[test]
    fn resolve_extension_entries_applies_manifest_pattern_overrides() {
        let dir = tempfile::tempdir().expect("tempdir");
        let ext_dir = dir.path().join("ext");
        let nested_dir = ext_dir.join("extensions");
        fs::create_dir_all(&nested_dir).expect("create nested dir");
        fs::write(nested_dir.join("a.native.json"), "{}").expect("write a.native.json");
        fs::write(nested_dir.join("b.native.json"), "{}").expect("write b.native.json");
        fs::write(
            ext_dir.join("package.json"),
            serde_json::to_string_pretty(&json!({
                "name": "test-pkg",
                "pi": {
                    // Include the whole directory, then exclude one file.
                    "extensions": ["extensions", "!extensions/b.native.json"]
                }
            }))
            .expect("serialize package.json"),
        )
        .expect("write package.json");

        let entries = resolve_extension_entries(&ext_dir).expect("entries");
        assert_eq!(entries, vec![nested_dir.join("a.native.json")]);
    }
6693
    // Manifest entries that traverse outside the package root ("../…") must be
    // rejected, and the rejection must not re-enable conventional fallback.
    #[test]
    fn collect_auto_extension_entries_fail_closed_on_outside_root_manifest_target() {
        let dir = tempfile::tempdir().expect("tempdir");
        let ext_dir = dir.path().join("ext");
        let escaped_dir = dir.path().join("escaped");
        fs::create_dir_all(&ext_dir).expect("create ext dir");
        fs::create_dir_all(&escaped_dir).expect("create escaped dir");
        // Target outside the package root; must never be resolved.
        fs::write(escaped_dir.join("index.ts"), "export default {};").expect("write escaped entry");
        fs::write(ext_dir.join("index.ts"), "export default {};").expect("write fallback entry");
        fs::write(
            ext_dir.join("package.json"),
            serde_json::to_string_pretty(&json!({
                "name": "test-pkg",
                "pi": {
                    "extensions": ["../escaped/index.ts"]
                }
            }))
            .expect("serialize package.json"),
        )
        .expect("write package.json");

        let entries = collect_auto_extension_entries(&ext_dir);
        assert!(
            entries.is_empty(),
            "outside-root package.json#pi.extensions must disable conventional fallback"
        );
    }
6721
6722 #[test]
6723 fn resolve_extension_entries_empty_dir_returns_none() {
6724 let dir = tempfile::tempdir().expect("tempdir");
6725 let ext_dir = dir.path().join("ext");
6726 fs::create_dir_all(&ext_dir).expect("create dir");
6727 assert!(resolve_extension_entries(&ext_dir).is_none());
6728 }
6729
    // An explicit empty `pi.skills` array disables convention-based skill
    // discovery for the package, even when skills exist on disk.
    #[test]
    fn collect_default_resources_respects_empty_manifest_entries() {
        let dir = tempfile::tempdir().expect("tempdir");
        let package_root = dir.path().join("pkg");
        let skills_dir = package_root.join("skills").join("my-skill");
        fs::create_dir_all(&skills_dir).expect("create skills dir");
        fs::write(skills_dir.join("SKILL.md"), "# Skill").expect("write skill");
        fs::write(
            package_root.join("package.json"),
            serde_json::to_string_pretty(&json!({
                "name": "pkg",
                "pi": {
                    "skills": []
                }
            }))
            .expect("serialize package.json"),
        )
        .expect("write package.json");

        let mut target = ResourceList::default();
        let metadata = PathMetadata {
            source: package_root.display().to_string(),
            scope: PackageScope::Project,
            origin: ResourceOrigin::Package,
            base_dir: Some(package_root.clone()),
        };

        PackageManager::collect_default_resources(
            &package_root,
            ResourceType::Skills,
            &mut target,
            &metadata,
        )
        .expect("collect default resources");

        assert!(
            target.items.is_empty(),
            "explicit empty package.json#pi.skills must disable convention fallback"
        );
    }
6770
    // collect_manifest_files honors the same opt-out: an empty pi.extensions
    // array yields no files and no enabled set.
    #[test]
    fn collect_manifest_files_respects_empty_manifest_entries() {
        let dir = tempfile::tempdir().expect("tempdir");
        let package_root = dir.path().join("pkg");
        let extensions_dir = package_root.join("extensions");
        fs::create_dir_all(&extensions_dir).expect("create extensions dir");
        fs::write(extensions_dir.join("index.ts"), "export default {}").expect("write index.ts");
        fs::write(
            package_root.join("package.json"),
            serde_json::to_string_pretty(&json!({
                "name": "pkg",
                "pi": {
                    "extensions": []
                }
            }))
            .expect("serialize package.json"),
        )
        .expect("write package.json");

        let (all_files, enabled) =
            PackageManager::collect_manifest_files(&package_root, ResourceType::Extensions)
                .expect("collect manifest files");

        assert!(all_files.is_empty());
        assert!(enabled.is_empty());
    }
6797
    // Skill discovery picks up SKILL.md inside subdirectories and top-level
    // .md files, but ignores non-markdown files.
    #[test]
    fn collect_skill_entries_finds_skill_md_in_subdirs() {
        let dir = tempfile::tempdir().expect("tempdir");
        let skills_dir = dir.path().join("skills");
        fs::create_dir_all(skills_dir.join("my-skill")).expect("create dir");
        fs::write(skills_dir.join("my-skill/SKILL.md"), "# Skill").expect("write skill");
        fs::write(skills_dir.join("top-level.md"), "# Top").expect("write top");
        fs::write(skills_dir.join("readme.txt"), "text").expect("write txt");

        let entries = collect_skill_entries(&skills_dir);
        assert!(entries.iter().any(|p| p.file_name().unwrap() == "SKILL.md"));
        assert!(
            entries
                .iter()
                .any(|p| p.file_name().unwrap() == "top-level.md")
        );
        // Non-markdown files are excluded from skill entries.
        assert!(
            !entries
                .iter()
                .any(|p| p.file_name().unwrap() == "readme.txt")
        );
    }
6824
    // A symlink pointing back into its own directory must not cause infinite
    // recursion or duplicate entries during skill discovery.
    #[cfg(unix)]
    #[test]
    fn collect_skill_entries_ignores_symlink_cycles() {
        let dir = tempfile::tempdir().expect("tempdir");
        let skills_dir = dir.path().join("skills");
        let skill_dir = skills_dir.join("my-skill");
        fs::create_dir_all(&skill_dir).expect("create dir");
        fs::write(skill_dir.join("SKILL.md"), "# Skill").expect("write skill");

        // skill_dir/loop -> skill_dir creates a traversal cycle.
        let loop_link = skill_dir.join("loop");
        std::os::unix::fs::symlink(&skill_dir, &loop_link).expect("create symlink loop");

        let entries = collect_skill_entries(&skills_dir);
        assert_eq!(entries.len(), 1);
        assert_eq!(entries[0], skill_dir.join("SKILL.md"));
    }
6841
    // Two paths to the same skill tree (real dir + alias symlink) must be
    // deduplicated by canonical identity, yielding a single entry.
    #[cfg(unix)]
    #[test]
    fn collect_skill_entries_dedupes_alias_symlink_to_same_skill_tree() {
        let dir = tempfile::tempdir().expect("tempdir");
        let skills_dir = dir.path().join("skills");
        let real_root = skills_dir.join("real");
        let skill_dir = real_root.join("my-skill");
        fs::create_dir_all(&skill_dir).expect("create dir");
        fs::write(skill_dir.join("SKILL.md"), "# Skill").expect("write skill");

        // skills/alias -> skills/real makes the skill reachable twice.
        std::os::unix::fs::symlink(&real_root, skills_dir.join("alias"))
            .expect("create alias symlink");

        let entries = collect_skill_entries(&skills_dir);
        assert_eq!(entries.len(), 1);
        let canonical_entries = entries
            .iter()
            .map(|path| canonical_identity_path(path))
            .collect::<std::collections::HashSet<_>>();
        assert_eq!(canonical_entries.len(), 1);
        assert!(canonical_entries.contains(&canonical_identity_path(&skill_dir.join("SKILL.md"))));
    }
6864
    // After a cloned repo directory is removed, its now-empty host/user parent
    // directories are pruned — but the git root itself is preserved.
    #[test]
    fn prune_empty_git_parents_removes_empty_ancestors() {
        let dir = tempfile::tempdir().expect("tempdir");
        let root = dir.path().join("git");
        let deep = root.join("github.com/user/repo");
        fs::create_dir_all(&deep).expect("create dirs");

        // Simulate a package removal that leaves empty ancestor dirs behind.
        fs::remove_dir(&deep).expect("remove repo dir");

        prune_empty_git_parents(&deep, &root);

        assert!(!root.join("github.com/user").exists());
        assert!(!root.join("github.com").exists());
        // The root boundary must never be pruned.
        assert!(root.exists());
    }
6887
    // Bootstrapping the npm packages root creates a private package.json
    // (named "pi-packages") and a .gitignore.
    #[test]
    fn ensure_npm_project_creates_package_json() {
        let dir = tempfile::tempdir().expect("tempdir");
        let root = dir.path().join("npm");
        ensure_npm_project(&root).expect("ensure");
        assert!(root.join("package.json").exists());
        assert!(root.join(".gitignore").exists());

        let content = fs::read_to_string(root.join("package.json")).expect("read");
        let json: Value = serde_json::from_str(&content).expect("parse");
        assert_eq!(json["name"], "pi-packages");
        // private=true keeps the scaffold from being published by accident.
        assert_eq!(json["private"], true);
    }
6905
6906 #[test]
6907 fn ensure_npm_project_does_not_overwrite_existing() {
6908 let dir = tempfile::tempdir().expect("tempdir");
6909 let root = dir.path().join("npm");
6910 fs::create_dir_all(&root).expect("create dir");
6911 fs::write(root.join("package.json"), r#"{"name":"existing"}"#).expect("write");
6912 ensure_npm_project(&root).expect("ensure");
6913 let content = fs::read_to_string(root.join("package.json")).expect("read");
6914 assert!(content.contains("existing"), "should not overwrite");
6915 }
6916
6917 #[test]
6918 fn ensure_git_ignore_creates_gitignore() {
6919 let dir = tempfile::tempdir().expect("tempdir");
6920 let root = dir.path().join("git");
6921 ensure_git_ignore(&root).expect("ensure");
6922 let content = fs::read_to_string(root.join(".gitignore")).expect("read");
6923 assert!(content.contains('*'));
6924 assert!(content.contains("!.gitignore"));
6925 }
6926
    // entries_for maps each ResourceType to a clone of its manifest vector,
    // and returns None for resource types the manifest does not declare.
    #[test]
    fn pi_manifest_entries_for_returns_cloned_vectors() {
        let manifest = PiManifest {
            extensions: Some(vec!["a.js".to_string()]),
            skills: None,
            prompts: Some(vec!["p.md".to_string()]),
            themes: None,
        };
        assert_eq!(
            manifest.entries_for(ResourceType::Extensions),
            Some(vec!["a.js".to_string()])
        );
        assert!(manifest.entries_for(ResourceType::Skills).is_none());
        assert_eq!(
            manifest.entries_for(ResourceType::Prompts),
            Some(vec!["p.md".to_string()])
        );
        assert!(manifest.entries_for(ResourceType::Themes).is_none());
    }
6950
6951 #[test]
6956 fn resource_type_all_and_as_str() {
6957 let all = ResourceType::all();
6958 assert_eq!(all.len(), 4);
6959 assert_eq!(ResourceType::Extensions.as_str(), "extensions");
6960 assert_eq!(ResourceType::Skills.as_str(), "skills");
6961 assert_eq!(ResourceType::Prompts.as_str(), "prompts");
6962 assert_eq!(ResourceType::Themes.as_str(), "themes");
6963 }
6964
6965 #[test]
6970 fn read_installed_npm_version_parses_package_json() {
6971 let dir = tempfile::tempdir().expect("tempdir");
6972 fs::write(
6973 dir.path().join("package.json"),
6974 r#"{"name":"foo","version":"1.2.3"}"#,
6975 )
6976 .expect("write");
6977 let version = read_installed_npm_version(dir.path());
6978 assert_eq!(version, Some("1.2.3".to_string()));
6979 }
6980
6981 #[test]
6982 fn read_installed_npm_version_missing_version_field() {
6983 let dir = tempfile::tempdir().expect("tempdir");
6984 fs::write(dir.path().join("package.json"), r#"{"name":"foo"}"#).expect("write");
6985 assert!(read_installed_npm_version(dir.path()).is_none());
6986 }
6987
6988 #[test]
6989 fn read_installed_npm_version_no_package_json() {
6990 let dir = tempfile::tempdir().expect("tempdir");
6991 assert!(read_installed_npm_version(dir.path()).is_none());
6992 }
6993
6994 #[test]
6999 fn extract_package_source_string_value() {
7000 let (source, is_obj) = extract_package_source(&json!("npm:foo")).unwrap();
7001 assert_eq!(source, "npm:foo");
7002 assert!(!is_obj);
7003 }
7004
7005 #[test]
7006 fn extract_package_source_object_value() {
7007 let val = json!({"source": "git:repo"});
7008 let (source, is_obj) = extract_package_source(&val).unwrap();
7009 assert_eq!(source, "git:repo");
7010 assert!(is_obj);
7011 }
7012
7013 #[test]
7014 fn extract_package_source_invalid_returns_none() {
7015 assert!(extract_package_source(&json!(42)).is_none());
7016 assert!(extract_package_source(&json!(null)).is_none());
7017 }
7018
7019 #[test]
7024 fn auto_dirs_constructs_correct_paths() {
7025 let base = Path::new("/home/user/.pi/agent");
7026 let dirs = AutoDirs::new(base);
7027 assert_eq!(dirs.extensions, base.join("extensions"));
7028 assert_eq!(dirs.skills, base.join("skills"));
7029 assert_eq!(dirs.prompts, base.join("prompts"));
7030 assert_eq!(dirs.themes, base.join("themes"));
7031 }
7032
    // Only entries starting with an override sigil (!, +, -) are treated as
    // override patterns; plain entries are filtered out.
    #[test]
    fn get_override_patterns_filters_correctly() {
        let entries = vec![
            "a.js".to_string(),
            "!excluded.js".to_string(),
            "+forced.js".to_string(),
            "-removed.js".to_string(),
            "b.js".to_string(),
        ];
        let overrides = get_override_patterns(&entries);
        assert_eq!(overrides.len(), 3);
        assert!(overrides.contains(&"!excluded.js".to_string()));
        assert!(overrides.contains(&"+forced.js".to_string()));
        assert!(overrides.contains(&"-removed.js".to_string()));
    }
7052
7053 #[test]
7058 fn local_path_from_spec_file_url() {
7059 let cwd = Path::new("/home/user/project");
7060 let result = local_path_from_spec("file:///abs/repo", cwd);
7061 assert_eq!(result, PathBuf::from("/abs/repo"));
7062 }
7063
7064 #[test]
7065 fn local_path_from_spec_relative() {
7066 let cwd = Path::new("/home/user/project");
7067 let result = local_path_from_spec("./my-repo", cwd);
7068 assert_eq!(result, PathBuf::from("/home/user/project/my-repo"));
7069 }
7070
7071 #[test]
7072 fn parse_git_source_sanitizes_paths() {
7073 let dir = tempfile::tempdir().expect("tempdir");
7074
7075 match parse_git_source("../../evil/repo", dir.path()) {
7078 ParsedSource::Git { host, .. } => {
7079 assert_eq!(host, "local");
7082 }
7083 other => panic!(),
7084 }
7085
7086 match parse_git_source("github.com/../../user/repo", dir.path()) {
7088 ParsedSource::Git { host, path, .. } => {
7089 assert_eq!(host, "github.com");
7091 assert_eq!(path, "user/repo");
7092 }
7093 other => panic!(),
7094 }
7095
7096 match parse_git_source("..", dir.path()) {
7098 ParsedSource::Git { host, .. } => {
7099 assert_eq!(host, "local");
7101 }
7102 other => panic!(),
7103 }
7104
7105 let (_, host, path) = normalize_remote_git_repo("git@evil.com:../../etc/passwd");
7107 assert_eq!(host, "evil.com");
7108 assert_eq!(
7109 path, "etc/passwd",
7110 "dot-dot segments must be stripped from SCP-like paths"
7111 );
7112 assert!(
7113 !path.contains(".."),
7114 "path must not contain traversal segments"
7115 );
7116
7117 let (_, _, path) = normalize_remote_git_repo("git@github.com:../user/repo");
7119 assert!(!path.contains(".."));
7120 assert_eq!(path, "user/repo");
7121 }
7122
    // Test fixture: builds a trusted npm PackageLockEntry for "demo-pkg" with
    // the given source string, requested/installed version provenance, digest,
    // and pinned flag.
    fn sample_npm_lock_entry(
        source: &str,
        requested_spec: &str,
        requested_version: Option<&str>,
        installed_version: &str,
        digest: &str,
        pinned: bool,
    ) -> PackageLockEntry {
        PackageLockEntry {
            identity: "npm:demo-pkg".to_string(),
            source: source.to_string(),
            source_kind: PackageSourceKind::Npm,
            resolved: PackageResolvedProvenance::Npm {
                name: "demo-pkg".to_string(),
                requested_spec: requested_spec.to_string(),
                requested_version: requested_version.map(str::to_string),
                installed_version: installed_version.to_string(),
                pinned,
            },
            digest_sha256: digest.to_string(),
            trust_state: PackageEntryTrustState::Trusted,
        }
    }
7146
    // Installing over an existing pinned entry whose digest differs must fail
    // closed with the "digest_mismatch" code.
    #[test]
    fn evaluate_lock_transition_rejects_install_digest_mismatch() {
        let existing = sample_npm_lock_entry(
            "npm:demo-pkg@1.0.0",
            "demo-pkg@1.0.0",
            Some("1.0.0"),
            "1.0.0",
            "aaaaaaaa",
            true,
        );
        // Identical provenance, different content digest.
        let candidate = sample_npm_lock_entry(
            "npm:demo-pkg@1.0.0",
            "demo-pkg@1.0.0",
            Some("1.0.0"),
            "1.0.0",
            "bbbbbbbb",
            true,
        );

        let mismatch =
            evaluate_lock_transition(Some(&existing), &candidate, PackageLockAction::Install)
                .expect_err("install should fail closed on digest mismatch");
        assert_eq!(mismatch.code, "digest_mismatch");
    }
7171
    // An unpinned Update may rotate both provenance and digest; the transition
    // records the reasons instead of rejecting.
    #[test]
    fn evaluate_lock_transition_allows_unpinned_update_changes() {
        let existing =
            sample_npm_lock_entry("npm:demo-pkg", "demo-pkg", None, "1.0.0", "aaaaaaaa", false);
        let candidate =
            sample_npm_lock_entry("npm:demo-pkg", "demo-pkg", None, "1.1.0", "bbbbbbbb", false);

        let transition =
            evaluate_lock_transition(Some(&existing), &candidate, PackageLockAction::Update)
                .expect("unpinned update should permit provenance/digest rotation");
        assert!(
            transition
                .reason_codes
                .contains(&"provenance_changed".to_string()),
            "expected provenance_changed reason code"
        );
        assert!(
            transition
                .reason_codes
                .contains(&"digest_changed".to_string()),
            "expected digest_changed reason code"
        );
    }
7195
    // Package digests cover only package content: mutating files under .git/
    // must not change the computed digest.
    #[test]
    fn digest_package_path_ignores_git_metadata() {
        let dir = tempfile::tempdir().expect("tempdir");
        let package_root = dir.path().join("pkg");
        fs::create_dir_all(package_root.join(".git")).expect("create .git dir");
        fs::write(package_root.join("index.js"), "export const ok = true;\n")
            .expect("write index.js");
        fs::write(package_root.join(".git/HEAD"), "ref: refs/heads/main\n")
            .expect("write .git/HEAD");

        let digest_before = digest_package_path(&package_root).expect("digest before");

        // Mutate and add .git metadata only; content files are untouched.
        fs::write(package_root.join(".git/HEAD"), "ref: refs/heads/feature\n")
            .expect("rewrite .git/HEAD");
        fs::write(
            package_root.join(".git/config"),
            "[core]\nrepositoryformatversion = 0\n",
        )
        .expect("write .git/config");

        let digest_after = digest_package_path(&package_root).expect("digest after");
        assert_eq!(
            digest_before, digest_after,
            "digest should ignore .git metadata"
        );
    }
7222
    // Locking the same local package twice must produce a byte-identical
    // lockfile (deterministic serialization, no timestamps or ordering drift).
    #[test]
    fn verify_and_record_lock_is_deterministic_for_same_inputs() {
        let dir = tempfile::tempdir().expect("tempdir");
        let cwd = dir.path().to_path_buf();
        let pkg = cwd.join("local-pkg");
        fs::create_dir_all(&pkg).expect("create local package dir");
        fs::write(pkg.join("index.js"), "export const stable = true;\n")
            .expect("write extension file");

        let manager = PackageManager::new(cwd.clone());
        manager
            .verify_and_record_lock(
                "./local-pkg",
                PackageScope::Project,
                PackageLockAction::Install,
            )
            .expect("first lock verification");

        let lockfile_path = cwd.join(".pi").join("packages.lock.json");
        let first = fs::read_to_string(&lockfile_path).expect("read first lockfile");

        // Second run with unchanged inputs.
        manager
            .verify_and_record_lock(
                "./local-pkg",
                PackageScope::Project,
                PackageLockAction::Install,
            )
            .expect("second lock verification");
        let second = fs::read_to_string(&lockfile_path).expect("read second lockfile");

        assert_eq!(
            first, second,
            "same inputs should produce identical lockfile artifacts"
        );
    }
7258
    // End-to-end reconcile scenario: lock two packages, remove one from
    // settings, then verify reconcile prunes exactly that entry, is idempotent,
    // and writes a "reconciled" event to the trust-audit log.
    #[test]
    fn reconcile_lockfile_prunes_entries_missing_from_settings() {
        let dir = tempfile::tempdir().expect("tempdir");
        let cwd = dir.path().to_path_buf();

        for name in ["pkg1", "pkg2"] {
            let pkg = cwd.join(name);
            fs::create_dir_all(&pkg).expect("create pkg dir");
            fs::write(pkg.join("index.js"), "export const ok = true;\n").expect("write entry");
        }

        let settings_path = cwd.join(".pi").join("settings.json");
        fs::create_dir_all(settings_path.parent().unwrap()).expect("mkdir .pi");
        fs::write(
            &settings_path,
            json!({ "packages": ["./pkg1", "./pkg2"] }).to_string(),
        )
        .expect("seed settings");

        let manager = PackageManager::new(cwd.clone());
        manager
            .verify_and_record_lock("./pkg1", PackageScope::Project, PackageLockAction::Install)
            .expect("lock pkg1");
        manager
            .verify_and_record_lock("./pkg2", PackageScope::Project, PackageLockAction::Install)
            .expect("lock pkg2");

        let lockfile_path = cwd.join(".pi").join("packages.lock.json");
        let before = read_package_lockfile(&lockfile_path).expect("read pre-reconcile lockfile");
        assert_eq!(
            before.entries.len(),
            2,
            "both pkgs should be locked initially"
        );

        // Drop pkg2 from the live settings set; it becomes stale in the lock.
        fs::write(
            &settings_path,
            json!({ "packages": ["./pkg1"] }).to_string(),
        )
        .expect("rewrite settings");

        let pruned = manager
            .reconcile_lockfile_sync(PackageScope::Project)
            .expect("reconcile");

        assert_eq!(pruned.len(), 1, "exactly one entry should be pruned");
        assert_eq!(
            pruned[0].source, "./pkg2",
            "pruned entry must be pkg2 (the one removed from settings), not pkg1"
        );
        assert!(
            pruned[0].identity.ends_with("pkg2"),
            "pruned identity should be the pkg2 absolute path, got {:?}",
            pruned[0].identity
        );

        let after = read_package_lockfile(&lockfile_path).expect("read post-reconcile lockfile");
        assert_eq!(
            after.entries.len(),
            1,
            "lockfile should have one remaining entry"
        );
        assert!(
            after.entries.iter().any(|e| e.source == "./pkg1"),
            "pkg1 should remain after reconcile"
        );

        // Idempotence: a second reconcile finds nothing stale.
        let again = manager
            .reconcile_lockfile_sync(PackageScope::Project)
            .expect("reconcile idempotent");
        assert!(again.is_empty(), "second reconcile should prune nothing");

        // The prune must be recorded in the trust-audit JSONL log.
        let audit_path = cwd.join(".pi").join("package-trust-audit.jsonl");
        let audit = fs::read_to_string(&audit_path).expect("read audit log");
        assert!(
            audit.lines().any(|line| line.contains("\"reconciled\"")),
            "trust-audit log should contain a reconciled remove event, got:\n{audit}"
        );
    }
7348
7349 #[test]
7350 fn reconcile_lockfile_noop_when_lockfile_absent() {
7351 let dir = tempfile::tempdir().expect("tempdir");
7352 let manager = PackageManager::new(dir.path().to_path_buf());
7353 let pruned = manager
7354 .reconcile_lockfile_sync(PackageScope::Project)
7355 .expect("reconcile should not error on missing lockfile");
7356 assert!(pruned.is_empty());
7357 }
7358
    // If settings.json is missing there is no authoritative live set, so
    // reconcile must preserve all lock entries rather than prune everything.
    #[test]
    fn reconcile_lockfile_preserves_entries_when_settings_file_is_missing() {
        let dir = tempfile::tempdir().expect("tempdir");
        let cwd = dir.path().to_path_buf();

        let pkg = cwd.join("keep-me");
        fs::create_dir_all(&pkg).expect("mkdir keep-me");
        fs::write(pkg.join("index.js"), "export const ok = true;\n").expect("write entry");

        let settings_path = cwd.join(".pi").join("settings.json");
        fs::create_dir_all(settings_path.parent().unwrap()).expect("mkdir .pi");
        fs::write(
            &settings_path,
            json!({ "packages": ["./keep-me"] }).to_string(),
        )
        .expect("seed settings");

        let manager = PackageManager::new(cwd.clone());
        manager
            .verify_and_record_lock(
                "./keep-me",
                PackageScope::Project,
                PackageLockAction::Install,
            )
            .expect("lock keep-me");

        let lockfile_path = cwd.join(".pi").join("packages.lock.json");
        let before_len = read_package_lockfile(&lockfile_path)
            .expect("read lockfile")
            .entries
            .len();
        assert_eq!(before_len, 1, "precondition: one locked entry");

        // Remove the settings file entirely — no live set remains.
        fs::remove_file(&settings_path).expect("delete settings");

        let pruned = manager
            .reconcile_lockfile_sync(PackageScope::Project)
            .expect("reconcile should not error on missing settings file");
        assert!(
            pruned.is_empty(),
            "reconcile must NOT prune entries when settings.json is missing — we have no authoritative live set to compare against"
        );

        let after_len = read_package_lockfile(&lockfile_path)
            .expect("read lockfile post-reconcile")
            .entries
            .len();
        assert_eq!(
            after_len, before_len,
            "lockfile must be preserved when settings file is missing"
        );
    }
7421
    // Property-based coverage for the small pure helper functions in this
    // module (spec parsing, pattern detection, path/string normalization,
    // lock-entry sorting).
    mod proptest_package_manager {
        use super::*;
        use proptest::prelude::*;

        proptest! {
            // parse_npm_spec must be total over arbitrary input strings.
            #[test]
            fn parse_npm_spec_never_panics(s in ".*") {
                let _ = parse_npm_spec(&s);
            }

            // Unscoped names with no "@" carry no version component.
            #[test]
            fn unscoped_no_at_returns_no_version(name in "[a-z][a-z0-9-]{0,20}") {
                let (parsed_name, version) = parse_npm_spec(&name);
                assert_eq!(parsed_name, name);
                assert!(version.is_none());
            }

            // The leading "@" of a scope must not be mistaken for a version sep.
            #[test]
            fn scoped_no_version(scope in "[a-z]{1,10}", pkg in "[a-z]{1,10}") {
                let input = format!("@{scope}/{pkg}");
                let (parsed_name, version) = parse_npm_spec(&input);
                assert_eq!(parsed_name, input);
                assert!(version.is_none());
            }

            // "@scope/pkg@x.y.z" splits into name and version at the last "@".
            #[test]
            fn scoped_with_version(
                scope in "[a-z]{1,10}",
                pkg in "[a-z]{1,10}",
                ver in "[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}",
            ) {
                let input = format!("@{scope}/{pkg}@{ver}");
                let (parsed_name, version) = parse_npm_spec(&input);
                assert_eq!(parsed_name, format!("@{scope}/{pkg}"));
                assert_eq!(version, Some(ver));
            }

            // Any of the override sigils (!, +, -) marks a pattern.
            #[test]
            fn is_pattern_detects_prefix(
                prefix_idx in 0..3usize,
                suffix in "[a-z.]{1,20}",
            ) {
                let prefix = ["!", "+", "-"][prefix_idx];
                let input = format!("{prefix}{suffix}");
                assert!(is_pattern(&input));
            }

            // Glob wildcards (* and ?) anywhere in the string mark a pattern.
            #[test]
            fn is_pattern_detects_wildcards(
                prefix in "[a-z]{0,5}",
                wild_idx in 0..2usize,
                suffix in "[a-z]{0,5}",
            ) {
                let wild = ["*", "?"][wild_idx];
                let input = format!("{prefix}{wild}{suffix}");
                assert!(is_pattern(&input));
            }

            #[test]
            fn plain_strings_not_patterns(s in "[a-z0-9]{1,20}") {
                assert!(!is_pattern(&s));
            }

            // split_patterns partitions its input completely: every element
            // lands in exactly one of the two output sets.
            #[test]
            fn split_patterns_partition_is_complete(
                plains in prop::collection::vec("[a-z]{1,10}", 0..5),
                patterns in prop::collection::vec("[!+*][a-z]{1,10}", 0..5),
            ) {
                let mut all = plains;
                all.extend(patterns);
                let (split_plain, split_patterns) = split_patterns(&all);
                assert_eq!(
                    split_plain.len() + split_patterns.len(),
                    all.len(),
                    "partition should be complete"
                );
                for p in &split_patterns {
                    assert!(is_pattern(p));
                }
            }

            // Two hex characters per input byte.
            #[test]
            fn hex_encode_output_length(bytes in prop::collection::vec(any::<u8>(), 0..64)) {
                let hex = hex_encode(&bytes);
                assert_eq!(hex.len(), bytes.len() * 2);
            }

            #[test]
            fn hex_encode_is_lowercase_hex(bytes in prop::collection::vec(any::<u8>(), 0..64)) {
                let hex = hex_encode(&bytes);
                assert!(hex.chars().all(|c| c.is_ascii_hexdigit() && !c.is_ascii_uppercase()));
            }

            // posix_string never emits Windows-style separators.
            #[test]
            fn posix_string_no_backslashes(segments in prop::collection::vec("[a-z]{1,5}", 1..5)) {
                let path = PathBuf::from(segments.join("/"));
                let result = posix_string(&path);
                assert!(!result.contains('\\'));
            }

            #[test]
            fn posix_string_idempotent(segments in prop::collection::vec("[a-z]{1,5}", 1..5)) {
                let path = PathBuf::from(segments.join("/"));
                let first = posix_string(&path);
                let second = posix_string(&PathBuf::from(&first));
                assert_eq!(first, second);
            }

            // A single leading "./" is stripped from exact patterns.
            #[test]
            fn normalize_exact_pattern_strips_dot_slash(suffix in "[a-z]{1,15}") {
                let with_prefix = format!("./{suffix}");
                assert_eq!(normalize_exact_pattern(&with_prefix), suffix.as_str());
            }

            // Stripping is not recursive and is a no-op without the prefix.
            #[test]
            fn normalize_exact_pattern_strips_at_most_one(suffix in "[a-z]{1,15}") {
                let input = format!("./{suffix}");
                let result = normalize_exact_pattern(&input);
                assert_eq!(result, suffix.as_str());
                assert_eq!(normalize_exact_pattern(&suffix), suffix.as_str());
            }

            // Known forge hosts are recognized as git URLs even without scheme.
            #[test]
            fn looks_like_git_url_known_hosts(
                host_idx in 0..4usize,
                path in "[a-z]{1,10}/[a-z]{1,10}",
            ) {
                let host = ["github.com", "gitlab.com", "bitbucket.org", "codeberg.org"][host_idx];
                let url = format!("{host}/{path}");
                assert!(looks_like_git_url(&url));
            }

            #[test]
            fn looks_like_git_url_rejects_plain(name in "[a-z]{1,15}") {
                assert!(!looks_like_git_url(&name));
            }

            // "./", "../", and "~/" prefixes all denote local paths.
            #[test]
            fn looks_like_local_path_detects_relative(suffix in "[a-z]{1,10}") {
                assert!(looks_like_local_path(&format!("./{suffix}")));
                assert!(looks_like_local_path(&format!("../{suffix}")));
                assert!(looks_like_local_path(&format!("~/{suffix}")));
            }

            #[test]
            fn normalize_dot_segments_idempotent(
                segments in prop::collection::vec("[a-z]{1,5}", 1..5),
            ) {
                let path = PathBuf::from(segments.join("/"));
                let first = normalize_dot_segments(&path);
                let second = normalize_dot_segments(&first);
                assert_eq!(first, second);
            }

            // Whitespace-only sources normalize to None.
            #[test]
            fn normalize_source_empty_returns_none(s in "[ \\t]{0,10}") {
                assert!(normalize_source(&s).is_none());
            }

            // sources_match is reflexive and symmetric.
            #[test]
            fn sources_match_reflexive(source in "[a-z]{1,15}") {
                assert!(sources_match(&source, &source));
            }

            #[test]
            fn sources_match_symmetric(a in "[a-z]{1,10}", b in "[a-z]{1,10}") {
                assert_eq!(sources_match(&a, &b), sources_match(&b, &a));
            }

            // sort_lock_entries orders entries by identity (non-decreasing).
            #[test]
            fn sort_lock_entries_produces_sorted(
                identities in prop::collection::vec("[a-z]{1,10}", 1..10),
            ) {
                let mut entries: Vec<PackageLockEntry> = identities
                    .iter()
                    .map(|id| PackageLockEntry {
                        identity: id.clone(),
                        source: format!("npm:{id}"),
                        source_kind: PackageSourceKind::Npm,
                        resolved: PackageResolvedProvenance::Npm {
                            name: id.clone(),
                            requested_spec: id.clone(),
                            requested_version: None,
                            installed_version: "1.0.0".to_string(),
                            pinned: false,
                        },
                        digest_sha256: "abcd".to_string(),
                        trust_state: PackageEntryTrustState::Trusted,
                    })
                    .collect();
                sort_lock_entries(&mut entries);
                for pair in entries.windows(2) {
                    assert!(pair[0].identity <= pair[1].identity);
                }
            }
        }
    }
7642
7643 #[test]
7644 fn verify_and_record_lock_fails_closed_on_local_digest_mismatch() {
7645 let dir = tempfile::tempdir().expect("tempdir");
7646 let cwd = dir.path().to_path_buf();
7647 let pkg = cwd.join("local-pkg");
7648 fs::create_dir_all(&pkg).expect("create local package dir");
7649 let file_path = pkg.join("index.js");
7650 fs::write(&file_path, "export const version = 1;\n").expect("write extension file");
7651
7652 let manager = PackageManager::new(cwd);
7653 manager
7654 .verify_and_record_lock(
7655 "./local-pkg",
7656 PackageScope::Project,
7657 PackageLockAction::Install,
7658 )
7659 .expect("initial lock verification");
7660
7661 fs::write(&file_path, "export const version = 2;\n").expect("tamper package file");
7662
7663 let err = manager
7664 .verify_and_record_lock(
7665 "./local-pkg",
7666 PackageScope::Project,
7667 PackageLockAction::Install,
7668 )
7669 .expect_err("install verification should fail on digest mismatch");
7670 let message = err.to_string();
7671 assert!(
7672 message.contains("digest_mismatch"),
7673 "expected digest_mismatch error, got: {message}"
7674 );
7675 }
7676}