1#![allow(dead_code)]
26
27use std::collections::BTreeMap;
28use std::fs;
29use std::path::{Path, PathBuf};
30use std::process::Command;
31use std::sync::{Arc, RwLock};
32use std::time::SystemTime;
33
34use anyhow::{anyhow, Context, Result};
35use serde::{Deserialize, Serialize};
36use serde_json::json;
37
38fn validate_repo_name(name: &str) -> Result<()> {
40 let mut parts = name.split('/');
41 let org = parts.next().unwrap_or("");
42 let repo = parts.next().unwrap_or("");
43 if parts.next().is_some() || org.is_empty() || repo.is_empty() {
44 return Err(anyhow!(
45 "Invalid repo name {name:?}. Expected 'org/repo' (exactly one slash)."
46 ));
47 }
48 let valid = |s: &str| {
49 !s.is_empty()
50 && s.chars()
51 .all(|c| c.is_ascii_alphanumeric() || matches!(c, '.' | '-' | '_'))
52 };
53 if !valid(org) || !valid(repo) {
54 return Err(anyhow!(
55 "Invalid repo name {name:?}. Letters/digits/dots/hyphens/underscores only."
56 ));
57 }
58 Ok(())
59}
60
/// Callback run after a repo is activated; receives the working-copy path and
/// the repo name. An `Err` is logged and prevents the built SHA from being
/// recorded, so the build is retried on the next activation.
pub type PostActivateHook = Arc<dyn Fn(&Path, &str) -> Result<()> + Send + Sync>;
65
/// Per-repository bookkeeping row persisted in `inventory.json`.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct InventoryEntry {
    // ISO-8601 timestamp of the initial clone (reset when re-cloned).
    cloned_at: String,
    // ISO-8601 timestamp of the most recent access.
    last_accessed: String,
    // How many times the repo has been accessed/activated.
    #[serde(default)]
    access_count: u64,
    // True once the working copy was swept or deleted from disk.
    #[serde(default)]
    stale: bool,
    // Revision (git SHA, or local fingerprint) of the last successful
    // post-activate build; used to skip redundant rebuilds. Optional so
    // legacy inventories without the field still deserialize.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    last_built_sha: Option<String>,
}
83
84pub use crate::server::manifest::WorkspaceKind;
87
/// Cheaply cloneable handle to the shared workspace state.
#[derive(Clone)]
pub struct Workspace {
    inner: Arc<WorkspaceInner>,
}
93
/// Shared state behind a `Workspace` handle.
struct WorkspaceInner {
    // Github (managed clones under `repos/`) or Local (operator-owned root).
    kind: WorkspaceKind,
    // Workspace root: clone area in Github mode, the bound root in Local mode.
    workspace_dir: PathBuf,
    // Idle threshold for sweeping clones; set to u32::MAX in Local mode.
    stale_after_days: u32,
    // Mutable active-repo selection, shared across cloned handles.
    state: RwLock<WorkspaceState>,
    // Optional build hook run after activation.
    post_activate: Option<PostActivateHook>,
}
101
/// Currently active repository selection, if any.
#[derive(Debug, Default)]
struct WorkspaceState {
    // "org/repo" (Github mode) or "local/<dirname>" (Local mode).
    active_repo_name: Option<String>,
    // Absolute path of the active working copy / root.
    active_repo_path: Option<PathBuf>,
}
107
108impl Workspace {
    /// Open (or create) a Github-mode workspace rooted at `workspace_dir`.
    ///
    /// Ensures the root and its `repos/` subdirectory exist, then reconciles
    /// the persisted inventory against the checkouts actually on disk.
    ///
    /// # Errors
    /// Fails when a directory cannot be created or the inventory cannot be
    /// reconciled/saved.
    pub fn open(
        workspace_dir: PathBuf,
        stale_after_days: u32,
        post_activate: Option<PostActivateHook>,
    ) -> Result<Self> {
        if !workspace_dir.is_dir() {
            fs::create_dir_all(&workspace_dir).with_context(|| {
                format!("failed to create workspace dir {}", workspace_dir.display())
            })?;
        }
        let repos_dir = workspace_dir.join("repos");
        if !repos_dir.is_dir() {
            fs::create_dir_all(&repos_dir)
                .with_context(|| format!("failed to create repos dir {}", repos_dir.display()))?;
        }
        let ws = Self {
            inner: Arc::new(WorkspaceInner {
                kind: WorkspaceKind::Github,
                workspace_dir,
                stale_after_days,
                state: RwLock::new(WorkspaceState::default()),
                post_activate,
            }),
        };
        // Sync inventory.json with whatever is already checked out on disk.
        ws.reconcile_inventory()?;
        Ok(ws)
    }
137
138 pub fn open_local(root: PathBuf, post_activate: Option<PostActivateHook>) -> Result<Self> {
146 if !root.is_dir() {
147 anyhow::bail!(
148 "local workspace root does not exist or is not a directory: {}",
149 root.display()
150 );
151 }
152 let canon_root = root
153 .canonicalize()
154 .with_context(|| format!("failed to canonicalize local root {}", root.display()))?;
155 let inv_dir = canon_root.join(".mcp-workspace");
158 if !inv_dir.is_dir() {
159 fs::create_dir_all(&inv_dir).with_context(|| {
160 format!("failed to create local-workspace dir {}", inv_dir.display())
161 })?;
162 }
163 let mut state = WorkspaceState::default();
164 let synthetic_name = synthesize_local_name(&canon_root);
165 state.active_repo_name = Some(synthetic_name);
166 state.active_repo_path = Some(canon_root.clone());
167 Ok(Self {
168 inner: Arc::new(WorkspaceInner {
169 kind: WorkspaceKind::Local,
170 workspace_dir: canon_root,
171 stale_after_days: u32::MAX, state: RwLock::new(state),
173 post_activate,
174 }),
175 })
176 }
177
    /// Which mode (Github or Local) this workspace operates in.
    pub fn kind(&self) -> WorkspaceKind {
        self.inner.kind
    }
181
    /// Root directory of the workspace.
    pub fn workspace_dir(&self) -> &Path {
        &self.inner.workspace_dir
    }
185
    /// Directory holding the `repos/<org>/<repo>` checkouts (Github mode).
    pub fn repos_dir(&self) -> PathBuf {
        self.inner.workspace_dir.join("repos")
    }
189
    /// Location of the persisted inventory JSON for the current mode.
    fn inventory_path(&self) -> PathBuf {
        match self.inner.kind {
            WorkspaceKind::Github => self.inner.workspace_dir.join("inventory.json"),
            // Local mode hides the inventory inside `.mcp-workspace` so the
            // operator's tree stays clean.
            WorkspaceKind::Local => self
                .inner
                .workspace_dir
                .join(".mcp-workspace")
                .join("inventory.json"),
        }
    }
200
    /// Name of the currently active repo, if one is selected.
    pub fn active_repo_name(&self) -> Option<String> {
        self.inner.state.read().unwrap().active_repo_name.clone()
    }
205
    /// On-disk path of the currently active repo, if one is selected.
    pub fn active_repo_path(&self) -> Option<PathBuf> {
        self.inner.state.read().unwrap().active_repo_path.clone()
    }
210
211 fn load_inventory(&self) -> BTreeMap<String, InventoryEntry> {
216 let path = self.inventory_path();
217 let Ok(text) = fs::read_to_string(&path) else {
218 return BTreeMap::new();
219 };
220 serde_json::from_str(&text).unwrap_or_default()
221 }
222
223 fn save_inventory(&self, inv: &BTreeMap<String, InventoryEntry>) -> Result<()> {
224 let path = self.inventory_path();
225 let body = serde_json::to_string_pretty(inv).context("failed to serialise inventory")?;
226 fs::write(&path, body).with_context(|| format!("failed to write {}", path.display()))?;
227 Ok(())
228 }
229
    /// Bring the persisted inventory in line with the filesystem: every
    /// `repos/<org>/<repo>` checkout gains an entry (timestamped from the
    /// directory mtime when available), and entries whose checkout has
    /// vanished are flagged stale.
    fn reconcile_inventory(&self) -> Result<()> {
        let mut inv = self.load_inventory();
        let mut on_disk: Vec<String> = Vec::new();
        if self.repos_dir().is_dir() {
            for org_entry in fs::read_dir(self.repos_dir())? {
                // Unreadable entries are skipped rather than aborting the scan.
                let Ok(org_entry) = org_entry else { continue };
                if !org_entry.path().is_dir() {
                    continue;
                }
                let org = org_entry.file_name().to_string_lossy().into_owned();
                // Dot-directories are bookkeeping, not orgs.
                if org.starts_with('.') {
                    continue;
                }
                for repo_entry in fs::read_dir(org_entry.path())? {
                    let Ok(repo_entry) = repo_entry else { continue };
                    if !repo_entry.path().is_dir() {
                        continue;
                    }
                    let repo = repo_entry.file_name().to_string_lossy().into_owned();
                    if repo.starts_with('.') {
                        continue;
                    }
                    let rname = format!("{org}/{repo}");
                    on_disk.push(rname.clone());
                    // Checkout not yet in the inventory: synthesize an entry,
                    // dating it from the directory mtime (falling back to now).
                    inv.entry(rname).or_insert_with(|| {
                        let mtime = repo_entry
                            .metadata()
                            .ok()
                            .and_then(|m| m.modified().ok())
                            .map(format_iso)
                            .unwrap_or_else(now_iso);
                        InventoryEntry {
                            cloned_at: mtime.clone(),
                            last_accessed: mtime,
                            access_count: 0,
                            stale: false,
                            last_built_sha: None,
                        }
                    });
                }
            }
        }
        // Anything recorded but missing from disk is stale until re-fetched.
        for (rname, entry) in inv.iter_mut() {
            if !on_disk.contains(rname) && !entry.stale {
                entry.stale = true;
            }
        }
        self.save_inventory(&inv)?;
        Ok(())
    }
280
281 fn bump_access(&self, name: &str, action: &str) {
282 let mut inv = self.load_inventory();
283 let now = now_iso();
284 let entry = inv
285 .entry(name.to_string())
286 .or_insert_with(|| InventoryEntry {
287 cloned_at: now.clone(),
288 last_accessed: now.clone(),
289 access_count: 0,
290 stale: false,
291 last_built_sha: None,
292 });
293 entry.last_accessed = now.clone();
294 entry.access_count += 1;
295 entry.stale = false;
296 if action == "cloned" || entry.cloned_at.is_empty() {
297 entry.cloned_at = now;
298 }
299 let _ = self.save_inventory(&inv);
300 }
301
302 fn mark_stale(&self, name: &str) {
303 let mut inv = self.load_inventory();
304 if let Some(entry) = inv.get_mut(name) {
305 entry.stale = true;
306 let _ = self.save_inventory(&inv);
307 }
308 }
309
310 fn sweep_stale(&self) -> Vec<String> {
311 if matches!(self.inner.kind, WorkspaceKind::Local) {
313 return Vec::new();
314 }
315 let mut inv = self.load_inventory();
316 let cutoff = SystemTime::now()
317 - std::time::Duration::from_secs(self.inner.stale_after_days as u64 * 86_400);
318 let active = self.active_repo_name();
319 let mut swept: Vec<String> = Vec::new();
320 for (rname, entry) in inv.iter_mut() {
321 if entry.stale {
322 continue;
323 }
324 if Some(rname.as_str()) == active.as_deref() {
325 continue;
326 }
327 let last = parse_iso(&entry.last_accessed).unwrap_or(SystemTime::UNIX_EPOCH);
328 if last >= cutoff {
329 continue;
330 }
331 let parts: Vec<&str> = rname.splitn(2, '/').collect();
332 if parts.len() != 2 {
333 continue;
334 }
335 let repo_path = self.repos_dir().join(parts[0]).join(parts[1]);
336 if repo_path.exists() {
337 let _ = fs::remove_dir_all(&repo_path);
338 }
339 entry.stale = true;
340 swept.push(rname.clone());
341 }
342 if !swept.is_empty() {
343 let _ = self.save_inventory(&inv);
344 self.prune_empty_org_dirs();
345 }
346 swept
347 }
348
349 fn prune_empty_org_dirs(&self) {
350 let Ok(entries) = fs::read_dir(self.repos_dir()) else {
351 return;
352 };
353 for entry in entries.flatten() {
354 let path = entry.path();
355 if !path.is_dir() {
356 continue;
357 }
358 if let Ok(children) = fs::read_dir(&path) {
359 let real: Vec<_> = children
360 .flatten()
361 .filter(|c| !c.file_name().to_string_lossy().starts_with('.'))
362 .collect();
363 if real.is_empty() {
364 let _ = fs::remove_dir_all(&path);
365 }
366 }
367 }
368 }
369
    /// Ensure a checkout for `name` exists and is current.
    ///
    /// Returns `(action, path, revision)` where `action` is one of "cloned",
    /// "updated" or "current". In Local mode no git is involved: the bound
    /// root is fingerprinted and compared against the last-built fingerprint
    /// to map onto the same action vocabulary.
    fn clone_or_update(&self, name: &str) -> Result<(String, PathBuf, String)> {
        if matches!(self.inner.kind, WorkspaceKind::Local) {
            let root = self
                .inner
                .state
                .read()
                .unwrap()
                .active_repo_path
                .clone()
                .unwrap_or_else(|| self.inner.workspace_dir.clone());
            let prev_sha = self.last_built_sha(name);
            let fingerprint = fingerprint_dir(&root);
            // never built -> "cloned"; fingerprint drift -> "updated".
            let action = match prev_sha {
                Some(p) if p == fingerprint => "current",
                None => "cloned",
                Some(_) => "updated",
            };
            return Ok((action.to_string(), root, fingerprint));
        }
        // Callers validate `name` first; a slash-free name would panic on
        // parts[1] below.
        let parts: Vec<&str> = name.splitn(2, '/').collect();
        let repo_path = self.repos_dir().join(parts[0]).join(parts[1]);
        if !repo_path.exists() {
            fs::create_dir_all(repo_path.parent().unwrap()).ok();
            let url = format!("https://github.com/{name}.git");
            let out = Command::new("git")
                .args(["clone", "--depth", "1", &url, repo_path.to_str().unwrap()])
                .output()
                .context("failed to spawn `git clone`")?;
            if !out.status.success() {
                anyhow::bail!(
                    "git clone failed: {}",
                    String::from_utf8_lossy(&out.stderr).trim()
                );
            }
            let sha = git_rev_parse(&repo_path, "HEAD")?;
            return Ok(("cloned".to_string(), repo_path, sha));
        }

        // Existing checkout: shallow-fetch, then hard-reset only when the
        // remote actually moved.
        // NOTE(review): only spawn failures surface here — a non-zero `git
        // fetch` exit status is ignored and we fall through to comparing
        // HEAD/FETCH_HEAD; confirm that is intended.
        Command::new("git")
            .args(["fetch", "--depth", "1", "origin"])
            .current_dir(&repo_path)
            .output()
            .context("git fetch failed")?;
        let local = git_rev_parse(&repo_path, "HEAD")?;
        let remote = git_rev_parse(&repo_path, "FETCH_HEAD")?;
        if local != remote {
            Command::new("git")
                .args(["reset", "--hard", "FETCH_HEAD"])
                .current_dir(&repo_path)
                .output()
                .context("git reset failed")?;
            let sha = git_rev_parse(&repo_path, "HEAD")?;
            return Ok(("updated".to_string(), repo_path, sha));
        }
        Ok(("current".to_string(), repo_path, local))
    }
446
    /// Make `name` the active repo: clone/update it, record the access, then
    /// run the post-activate hook unless the auto-rebuild gate says this
    /// exact revision was already built successfully.
    ///
    /// The built SHA is recorded only when the hook succeeds (or was
    /// skipped), so a failed build is retried on the next activation.
    fn activate(&self, name: &str, force_rebuild: bool) -> Result<String> {
        let prev_built_sha = self.last_built_sha(name);
        let (action, repo_path, head_sha) = self.clone_or_update(name)?;
        self.bump_access(name, &action);
        {
            // Narrow scope: do not hold the write lock across the hook call.
            let mut state = self.inner.state.write().unwrap();
            state.active_repo_name = Some(name.to_string());
            state.active_repo_path = Some(repo_path.clone());
        }

        // Auto-rebuild gate: skip when nothing changed since the last
        // successful build and the caller did not force a rebuild.
        let already_built = !force_rebuild
            && action == "current"
            && prev_built_sha.as_deref() == Some(head_sha.as_str());
        let mut hook_skipped = false;
        let hook_ok = if already_built {
            hook_skipped = true;
            true
        } else if let Some(hook) = &self.inner.post_activate {
            match hook(&repo_path, name) {
                Ok(()) => true,
                Err(e) => {
                    // Activation still succeeds; only the SHA is withheld.
                    tracing::warn!("post-activate hook for {name} failed: {e}");
                    false
                }
            }
        } else {
            true
        };
        if hook_ok {
            self.record_built_sha(name, &head_sha);
        }
        let verb = match action.as_str() {
            "cloned" => "Cloned",
            "updated" => "Updated",
            "current" => "Activated (already up to date)",
            other => other,
        };
        let suffix = if hook_skipped {
            " [build skipped: HEAD matches last-built SHA]"
        } else {
            ""
        };
        Ok(format!(
            "{verb} '{name}' at {}.{suffix}",
            repo_path.display()
        ))
    }
508
509 fn record_built_sha(&self, name: &str, sha: &str) {
510 let mut inv = self.load_inventory();
511 if let Some(entry) = inv.get_mut(name) {
512 entry.last_built_sha = Some(sha.to_string());
513 let _ = self.save_inventory(&inv);
514 }
515 }
516
517 pub fn last_built_sha(&self, name: &str) -> Option<String> {
522 self.load_inventory()
523 .get(name)
524 .and_then(|e| e.last_built_sha.clone())
525 }
526
    /// Remove `name`'s checkout from disk and flag its inventory entry stale.
    /// Clears the active selection when the deleted repo was active.
    fn delete(&self, name: &str) -> Result<String> {
        let parts: Vec<&str> = name.splitn(2, '/').collect();
        if parts.len() != 2 {
            anyhow::bail!("Invalid repo name");
        }
        let repo_path = self.repos_dir().join(parts[0]).join(parts[1]);
        let mut deleted = Vec::new();
        if repo_path.exists() {
            fs::remove_dir_all(&repo_path).context("failed to remove repo dir")?;
            deleted.push("repo");
        }
        // Keep the inventory row as a stale tombstone so `list` can still
        // show it with a re-fetch hint.
        self.mark_stale(name);
        self.prune_empty_org_dirs();
        if deleted.is_empty() {
            return Ok(format!("Nothing to delete — '{name}' not found."));
        }
        let mut state = self.inner.state.write().unwrap();
        if state.active_repo_name.as_deref() == Some(name) {
            state.active_repo_name = None;
            state.active_repo_path = None;
            return Ok(format!(
                "Deleted {}. Active repo cleared.",
                deleted.join(", ")
            ));
        }
        Ok(format!("Deleted {}.", deleted.join(", ")))
    }
554
    /// Render the inventory as human-readable text: live repos first (the
    /// active one marked), then stale tombstones with re-fetch hints.
    fn list(&self) -> String {
        let inv = self.load_inventory();
        if inv.is_empty() {
            return "No repos cloned yet. Call repo_management('org/repo') to clone one."
                .to_string();
        }
        let active = self.active_repo_name();
        let mut live: Vec<String> = Vec::new();
        let mut stale_lines: Vec<String> = Vec::new();
        for (rname, entry) in &inv {
            let marker = if Some(rname.as_str()) == active.as_deref() {
                " [active]"
            } else {
                ""
            };
            // e.g. "3 accesses, last 2d ago" (singular form for exactly 1).
            let access = format!(
                "{} access{}, last {}",
                entry.access_count,
                if entry.access_count == 1 { "" } else { "es" },
                relative_time(&entry.last_accessed)
            );
            if entry.stale {
                stale_lines.push(format!(
                    " {rname} [STALE — re-fetch with repo_management('{rname}')] ({access})"
                ));
            } else {
                live.push(format!(" {rname}{marker} ({access})"));
            }
        }
        let mut out = String::new();
        if !live.is_empty() {
            out.push_str(&format!(
                "{} live repo(s):\n{}",
                live.len(),
                live.join("\n")
            ));
        }
        if !stale_lines.is_empty() {
            if !out.is_empty() {
                out.push_str("\n\n");
            }
            out.push_str(&format!(
                "{} stale repo(s):\n{}",
                stale_lines.len(),
                stale_lines.join("\n")
            ));
        }
        out
    }
604
    /// Single entry point for the repo-management tool.
    ///
    /// Dispatch order: Local-mode special cases → sweep of idle checkouts →
    /// bare call lists the inventory → `update` re-activates the current
    /// repo → named activation (or `delete`). Always returns user-facing
    /// text, never `Err`; failures are folded into the message.
    pub fn repo_management(
        &self,
        name: Option<&str>,
        delete: bool,
        update: bool,
        force_rebuild: bool,
    ) -> String {
        if matches!(self.inner.kind, WorkspaceKind::Local) {
            if name.is_some() {
                return "Local-workspace mode does not accept a repo name. Use `set_root_dir(path)` \
                    to switch the active root, or pass `update=true` / `force_rebuild=true` \
                    to rebuild against the current root."
                    .to_string();
            }
            if delete {
                return "Local-workspace mode does not support `delete`. The root is owned by the \
                    operator; remove it manually."
                    .to_string();
            }
            let active = match self.active_repo_name() {
                Some(n) => n,
                None => return "No active local root.".to_string(),
            };
            // In local mode every call re-activates the bound root, so the
            // `update` flag is accepted but carries no extra meaning.
            let _ = update;
            return self
                .activate(&active, force_rebuild)
                .unwrap_or_else(|e| format!("rebuild failed: {e}"));
        }

        // Opportunistic sweep; results are reported as a banner prefix.
        let swept = self.sweep_stale();
        let prefix = if swept.is_empty() {
            String::new()
        } else {
            format!(
                "[Swept {} idle repo(s) (>{}d): {}]\n\n",
                swept.len(),
                self.inner.stale_after_days,
                swept.join(", ")
            )
        };

        if name.is_none() && !update {
            return prefix + &self.list();
        }

        if update {
            let Some(active) = self.active_repo_name() else {
                return prefix + "No active repository. Call repo_management('org/repo') first.";
            };
            return prefix
                + &self
                    .activate(&active, force_rebuild)
                    .unwrap_or_else(|e| format!("update failed: {e}"));
        }

        let Some(name) = name else {
            return prefix + "Provide a repo name (e.g. repo_management('org/repo')).";
        };
        if let Err(e) = validate_repo_name(name) {
            return prefix + &e.to_string();
        }
        if delete {
            return prefix
                + &self
                    .delete(name)
                    .unwrap_or_else(|e| format!("delete failed: {e}"));
        }
        prefix
            + &self
                .activate(name, force_rebuild)
                .unwrap_or_else(|e| format!("activate failed: {e}"))
    }
696
    /// Re-bind a Local-mode workspace to a different root directory and run
    /// the activation pipeline against it. All failures come back as
    /// user-facing text rather than `Err`.
    pub fn set_root_dir(&self, new_root: &Path) -> String {
        if !matches!(self.inner.kind, WorkspaceKind::Local) {
            return "set_root_dir is only valid in local-workspace mode.".to_string();
        }
        if !new_root.is_dir() {
            return format!(
                "Path does not exist or is not a directory: {}",
                new_root.display()
            );
        }
        let canon = match new_root.canonicalize() {
            Ok(p) => p,
            Err(e) => return format!("canonicalize failed: {e}"),
        };
        let synthetic = synthesize_local_name(&canon);
        {
            // Bind the new root first so activate()/clone_or_update()
            // fingerprint and pass the new path to the hook.
            let mut state = self.inner.state.write().unwrap();
            state.active_repo_name = Some(synthetic.clone());
            state.active_repo_path = Some(canon.clone());
        }
        self.activate(&synthetic, false)
            .unwrap_or_else(|e| format!("set_root_dir failed: {e}"))
    }
725}
726
/// Derive a synthetic `local/<dirname>` repo name for a bound root; falls
/// back to `local/local` when the path has no final component.
fn synthesize_local_name(root: &Path) -> String {
    let leaf = match root.file_name() {
        Some(os) => os.to_string_lossy().into_owned(),
        None => String::from("local"),
    };
    format!("local/{leaf}")
}
737
/// Hash the visible file tree under `root` (paths, mtimes, sizes) into a
/// pseudo-revision string — the Local-mode analogue of a git SHA.
///
/// Uses the `ignore` walker with standard filters, so hidden files and
/// gitignored paths are excluded. File *content* is not read: an edit that
/// preserves size and seconds-precision mtime will not change the result.
fn fingerprint_dir(root: &Path) -> String {
    use std::hash::{Hash, Hasher};
    // NOTE(review): DefaultHasher output is not guaranteed stable across Rust
    // releases, and the walk order is assumed deterministic here — confirm if
    // fingerprints must survive toolchain upgrades or cross-run comparison.
    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    let walker = ignore::WalkBuilder::new(root)
        .standard_filters(true)
        .hidden(true)
        .git_ignore(true)
        .build();
    for entry in walker.flatten() {
        if !entry.path().is_file() {
            continue;
        }
        let Ok(meta) = entry.metadata() else { continue };
        // Seconds-precision mtime; unavailable/pre-epoch mtimes hash as 0.
        let mtime = meta
            .modified()
            .ok()
            .and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
            .map(|d| d.as_secs())
            .unwrap_or(0);
        entry.path().to_string_lossy().hash(&mut hasher);
        mtime.hash(&mut hasher);
        meta.len().hash(&mut hasher);
    }
    format!("local-{:016x}", hasher.finish())
}
767
768fn git_rev_parse(repo_path: &Path, refspec: &str) -> Result<String> {
769 let out = Command::new("git")
770 .args(["rev-parse", refspec])
771 .current_dir(repo_path)
772 .output()
773 .context("git rev-parse failed")?;
774 Ok(String::from_utf8_lossy(&out.stdout).trim().to_string())
775}
776
/// Current wall-clock time as a seconds-precision ISO-8601 string.
fn now_iso() -> String {
    format_iso(SystemTime::now())
}
780
781fn format_iso(t: SystemTime) -> String {
782 let secs = t
783 .duration_since(SystemTime::UNIX_EPOCH)
784 .map(|d| d.as_secs())
785 .unwrap_or(0);
786 chrono_lite::format_secs(secs)
788}
789
790fn parse_iso(s: &str) -> Option<SystemTime> {
791 let secs = chrono_lite::parse_secs(s)?;
792 SystemTime::UNIX_EPOCH.checked_add(std::time::Duration::from_secs(secs))
793}
794
795fn relative_time(iso: &str) -> String {
796 let Some(t) = parse_iso(iso) else {
797 return "unknown".to_string();
798 };
799 let now = SystemTime::now();
800 let delta = now.duration_since(t).unwrap_or_default().as_secs();
801 if delta < 3600 {
802 "just now".to_string()
803 } else if delta < 86_400 {
804 format!("{}h ago", delta / 3600)
805 } else {
806 format!("{}d ago", delta / 86_400)
807 }
808}
809
/// Minimal seconds-precision ISO-8601 formatting/parsing with no date-time
/// dependency, built on Howard Hinnant's civil-calendar algorithms.
mod chrono_lite {
    /// Render seconds-since-epoch as `YYYY-MM-DDTHH:MM:SS`.
    pub fn format_secs(secs: u64) -> String {
        let day_count = (secs / 86_400) as i64;
        let secs_of_day = secs % 86_400;
        // 719_468 shifts the serial origin from 1970-01-01 to 0000-03-01.
        let (year, month, day) = days_to_civil(day_count + 719_468);
        format!(
            "{year:04}-{month:02}-{day:02}T{:02}:{:02}:{:02}",
            secs_of_day / 3600,
            (secs_of_day / 60) % 60,
            secs_of_day % 60
        )
    }

    /// Parse `YYYY-MM-DDTHH:MM:SS` (separator bytes are not validated) into
    /// seconds since the epoch; `None` when too short or non-numeric.
    pub fn parse_secs(s: &str) -> Option<u64> {
        if s.len() < 19 {
            return None;
        }
        let year: i64 = s.get(0..4)?.parse().ok()?;
        let month: u32 = s.get(5..7)?.parse().ok()?;
        let day: u32 = s.get(8..10)?.parse().ok()?;
        let hour: u64 = s.get(11..13)?.parse().ok()?;
        let minute: u64 = s.get(14..16)?.parse().ok()?;
        let second: u64 = s.get(17..19)?.parse().ok()?;
        let days = civil_to_days(year, month, day) - 719_468;
        Some((days * 86_400) as u64 + hour * 3600 + minute * 60 + second)
    }

    /// Serial day count (origin 0000-03-01) → civil (year, month, day).
    fn days_to_civil(z: i64) -> (i64, u32, u32) {
        let era = if z >= 0 { z } else { z - 146_096 } / 146_097;
        let day_of_era = (z - era * 146_097) as u64;
        let year_of_era =
            (day_of_era - day_of_era / 1460 + day_of_era / 36_524 - day_of_era / 146_096) / 365;
        let mut year = (year_of_era as i64) + era * 400;
        let day_of_year =
            day_of_era - (365 * year_of_era + year_of_era / 4 - year_of_era / 100);
        let mp = (5 * day_of_year + 2) / 153;
        let day = (day_of_year - (153 * mp + 2) / 5 + 1) as u32;
        let month = (if mp < 10 { mp + 3 } else { mp - 9 }) as u32;
        // The algorithm's year starts in March; Jan/Feb belong to year + 1.
        if month <= 2 {
            year += 1;
        }
        (year, month, day)
    }

    /// Civil (year, month, day) → serial day count (inverse of above).
    fn civil_to_days(y: i64, m: u32, d: u32) -> i64 {
        let year = if m <= 2 { y - 1 } else { y };
        let era = if year >= 0 { year } else { year - 399 } / 400;
        let year_of_era = (year - era * 400) as u64;
        let day_of_year =
            (153 * (if m > 2 { m - 3 } else { m + 9 }) as u64 + 2) / 5 + d as u64 - 1;
        let day_of_era = year_of_era * 365 + year_of_era / 4 - year_of_era / 100 + day_of_year;
        era * 146_097 + day_of_era as i64
    }
}
863
#[allow(dead_code)]
// Touches the `json!` macro so the `serde_json::json` import at the top of
// the file stays used in every build configuration; never called at runtime.
fn _json_keepalive() {
    let _ = json!({});
}
869
#[cfg(test)]
mod tests {
    use super::*;

    // Accept well-formed org/repo names; reject missing slash, extra
    // segments, and shell metacharacters.
    #[test]
    fn validates_repo_names() {
        assert!(validate_repo_name("pydata/xarray").is_ok());
        assert!(validate_repo_name("my-org.x/repo_v2").is_ok());
        assert!(validate_repo_name("xarray").is_err());
        assert!(validate_repo_name("a/b/c").is_err());
        assert!(validate_repo_name("foo/bar; rm -rf").is_err());
    }

    // `open` must create the repos/ layout inside a fresh directory.
    #[test]
    fn open_creates_layout() {
        let dir = tempfile::tempdir().unwrap();
        let ws = Workspace::open(dir.path().to_path_buf(), 7, None).unwrap();
        assert!(ws.repos_dir().is_dir());
    }

    // A bare call with an empty inventory produces the onboarding hint.
    #[test]
    fn empty_list() {
        let dir = tempfile::tempdir().unwrap();
        let ws = Workspace::open(dir.path().to_path_buf(), 7, None).unwrap();
        let out = ws.repo_management(None, false, false, false);
        assert!(out.contains("No repos cloned yet"));
    }

    // Validation failures are surfaced as text, not errors.
    #[test]
    fn invalid_repo_name_rejected() {
        let dir = tempfile::tempdir().unwrap();
        let ws = Workspace::open(dir.path().to_path_buf(), 7, None).unwrap();
        let out = ws.repo_management(Some("bad name with spaces"), false, false, false);
        assert!(out.contains("Invalid repo name"));
    }

    // Deleting an unknown repo is a friendly no-op, not an error.
    #[test]
    fn delete_unknown() {
        let dir = tempfile::tempdir().unwrap();
        let ws = Workspace::open(dir.path().to_path_buf(), 7, None).unwrap();
        let out = ws.repo_management(Some("nope/none"), true, false, false);
        assert!(out.contains("Nothing to delete"));
    }

    // format_secs/parse_secs must be exact inverses at second precision.
    #[test]
    fn iso_round_trip() {
        let now = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .unwrap()
            .as_secs();
        let s = chrono_lite::format_secs(now);
        let back = chrono_lite::parse_secs(&s).unwrap();
        assert_eq!(now, back);
    }

    // The recorded build SHA lives in inventory.json, so it must survive
    // across Workspace instances.
    #[test]
    fn last_built_sha_round_trip() {
        let dir = tempfile::tempdir().unwrap();
        let ws = Workspace::open(dir.path().to_path_buf(), 7, None).unwrap();
        ws.bump_access("acme/widgets", "cloned");
        assert_eq!(ws.last_built_sha("acme/widgets"), None);
        ws.record_built_sha("acme/widgets", "abc1234deadbeef");
        assert_eq!(
            ws.last_built_sha("acme/widgets").as_deref(),
            Some("abc1234deadbeef")
        );
        let ws2 = Workspace::open(dir.path().to_path_buf(), 7, None).unwrap();
        assert_eq!(
            ws2.last_built_sha("acme/widgets").as_deref(),
            Some("abc1234deadbeef")
        );
    }

    // Inventories written before `last_built_sha` existed must still load
    // (the field is #[serde(default)]).
    #[test]
    fn inventory_loads_legacy_entries_without_sha_field() {
        let dir = tempfile::tempdir().unwrap();
        let ws = Workspace::open(dir.path().to_path_buf(), 7, None).unwrap();
        let legacy = r#"{
  "old/repo": {
    "cloned_at": "2024-01-01T00:00:00",
    "last_accessed": "2024-01-01T00:00:00",
    "access_count": 5,
    "stale": false
  }
}"#;
        std::fs::write(dir.path().join("inventory.json"), legacy).unwrap();
        let ws2 = Workspace::open(dir.path().to_path_buf(), 7, None).unwrap();
        assert_eq!(ws2.last_built_sha("old/repo"), None);
        let _ = ws;
    }

    // Pure SHA bookkeeping must never fire the post-activate hook.
    #[test]
    fn auto_rebuild_gate_skips_when_sha_matches() {
        use std::sync::atomic::{AtomicUsize, Ordering};
        let dir = tempfile::tempdir().unwrap();
        let calls = Arc::new(AtomicUsize::new(0));
        let calls_h = calls.clone();
        let hook: PostActivateHook = Arc::new(move |_path, _name| {
            calls_h.fetch_add(1, Ordering::SeqCst);
            Ok(())
        });
        let ws = Workspace::open(dir.path().to_path_buf(), 7, Some(hook)).unwrap();
        ws.bump_access("acme/widgets", "cloned");
        ws.record_built_sha("acme/widgets", "sha_one");
        assert_eq!(
            ws.last_built_sha("acme/widgets").as_deref(),
            Some("sha_one")
        );
        ws.record_built_sha("acme/widgets", "sha_one");
        assert_eq!(
            ws.last_built_sha("acme/widgets").as_deref(),
            Some("sha_one")
        );
        assert_eq!(calls.load(Ordering::SeqCst), 0);
    }

    // open_local makes the root active without any explicit activation call.
    #[test]
    fn local_workspace_binds_root_immediately() {
        let dir = tempfile::tempdir().unwrap();
        let ws = Workspace::open_local(dir.path().to_path_buf(), None).unwrap();
        assert_eq!(ws.kind(), WorkspaceKind::Local);
        assert!(ws.active_repo_path().is_some());
        assert!(ws.active_repo_name().unwrap().starts_with("local/"));
    }

    // Github-only operations (naming a repo, delete) are refused in Local mode.
    #[test]
    fn local_workspace_rejects_github_ops() {
        let dir = tempfile::tempdir().unwrap();
        let ws = Workspace::open_local(dir.path().to_path_buf(), None).unwrap();
        let out = ws.repo_management(Some("acme/widgets"), false, false, false);
        assert!(out.contains("does not accept a repo name"));
        let out = ws.repo_management(None, true, false, false);
        assert!(out.contains("does not support `delete`"));
    }

    // First update fires the hook; the second sees an unchanged fingerprint
    // and must be skipped by the auto-rebuild gate.
    #[test]
    fn local_workspace_update_rebuilds() {
        use std::sync::atomic::{AtomicUsize, Ordering};
        let dir = tempfile::tempdir().unwrap();
        std::fs::write(dir.path().join("x.txt"), b"hi").unwrap();
        let calls = Arc::new(AtomicUsize::new(0));
        let calls_h = calls.clone();
        let hook: PostActivateHook = Arc::new(move |_p, _n| {
            calls_h.fetch_add(1, Ordering::SeqCst);
            Ok(())
        });
        let ws = Workspace::open_local(dir.path().to_path_buf(), Some(hook)).unwrap();
        let _ = ws.repo_management(None, false, true, false);
        assert_eq!(calls.load(Ordering::SeqCst), 1);
        let out = ws.repo_management(None, false, true, false);
        assert_eq!(
            calls.load(Ordering::SeqCst),
            1,
            "auto-rebuild gate must skip"
        );
        assert!(out.contains("build skipped"));
    }

    // set_root_dir is a Local-mode-only operation.
    #[test]
    fn set_root_dir_only_in_local_mode() {
        let dir = tempfile::tempdir().unwrap();
        let ws = Workspace::open(dir.path().to_path_buf(), 7, None).unwrap();
        let out = ws.set_root_dir(dir.path());
        assert!(out.contains("only valid in local-workspace"));
    }

    // `update` without a prior activation points the user at activation.
    #[test]
    fn update_with_no_active_repo() {
        let dir = tempfile::tempdir().unwrap();
        let ws = Workspace::open(dir.path().to_path_buf(), 7, None).unwrap();
        let out = ws.repo_management(None, false, true, false);
        assert!(out.contains("No active repository"));
    }

    // Re-binding must swap the active path to the canonicalized new root.
    #[test]
    fn set_root_dir_updates_active_path() {
        let dir = tempfile::tempdir().unwrap();
        let child = dir.path().join("child");
        std::fs::create_dir_all(&child).unwrap();
        let ws = Workspace::open_local(dir.path().to_path_buf(), None).unwrap();
        let _ = ws.set_root_dir(&child);
        assert_eq!(
            ws.active_repo_path().unwrap(),
            child.canonicalize().unwrap(),
            "set_root_dir didn't update active_repo_path"
        );
    }

    // The hook must receive the *new* root's path after a re-bind.
    #[test]
    fn set_root_dir_post_activate_fires_against_new_root() {
        let dir = tempfile::tempdir().unwrap();
        let child = dir.path().join("child");
        std::fs::create_dir_all(&child).unwrap();
        std::fs::write(child.join("a.txt"), b"hi").unwrap();
        let seen_path: Arc<std::sync::Mutex<Option<PathBuf>>> = Arc::new(Default::default());
        let seen = seen_path.clone();
        let hook: PostActivateHook = Arc::new(move |p, _n| {
            *seen.lock().unwrap() = Some(p.to_path_buf());
            Ok(())
        });
        let ws = Workspace::open_local(dir.path().to_path_buf(), Some(hook)).unwrap();
        let _ = ws.set_root_dir(&child);
        assert_eq!(
            seen_path.lock().unwrap().clone().unwrap(),
            child.canonicalize().unwrap(),
            "post_activate hook saw the wrong root after set_root_dir"
        );
    }
}