1use std::collections::HashSet;
7use std::path::{Path, PathBuf};
8
9use git2::{Cred, RemoteCallbacks};
10use octocrab::Octocrab;
11use tracing::{debug, info, warn};
12use vibe_graph_core::{
13 detect_references, GitChangeSnapshot, SourceCodeGraph, SourceCodeGraphBuilder,
14};
15use vibe_graph_git::get_git_changes;
16
17use crate::config::Config;
18use crate::error::{OpsError, OpsResult};
19use crate::project::{Project, ProjectSource, Repository};
20use crate::requests::*;
21use crate::responses::*;
22use crate::scan::scan_directory;
23use crate::store::Store;
24use crate::workspace::{SyncSource, WorkspaceInfo, WorkspaceKind};
25
/// Shared context for the high-level operations (sync, graph, status, load,
/// clean, git-changes).
///
/// Holds the loaded [`Config`]; cloning is cheap enough to hand a copy to
/// each caller.
#[derive(Debug, Clone)]
pub struct OpsContext {
    // Effective configuration: GitHub credentials, cache directories, and
    // the max content size used when inlining file contents.
    pub config: Config,
}
35
36impl OpsContext {
37 pub fn new(config: Config) -> Self {
39 Self { config }
40 }
41
42 pub fn default_config() -> OpsResult<Self> {
44 Ok(Self::new(Config::load()?))
45 }
46
47 pub async fn sync(&self, request: SyncRequest) -> OpsResult<SyncResponse> {
56 match &request.source {
57 SyncSource::Local { path } => self.sync_local(path, &request).await,
58 SyncSource::GitHubOrg { org } => self.sync_github_org(org, &request).await,
59 SyncSource::GitHubRepo { owner, repo } => {
60 self.sync_github_repo(owner, repo, &request).await
61 }
62 }
63 }
64
    /// Syncs a local workspace at `path`.
    ///
    /// Returns the cached project from the `.self` store when one exists
    /// (unless `force` or `no_save` is set); otherwise scans the workspace,
    /// inlines file contents up to the configured size limit, and persists
    /// the result (plus an optional snapshot).
    async fn sync_local(&self, path: &Path, request: &SyncRequest) -> OpsResult<SyncResponse> {
        let workspace = WorkspaceInfo::detect(path)?;
        let store = Store::new(&workspace.root);

        // Fast path: reuse the stored project. `no_save` also disables the
        // cache read, so "don't touch the store" requests always rescan.
        if !request.force && store.exists() && !request.no_save {
            if let Some(project) = store.load()? {
                info!(name = %project.name, "Using cached project from .self");
                return Ok(SyncResponse {
                    project,
                    workspace: workspace.clone(),
                    path: workspace.root.clone(),
                    snapshot_created: None,
                    remote: store.load_manifest()?.and_then(|m| m.remote),
                });
            }
        }

        // Scan: multi-repo workspaces get one Repository per member; single
        // repos and plain directories are scanned as one repository.
        let mut project = match &workspace.kind {
            WorkspaceKind::SingleRepo => self.sync_single_repo(&workspace)?,
            WorkspaceKind::MultiRepo { .. } => self.sync_multi_repo(&workspace)?,
            WorkspaceKind::PlainDirectory => self.sync_single_repo(&workspace)?,
        };

        // Inline contents only for text files strictly under the size limit
        // (unknown sizes are excluded).
        let max_size = self.config.max_content_size_kb * 1024;
        project.expand_content(|source| {
            source.size.map(|s| s < max_size).unwrap_or(false) && source.is_text()
        })?;

        // Only a single-repo workspace has a well-defined `origin` remote.
        let remote = if workspace.is_single_repo() {
            detect_git_remote(&workspace.root)
        } else {
            None
        };

        let mut snapshot_path = None;
        if !request.no_save {
            store.save(&project, &workspace.kind, remote.clone())?;

            // Snapshots are only taken when the store itself is written.
            if request.snapshot {
                snapshot_path = Some(store.snapshot(&project)?);
            }
        }

        Ok(SyncResponse {
            project,
            workspace: workspace.clone(),
            path: workspace.root.clone(),
            snapshot_created: snapshot_path,
            remote,
        })
    }
122
123 fn sync_single_repo(&self, workspace: &WorkspaceInfo) -> OpsResult<Project> {
124 let root = &workspace.root;
125 let name = &workspace.name;
126
127 let source = ProjectSource::LocalPath { path: root.clone() };
128
129 let mut project = Project {
130 name: name.clone(),
131 source,
132 repositories: vec![],
133 };
134
135 let mut repo = Repository::new(name, root.display().to_string(), root.clone());
136 scan_directory(&mut repo, root)?;
137 project.repositories.push(repo);
138
139 Ok(project)
140 }
141
142 fn sync_multi_repo(&self, workspace: &WorkspaceInfo) -> OpsResult<Project> {
143 let mut project = Project {
144 name: workspace.name.clone(),
145 source: ProjectSource::LocalPaths {
146 paths: workspace.repo_paths.clone(),
147 },
148 repositories: vec![],
149 };
150
151 for repo_path in &workspace.repo_paths {
152 let repo_name = repo_path
153 .file_name()
154 .map(|s| s.to_string_lossy().to_string())
155 .unwrap_or_else(|| "repo".to_string());
156
157 let mut repo = Repository::new(
158 &repo_name,
159 repo_path.display().to_string(),
160 repo_path.clone(),
161 );
162 scan_directory(&mut repo, repo_path)?;
163 project.repositories.push(repo);
164 }
165
166 Ok(project)
167 }
168
    /// Syncs every repository of a GitHub organization.
    ///
    /// Lists the org's repositories through the GitHub API (paginated, hard
    /// cap of 1000), clones any that are missing locally, scans them all
    /// into one project, and saves it unless `no_save` is set.
    ///
    /// # Errors
    /// Fails on missing GitHub credentials, API errors while listing, or
    /// filesystem/scan errors. Individual clone failures are logged and the
    /// repo is skipped rather than aborting the whole sync.
    async fn sync_github_org(&self, org: &str, request: &SyncRequest) -> OpsResult<SyncResponse> {
        self.config.validate_github()?;

        // Safe to unwrap: validate_github() ensures both values are present.
        let username = self.config.github_username.clone().unwrap();
        let token = self.config.github_token.clone().unwrap();

        let octocrab = Octocrab::builder()
            .personal_token(token.clone())
            .build()
            .map_err(|e| OpsError::GitHubApiError {
                resource: org.to_string(),
                message: e.to_string(),
            })?;

        info!(org = %org, "Fetching organization repositories");

        // Page through the listing, 100 repos per page.
        let mut all_repos = Vec::new();
        let mut page = 1u32;

        loop {
            let repos = octocrab
                .orgs(org)
                .list_repos()
                .per_page(100)
                .page(page)
                .send()
                .await
                .map_err(|e| OpsError::GitHubApiError {
                    resource: format!("{}/repos", org),
                    message: e.to_string(),
                })?;

            if repos.items.is_empty() {
                break;
            }

            all_repos.extend(repos.items);
            page += 1;

            // Cap at 10 pages (1000 repositories) to bound API usage.
            if page > 10 {
                warn!("Truncated at 1000 repositories");
                break;
            }
        }

        // Clone into the shared cache dir, or a local dir named after the org.
        let org_dir = if request.use_cache {
            self.config.org_cache_dir(org)
        } else {
            PathBuf::from(org)
        };

        std::fs::create_dir_all(&org_dir)?;

        let mut project = Project {
            name: org.to_string(),
            source: ProjectSource::GitHubOrg {
                organization: org.to_string(),
            },
            repositories: vec![],
        };

        for repo in &all_repos {
            let repo_name = &repo.name;

            if request.ignore.iter().any(|s| s == repo_name) {
                debug!(repo = %repo_name, "Skipping ignored repository");
                continue;
            }

            // The API may omit clone_url; fall back to the canonical form.
            let clone_url = repo
                .clone_url
                .as_ref()
                .map(|u| u.to_string())
                .unwrap_or_else(|| format!("https://github.com/{}/{}.git", org, repo_name));

            let repo_path = org_dir.join(repo_name);

            if needs_clone(&repo_path) {
                // A directory without .git is stale residue; clear it first.
                if repo_path.exists() {
                    std::fs::remove_dir_all(&repo_path)?;
                }

                // Best effort: a failed clone skips this repo, not the org.
                if let Err(e) = clone_repository(&clone_url, &repo_path, &username, &token) {
                    warn!(repo = %repo_name, error = %e, "Failed to clone repository");
                    continue;
                }
            }

            let mut repository = Repository::new(repo_name, &clone_url, repo_path.clone());
            scan_directory(&mut repository, &repo_path)?;
            project.repositories.push(repository);
        }

        // Inline text file contents strictly under the configured size limit.
        let max_size = self.config.max_content_size_kb * 1024;
        project.expand_content(|source| {
            source.size.map(|s| s < max_size).unwrap_or(false) && source.is_text()
        })?;

        // Synthesize a multi-repo workspace rooted at the org directory.
        let workspace = WorkspaceInfo {
            root: org_dir.clone(),
            kind: WorkspaceKind::MultiRepo {
                repo_count: project.repositories.len(),
            },
            repo_paths: project
                .repositories
                .iter()
                .map(|r| r.local_path.clone())
                .collect(),
            name: org.to_string(),
        };

        if !request.no_save {
            let store = Store::new(&org_dir);
            store.save(&project, &workspace.kind, None)?;
        }

        Ok(SyncResponse {
            project,
            workspace,
            path: org_dir,
            snapshot_created: None,
            remote: Some(format!("https://github.com/{}", org)),
        })
    }
302
    /// Syncs a single GitHub repository `owner/repo_name`.
    ///
    /// Clones it (into the cache dir, or a directory named after the repo)
    /// when missing, scans it into a one-repository project, inlines file
    /// contents up to the size limit, and saves to the repo's `.self` store
    /// unless `no_save` is set.
    async fn sync_github_repo(
        &self,
        owner: &str,
        repo_name: &str,
        request: &SyncRequest,
    ) -> OpsResult<SyncResponse> {
        self.config.validate_github()?;

        // Safe to unwrap: validate_github() ensures both values are present.
        let username = self.config.github_username.clone().unwrap();
        let token = self.config.github_token.clone().unwrap();

        let repo_path = if request.use_cache {
            let cache_dir = self.config.org_cache_dir(owner);
            std::fs::create_dir_all(&cache_dir)?;
            cache_dir.join(repo_name)
        } else {
            PathBuf::from(repo_name)
        };

        let clone_url = format!("https://github.com/{}/{}.git", owner, repo_name);

        if needs_clone(&repo_path) {
            // A directory without .git is stale residue; clear it first.
            if repo_path.exists() {
                std::fs::remove_dir_all(&repo_path)?;
            }
            clone_repository(&clone_url, &repo_path, &username, &token)?;
        }

        let mut project = Project {
            name: repo_name.to_string(),
            source: ProjectSource::GitHubRepo {
                owner: owner.to_string(),
                repo: repo_name.to_string(),
            },
            repositories: vec![],
        };

        let mut repository = Repository::new(repo_name, &clone_url, repo_path.clone());
        scan_directory(&mut repository, &repo_path)?;
        project.repositories.push(repository);

        // Inline text file contents strictly under the configured size limit.
        let max_size = self.config.max_content_size_kb * 1024;
        project.expand_content(|source| {
            source.size.map(|s| s < max_size).unwrap_or(false) && source.is_text()
        })?;

        let workspace = WorkspaceInfo {
            root: repo_path.clone(),
            kind: WorkspaceKind::SingleRepo,
            repo_paths: vec![repo_path.clone()],
            name: repo_name.to_string(),
        };

        if !request.no_save {
            let store = Store::new(&repo_path);
            store.save(
                &project,
                &workspace.kind,
                Some(format!("https://github.com/{}/{}", owner, repo_name)),
            )?;
        }

        Ok(SyncResponse {
            project,
            workspace,
            path: repo_path,
            snapshot_created: None,
            remote: Some(format!("https://github.com/{}/{}", owner, repo_name)),
        })
    }
380
381 pub async fn graph(&self, request: GraphRequest) -> OpsResult<GraphResponse> {
387 let path = request
388 .path
389 .canonicalize()
390 .unwrap_or_else(|_| request.path.clone());
391 let store = Store::new(&path);
392
393 if !store.exists() {
394 return Err(OpsError::StoreNotFound { path });
395 }
396
397 if !request.force {
399 if let Some(graph) = store.load_graph()? {
400 return Ok(GraphResponse {
401 graph,
402 saved_path: store.self_dir().join("graph.json"),
403 output_path: request.output,
404 from_cache: true,
405 });
406 }
407 }
408
409 let project = store.load()?.ok_or(OpsError::ProjectNotFound)?;
411
412 let graph = self.build_source_graph(&project)?;
413
414 let saved_path = store.save_graph(&graph)?;
416
417 if let Some(ref output_path) = request.output {
419 let json = serde_json::to_string_pretty(&graph)?;
420 std::fs::write(output_path, &json)?;
421 }
422
423 Ok(GraphResponse {
424 graph,
425 saved_path,
426 output_path: request.output,
427 from_cache: false,
428 })
429 }
430
431 pub fn build_source_graph(&self, project: &Project) -> OpsResult<SourceCodeGraph> {
433 let mut builder = SourceCodeGraphBuilder::new()
434 .with_metadata("name", &project.name)
435 .with_metadata("type", "source_code_graph");
436
437 let mut all_dirs: HashSet<PathBuf> = HashSet::new();
439
440 let workspace_root = find_workspace_root(&project.repositories);
442 if let Some(ref root) = workspace_root {
443 all_dirs.insert(root.clone());
444 }
445
446 for repo in &project.repositories {
448 all_dirs.insert(repo.local_path.clone());
449
450 if let Some(ref ws_root) = workspace_root {
451 let mut current = repo.local_path.parent();
452 while let Some(dir_path) = current {
453 if dir_path == ws_root.as_path() {
454 break;
455 }
456 all_dirs.insert(dir_path.to_path_buf());
457 current = dir_path.parent();
458 }
459 }
460
461 for source in &repo.sources {
462 let mut current = source.path.parent();
463 while let Some(dir_path) = current {
464 all_dirs.insert(dir_path.to_path_buf());
465 if dir_path == repo.local_path || dir_path.parent().is_none() {
466 break;
467 }
468 current = dir_path.parent();
469 }
470 }
471 }
472
473 for dir_path in &all_dirs {
475 builder.add_directory(dir_path);
476 }
477
478 for repo in &project.repositories {
480 for source in &repo.sources {
481 builder.add_file(&source.path, &source.relative_path);
482 }
483 }
484
485 for repo in &project.repositories {
487 for source in &repo.sources {
488 if let Some(parent_dir) = source.path.parent() {
489 builder.add_hierarchy_edge(parent_dir, &source.path);
490 }
491 }
492 }
493
494 for dir_path in &all_dirs {
496 if let Some(parent_dir) = dir_path.parent() {
497 if all_dirs.contains(parent_dir) || parent_dir.exists() {
498 builder.add_hierarchy_edge(parent_dir, dir_path);
499 }
500 }
501 }
502
503 let max_size = self.config.max_content_size_kb * 1024;
505
506 for repo in &project.repositories {
507 for source in &repo.sources {
508 if !source.is_text() || source.size.map(|s| s > max_size).unwrap_or(true) {
509 continue;
510 }
511
512 let content = match &source.content {
513 Some(c) => c.clone(),
514 None => match std::fs::read_to_string(&source.path) {
515 Ok(c) => c,
516 Err(_) => continue,
517 },
518 };
519
520 let refs = detect_references(&content, &source.path);
521
522 for reference in refs {
523 if let Some(source_id) = builder.get_node_id(&reference.source_path) {
524 if let Some(target_id) =
525 builder.find_node_by_path_suffix(&reference.target_route)
526 {
527 if source_id != target_id {
528 builder.add_edge(source_id, target_id, reference.kind);
529 }
530 }
531 }
532 }
533 }
534 }
535
536 info!(
537 nodes = builder.node_count(),
538 edges = builder.edge_count(),
539 "Built SourceCodeGraph"
540 );
541
542 Ok(builder.build())
543 }
544
545 pub async fn status(&self, request: StatusRequest) -> OpsResult<StatusResponse> {
551 let workspace = WorkspaceInfo::detect(&request.path)?;
552 let store = Store::new(&workspace.root);
553 let stats = store.stats()?;
554
555 let repositories = if request.detailed && !workspace.repo_paths.is_empty() {
556 workspace
557 .repo_paths
558 .iter()
559 .filter_map(|p| p.file_name().map(|n| n.to_string_lossy().to_string()))
560 .collect()
561 } else {
562 vec![]
563 };
564
565 Ok(StatusResponse {
566 workspace,
567 store_exists: stats.exists,
568 manifest: stats.manifest,
569 snapshot_count: stats.snapshot_count,
570 store_size: stats.total_size,
571 repositories,
572 })
573 }
574
575 pub async fn load(&self, request: LoadRequest) -> OpsResult<LoadResponse> {
581 let path = request
582 .path
583 .canonicalize()
584 .unwrap_or_else(|_| request.path.clone());
585 let store = Store::new(&path);
586
587 if !store.exists() {
588 return Err(OpsError::StoreNotFound { path });
589 }
590
591 let project = store.load()?.ok_or(OpsError::ProjectNotFound)?;
592
593 let manifest = store.load_manifest()?.ok_or(OpsError::ProjectNotFound)?;
594
595 Ok(LoadResponse { project, manifest })
596 }
597
598 pub async fn clean(&self, request: CleanRequest) -> OpsResult<CleanResponse> {
604 let path = request
605 .path
606 .canonicalize()
607 .unwrap_or_else(|_| request.path.clone());
608 let store = Store::new(&path);
609
610 let cleaned = store.exists();
611 if cleaned {
612 store.clean()?;
613 }
614
615 Ok(CleanResponse { path, cleaned })
616 }
617
618 pub async fn git_changes(&self, request: GitChangesRequest) -> OpsResult<GitChangesResponse> {
624 let path = request
625 .path
626 .canonicalize()
627 .unwrap_or_else(|_| request.path.clone());
628 let store = Store::new(&path);
629
630 let changes = if store.exists() {
631 if let Some(project) = store.load()? {
632 git_changes_from_project(&project)
633 } else {
634 get_single_repo_changes(&path)
635 }
636 } else {
637 get_single_repo_changes(&path)
638 };
639
640 let change_count = changes.changes.len();
641
642 Ok(GitChangesResponse {
643 changes,
644 change_count,
645 })
646 }
647}
648
649fn detect_git_remote(path: &Path) -> Option<String> {
655 let repo = git2::Repository::open(path).ok()?;
656 let remote = repo.find_remote("origin").ok()?;
657 remote.url().map(|s| s.to_string())
658}
659
/// A path needs (re)cloning when it is missing or lacks a `.git` entry.
fn needs_clone(repo_path: &Path) -> bool {
    // A missing directory trivially has no `.git`, so one check covers both
    // the "path does not exist" and the "not a git checkout" cases.
    !repo_path.join(".git").exists()
}
667
/// Clones `url` into `path`, authenticating over HTTPS with `username` and
/// `token` (personal access token used as the password).
///
/// # Errors
/// Returns [`OpsError::CloneFailed`] carrying the repository URL and the
/// underlying git error message when the clone fails.
fn clone_repository(url: &str, path: &Path, username: &str, token: &str) -> OpsResult<()> {
    // Answer every credential request from the transport with the same
    // plaintext user/token pair.
    let mut callbacks = RemoteCallbacks::new();
    callbacks.credentials(|_url, _username_from_url, _allowed_types| {
        Cred::userpass_plaintext(username, token)
    });

    let mut fetch_options = git2::FetchOptions::new();
    fetch_options.remote_callbacks(callbacks);

    git2::build::RepoBuilder::new()
        .fetch_options(fetch_options)
        .clone(url, path)
        .map_err(|e| OpsError::CloneFailed {
            repo: url.to_string(),
            message: e.to_string(),
        })?;

    Ok(())
}
688
689fn find_workspace_root(repositories: &[Repository]) -> Option<PathBuf> {
691 if repositories.is_empty() {
692 return None;
693 }
694
695 if repositories.len() == 1 {
696 return Some(repositories[0].local_path.clone());
697 }
698
699 let mut common: Option<PathBuf> = None;
700
701 for repo in repositories {
702 let path = &repo.local_path;
703 match &common {
704 None => {
705 common = path.parent().map(|p| p.to_path_buf());
706 }
707 Some(current_common) => {
708 let mut new_common = PathBuf::new();
709 let common_components: Vec<_> = current_common.components().collect();
710 let path_components: Vec<_> = path.components().collect();
711
712 for (c1, c2) in common_components.iter().zip(path_components.iter()) {
713 if c1 == c2 {
714 new_common.push(c1.as_os_str());
715 } else {
716 break;
717 }
718 }
719
720 if new_common.as_os_str().is_empty() {
721 return None;
722 }
723 common = Some(new_common);
724 }
725 }
726 }
727
728 common
729}
730
731fn git_changes_from_project(project: &Project) -> GitChangeSnapshot {
733 use vibe_graph_core::GitFileChange;
734
735 let mut all_changes: Vec<GitFileChange> = Vec::new();
736
737 for repo in &project.repositories {
738 if let Ok(snapshot) = get_git_changes(&repo.local_path) {
739 for mut change in snapshot.changes {
740 change.path = repo.local_path.join(&change.path);
741 all_changes.push(change);
742 }
743 }
744 }
745
746 GitChangeSnapshot {
747 changes: all_changes,
748 captured_at: Some(std::time::Instant::now()),
749 }
750}
751
752fn get_single_repo_changes(path: &Path) -> GitChangeSnapshot {
754 match get_git_changes(path) {
755 Ok(mut changes) => {
756 for change in &mut changes.changes {
758 change.path = path.join(&change.path);
759 }
760 changes
761 }
762 Err(_) => GitChangeSnapshot::default(),
763 }
764}