1use std::collections::HashSet;
7use std::path::{Path, PathBuf};
8
9use git2::{Cred, RemoteCallbacks};
10use octocrab::Octocrab;
11use tracing::{debug, info, warn};
12use vibe_graph_core::{
13 detect_references, GitChangeSnapshot, SourceCodeGraph, SourceCodeGraphBuilder,
14};
15use vibe_graph_git::get_git_changes;
16
17use crate::config::Config;
18use crate::error::{OpsError, OpsResult};
19use crate::project::{Project, ProjectSource, Repository};
20use crate::requests::*;
21use crate::responses::*;
22use crate::scan::scan_directory;
23use crate::store::Store;
24use crate::workspace::{SyncSource, WorkspaceInfo, WorkspaceKind};
25
/// Shared context for all high-level operations.
///
/// Wraps the loaded [`Config`] that every sync/graph/status call reads
/// (GitHub credentials, cache directories, content-size caps).
#[derive(Debug, Clone)]
pub struct OpsContext {
    // Runtime configuration; cheap to clone alongside the context.
    pub config: Config,
}
35
impl OpsContext {
    /// Creates a context that uses the given configuration.
    pub fn new(config: Config) -> Self {
        Self { config }
    }

    /// Creates a context from the default configuration source
    /// (whatever `Config::load` reads; errors propagate unchanged).
    pub fn default_config() -> OpsResult<Self> {
        Ok(Self::new(Config::load()?))
    }

    /// Entry point for a sync: dispatches on the request's source kind
    /// (local path, whole GitHub organization, or a single GitHub repo).
    pub async fn sync(&self, request: SyncRequest) -> OpsResult<SyncResponse> {
        match &request.source {
            SyncSource::Local { path } => self.sync_local(path, &request).await,
            SyncSource::GitHubOrg { org } => self.sync_github_org(org, &request).await,
            SyncSource::GitHubRepo { owner, repo } => {
                self.sync_github_repo(owner, repo, &request).await
            }
        }
    }

    /// Syncs a local workspace rooted at `path`.
    ///
    /// Unless `request.force` is set, a project previously saved in the
    /// store is returned as-is. Otherwise the workspace is re-scanned,
    /// eligible file contents are inlined, and (unless `request.no_save`)
    /// the result is persisted — optionally with a snapshot.
    async fn sync_local(&self, path: &Path, request: &SyncRequest) -> OpsResult<SyncResponse> {
        let workspace = WorkspaceInfo::detect(path)?;
        let store = Store::new(&workspace.root);

        // Fast path: reuse the cached project. Note the cache is only
        // consulted when saving is also allowed (`no_save` off) — a
        // read-only sync always re-scans.
        if !request.force && store.exists() && !request.no_save {
            if let Some(project) = store.load()? {
                info!(name = %project.name, "Using cached project from .self");
                return Ok(SyncResponse {
                    project,
                    workspace: workspace.clone(),
                    path: workspace.root.clone(),
                    snapshot_created: None,
                    remote: store.load_manifest()?.and_then(|m| m.remote),
                });
            }
        }

        // A plain directory is scanned exactly like a single repo.
        let mut project = match &workspace.kind {
            WorkspaceKind::SingleRepo => self.sync_single_repo(&workspace)?,
            WorkspaceKind::MultiRepo { .. } => self.sync_multi_repo(&workspace)?,
            WorkspaceKind::PlainDirectory => self.sync_single_repo(&workspace)?,
        };

        // Inline contents of text files strictly below the configured
        // byte cap; files with unknown size are skipped.
        // NOTE(review): this uses `s < max_size` while build_source_graph
        // skips on `s > max_size` — a file of exactly max_size is treated
        // differently by the two paths; confirm which bound is intended.
        let max_size = self.config.max_content_size_kb * 1024;
        project.expand_content(|source| {
            source.size.map(|s| s < max_size).unwrap_or(false) && source.is_text()
        })?;

        // Only a single-repo workspace has one well-defined origin remote.
        let remote = if workspace.is_single_repo() {
            detect_git_remote(&workspace.root)
        } else {
            None
        };

        let mut snapshot_path = None;
        if !request.no_save {
            store.save(&project, &workspace.kind, remote.clone())?;

            // Snapshots are only taken when a save happened.
            if request.snapshot {
                snapshot_path = Some(store.snapshot(&project)?);
            }
        }

        Ok(SyncResponse {
            project,
            workspace: workspace.clone(),
            path: workspace.root.clone(),
            snapshot_created: snapshot_path,
            remote,
        })
    }

    /// Builds a one-repository project by scanning the workspace root.
    fn sync_single_repo(&self, workspace: &WorkspaceInfo) -> OpsResult<Project> {
        let root = &workspace.root;
        let name = &workspace.name;

        let source = ProjectSource::LocalPath { path: root.clone() };

        let mut project = Project {
            name: name.clone(),
            source,
            repositories: vec![],
        };

        let mut repo = Repository::new(name, root.display().to_string(), root.clone());
        scan_directory(&mut repo, root)?;
        project.repositories.push(repo);

        Ok(project)
    }

    /// Builds a project with one repository per detected repo path in
    /// the workspace. Repo names come from the path's final component,
    /// falling back to "repo" when it has none.
    fn sync_multi_repo(&self, workspace: &WorkspaceInfo) -> OpsResult<Project> {
        let mut project = Project {
            name: workspace.name.clone(),
            source: ProjectSource::LocalPaths {
                paths: workspace.repo_paths.clone(),
            },
            repositories: vec![],
        };

        for repo_path in &workspace.repo_paths {
            let repo_name = repo_path
                .file_name()
                .map(|s| s.to_string_lossy().to_string())
                .unwrap_or_else(|| "repo".to_string());

            let mut repo = Repository::new(
                &repo_name,
                repo_path.display().to_string(),
                repo_path.clone(),
            );
            scan_directory(&mut repo, repo_path)?;
            project.repositories.push(repo);
        }

        Ok(project)
    }

    /// Syncs every repository of a GitHub organization.
    ///
    /// Lists repos via the GitHub API (paginated, capped — see below),
    /// clones any that are missing or stale, scans each into the
    /// project, inlines text content, and saves unless `no_save`.
    /// Individual clone failures are logged and skipped, not fatal.
    async fn sync_github_org(&self, org: &str, request: &SyncRequest) -> OpsResult<SyncResponse> {
        self.config.validate_github()?;

        // These unwraps assume validate_github() verified both values
        // are Some — confirm in Config if that contract ever changes.
        let username = self.config.github_username.clone().unwrap();
        let token = self.config.github_token.clone().unwrap();

        let octocrab = Octocrab::builder()
            .personal_token(token.clone())
            .build()
            .map_err(|e| OpsError::GitHubApiError {
                resource: org.to_string(),
                message: e.to_string(),
            })?;

        info!(org = %org, "Fetching organization repositories");

        let mut all_repos = Vec::new();
        let mut page = 1u32;

        // Paginate 100 repos at a time; the `page > 10` check runs after
        // the increment, so at most pages 1..=10 (1000 repos) are fetched.
        loop {
            let repos = octocrab
                .orgs(org)
                .list_repos()
                .per_page(100)
                .page(page)
                .send()
                .await
                .map_err(|e| OpsError::GitHubApiError {
                    resource: format!("{}/repos", org),
                    message: e.to_string(),
                })?;

            if repos.items.is_empty() {
                break;
            }

            all_repos.extend(repos.items);
            page += 1;

            if page > 10 {
                warn!("Truncated at 1000 repositories");
                break;
            }
        }

        // Clone into the shared cache dir, or into ./<org> when caching
        // is disabled.
        let org_dir = if request.use_cache {
            self.config.org_cache_dir(org)
        } else {
            PathBuf::from(org)
        };

        std::fs::create_dir_all(&org_dir)?;

        let mut project = Project {
            name: org.to_string(),
            source: ProjectSource::GitHubOrg {
                organization: org.to_string(),
            },
            repositories: vec![],
        };

        for repo in &all_repos {
            let repo_name = &repo.name;

            // Honour the request's ignore list by exact name match.
            if request.ignore.iter().any(|s| s == repo_name) {
                debug!(repo = %repo_name, "Skipping ignored repository");
                continue;
            }

            // Fall back to the canonical GitHub URL when the API did not
            // return a clone URL.
            let clone_url = repo
                .clone_url
                .as_ref()
                .map(|u| u.to_string())
                .unwrap_or_else(|| format!("https://github.com/{}/{}.git", org, repo_name));

            let repo_path = org_dir.join(repo_name);

            // A directory without a .git marker is stale/partial:
            // wipe it and re-clone from scratch.
            if needs_clone(&repo_path) {
                if repo_path.exists() {
                    std::fs::remove_dir_all(&repo_path)?;
                }

                // Best effort per repo: a failed clone is logged and the
                // repo is left out of the project.
                if let Err(e) = clone_repository(&clone_url, &repo_path, &username, &token) {
                    warn!(repo = %repo_name, error = %e, "Failed to clone repository");
                    continue;
                }
            }

            let mut repository = Repository::new(repo_name, &clone_url, repo_path.clone());
            scan_directory(&mut repository, &repo_path)?;
            project.repositories.push(repository);
        }

        // Same text/size inlining policy as sync_local.
        let max_size = self.config.max_content_size_kb * 1024;
        project.expand_content(|source| {
            source.size.map(|s| s < max_size).unwrap_or(false) && source.is_text()
        })?;

        // Synthesize a multi-repo workspace rooted at the org directory.
        let workspace = WorkspaceInfo {
            root: org_dir.clone(),
            kind: WorkspaceKind::MultiRepo {
                repo_count: project.repositories.len(),
            },
            repo_paths: project
                .repositories
                .iter()
                .map(|r| r.local_path.clone())
                .collect(),
            name: org.to_string(),
        };

        if !request.no_save {
            let store = Store::new(&org_dir);
            // No single origin remote for a whole org, hence None.
            store.save(&project, &workspace.kind, None)?;
        }

        Ok(SyncResponse {
            project,
            workspace,
            path: org_dir,
            snapshot_created: None,
            remote: Some(format!("https://github.com/{}", org)),
        })
    }

    /// Syncs a single GitHub repository `owner/repo_name`: clones it if
    /// needed (into the org cache dir, or ./<repo_name> when caching is
    /// off), scans it, inlines text content, and saves unless `no_save`.
    /// Unlike the org sync, a clone failure here is fatal.
    async fn sync_github_repo(
        &self,
        owner: &str,
        repo_name: &str,
        request: &SyncRequest,
    ) -> OpsResult<SyncResponse> {
        self.config.validate_github()?;

        // Same assumption as sync_github_org: validate_github() is
        // expected to guarantee these are Some.
        let username = self.config.github_username.clone().unwrap();
        let token = self.config.github_token.clone().unwrap();

        let repo_path = if request.use_cache {
            let cache_dir = self.config.org_cache_dir(owner);
            std::fs::create_dir_all(&cache_dir)?;
            cache_dir.join(repo_name)
        } else {
            PathBuf::from(repo_name)
        };

        let clone_url = format!("https://github.com/{}/{}.git", owner, repo_name);

        // Stale/partial checkout (no .git): wipe and re-clone.
        if needs_clone(&repo_path) {
            if repo_path.exists() {
                std::fs::remove_dir_all(&repo_path)?;
            }
            clone_repository(&clone_url, &repo_path, &username, &token)?;
        }

        let mut project = Project {
            name: repo_name.to_string(),
            source: ProjectSource::GitHubRepo {
                owner: owner.to_string(),
                repo: repo_name.to_string(),
            },
            repositories: vec![],
        };

        let mut repository = Repository::new(repo_name, &clone_url, repo_path.clone());
        scan_directory(&mut repository, &repo_path)?;
        project.repositories.push(repository);

        // Same text/size inlining policy as sync_local.
        let max_size = self.config.max_content_size_kb * 1024;
        project.expand_content(|source| {
            source.size.map(|s| s < max_size).unwrap_or(false) && source.is_text()
        })?;

        let workspace = WorkspaceInfo {
            root: repo_path.clone(),
            kind: WorkspaceKind::SingleRepo,
            repo_paths: vec![repo_path.clone()],
            name: repo_name.to_string(),
        };

        if !request.no_save {
            let store = Store::new(&repo_path);
            store.save(
                &project,
                &workspace.kind,
                Some(format!("https://github.com/{}/{}", owner, repo_name)),
            )?;
        }

        Ok(SyncResponse {
            project,
            workspace,
            path: repo_path,
            snapshot_created: None,
            remote: Some(format!("https://github.com/{}/{}", owner, repo_name)),
        })
    }

    /// Builds (or loads from cache) the source-code graph for the
    /// project stored at `request.path`.
    ///
    /// Requires a prior sync — errors with `StoreNotFound` otherwise.
    /// Unless `force`, a previously saved graph is returned from cache.
    /// A freshly built graph is always saved back to the store, and
    /// `request.output` additionally writes a pretty-printed JSON copy.
    pub async fn graph(&self, request: GraphRequest) -> OpsResult<GraphResponse> {
        // Best-effort canonicalization; fall back to the raw path so a
        // not-yet-existing path still produces a useful error below.
        let path = request
            .path
            .canonicalize()
            .unwrap_or_else(|_| request.path.clone());
        let store = Store::new(&path);

        if !store.exists() {
            return Err(OpsError::StoreNotFound { path });
        }

        if !request.force {
            if let Some(graph) = store.load_graph()? {
                return Ok(GraphResponse {
                    graph,
                    saved_path: store.self_dir().join("graph.json"),
                    output_path: request.output,
                    from_cache: true,
                });
            }
        }

        let project = store.load()?.ok_or(OpsError::ProjectNotFound)?;

        let graph = self.build_source_graph(&project)?;

        let saved_path = store.save_graph(&graph)?;

        // Optional extra copy outside the store, pretty-printed.
        if let Some(ref output_path) = request.output {
            let json = serde_json::to_string_pretty(&graph)?;
            std::fs::write(output_path, &json)?;
        }

        Ok(GraphResponse {
            graph,
            saved_path,
            output_path: request.output,
            from_cache: false,
        })
    }

    /// Builds a `SourceCodeGraph` for `project`:
    /// 1. collect every directory between the workspace root and each file,
    /// 2. add directory and file nodes,
    /// 3. add parent→child hierarchy edges,
    /// 4. add cross-file reference edges detected from file contents,
    ///    and tag files that contain inline tests.
    pub fn build_source_graph(&self, project: &Project) -> OpsResult<SourceCodeGraph> {
        let mut builder = SourceCodeGraphBuilder::new()
            .with_metadata("name", &project.name)
            .with_metadata("type", "source_code_graph");

        let mut all_dirs: HashSet<PathBuf> = HashSet::new();

        let workspace_root = find_workspace_root(&project.repositories);
        if let Some(ref root) = workspace_root {
            all_dirs.insert(root.clone());
        }

        for repo in &project.repositories {
            all_dirs.insert(repo.local_path.clone());

            // Intermediate directories between the workspace root and
            // each repo root (exclusive of the root itself, which was
            // inserted above).
            if let Some(ref ws_root) = workspace_root {
                let mut current = repo.local_path.parent();
                while let Some(dir_path) = current {
                    if dir_path == ws_root.as_path() {
                        break;
                    }
                    all_dirs.insert(dir_path.to_path_buf());
                    current = dir_path.parent();
                }
            }

            // Directories between each source file and its repo root.
            // The repo root is inserted before the break test, so it is
            // (re)inserted here too — HashSet makes that harmless.
            for source in &repo.sources {
                let mut current = source.path.parent();
                while let Some(dir_path) = current {
                    all_dirs.insert(dir_path.to_path_buf());
                    if dir_path == repo.local_path || dir_path.parent().is_none() {
                        break;
                    }
                    current = dir_path.parent();
                }
            }
        }

        for dir_path in &all_dirs {
            builder.add_directory(dir_path);
        }

        for repo in &project.repositories {
            for source in &repo.sources {
                builder.add_file(&source.path, &source.relative_path);
            }
        }

        // Directory → file containment edges.
        for repo in &project.repositories {
            for source in &repo.sources {
                if let Some(parent_dir) = source.path.parent() {
                    builder.add_hierarchy_edge(parent_dir, &source.path);
                }
            }
        }

        // Directory → directory containment edges. The `parent_dir.exists()`
        // fallback also links to parents that were never collected but are
        // present on disk — presumably so the builder can resolve them;
        // confirm against SourceCodeGraphBuilder's contract.
        for dir_path in &all_dirs {
            if let Some(parent_dir) = dir_path.parent() {
                if all_dirs.contains(parent_dir) || parent_dir.exists() {
                    builder.add_hierarchy_edge(parent_dir, dir_path);
                }
            }
        }

        let max_size = self.config.max_content_size_kb * 1024;

        for repo in &project.repositories {
            for source in &repo.sources {
                // Skip binary files, unknown sizes, and oversized files.
                // NOTE(review): `s > max_size` here vs `s < max_size` in
                // expand_content — exactly-max_size files diverge.
                if !source.is_text() || source.size.map(|s| s > max_size).unwrap_or(true) {
                    continue;
                }

                // Prefer content already inlined by expand_content; read
                // from disk otherwise, skipping unreadable files.
                let content = match &source.content {
                    Some(c) => c.clone(),
                    None => match std::fs::read_to_string(&source.path) {
                        Ok(c) => c,
                        Err(_) => continue,
                    },
                };

                if let Some(node_id) = builder.get_node_id(&source.path) {
                    if has_inline_tests(&content, &source.path) {
                        builder.set_node_metadata(node_id, "has_tests", "true");
                    }
                }

                let refs = detect_references(&content, &source.path);

                for reference in refs {
                    if let Some(source_id) = builder.get_node_id(&reference.source_path) {
                        if let Some(target_id) =
                            builder.find_node_by_path_suffix(&reference.target_route)
                        {
                            // No self-loops.
                            if source_id != target_id {
                                builder.add_edge(source_id, target_id, reference.kind);
                            }
                        }
                    }
                }
            }
        }

        info!(
            nodes = builder.node_count(),
            edges = builder.edge_count(),
            "Built SourceCodeGraph"
        );

        Ok(builder.build())
    }

    /// Reports workspace and store state at `request.path`: whether the
    /// store exists, its manifest, snapshot count, total size, and —
    /// when `detailed` — the repository directory names.
    pub async fn status(&self, request: StatusRequest) -> OpsResult<StatusResponse> {
        let workspace = WorkspaceInfo::detect(&request.path)?;
        let store = Store::new(&workspace.root);
        let stats = store.stats()?;

        let repositories = if request.detailed && !workspace.repo_paths.is_empty() {
            workspace
                .repo_paths
                .iter()
                .filter_map(|p| p.file_name().map(|n| n.to_string_lossy().to_string()))
                .collect()
        } else {
            vec![]
        };

        Ok(StatusResponse {
            workspace,
            store_exists: stats.exists,
            manifest: stats.manifest,
            snapshot_count: stats.snapshot_count,
            store_size: stats.total_size,
            repositories,
        })
    }

    /// Loads the saved project and manifest from the store at
    /// `request.path`. Errors if the store, project, or manifest is
    /// missing (a store without a manifest maps to `ProjectNotFound`).
    pub async fn load(&self, request: LoadRequest) -> OpsResult<LoadResponse> {
        let path = request
            .path
            .canonicalize()
            .unwrap_or_else(|_| request.path.clone());
        let store = Store::new(&path);

        if !store.exists() {
            return Err(OpsError::StoreNotFound { path });
        }

        let project = store.load()?.ok_or(OpsError::ProjectNotFound)?;

        let manifest = store.load_manifest()?.ok_or(OpsError::ProjectNotFound)?;

        Ok(LoadResponse { project, manifest })
    }

    /// Removes the store at `request.path` if present. Idempotent:
    /// `cleaned` reports whether there was anything to remove.
    pub async fn clean(&self, request: CleanRequest) -> OpsResult<CleanResponse> {
        let path = request
            .path
            .canonicalize()
            .unwrap_or_else(|_| request.path.clone());
        let store = Store::new(&path);

        let cleaned = store.exists();
        if cleaned {
            store.clean()?;
        }

        Ok(CleanResponse { path, cleaned })
    }

    /// Collects git changes at `request.path`. When a saved project
    /// exists, changes are gathered across all of its repositories;
    /// otherwise the path is treated as a single repository.
    pub async fn git_changes(&self, request: GitChangesRequest) -> OpsResult<GitChangesResponse> {
        let path = request
            .path
            .canonicalize()
            .unwrap_or_else(|_| request.path.clone());
        let store = Store::new(&path);

        let changes = if store.exists() {
            if let Some(project) = store.load()? {
                git_changes_from_project(&project)
            } else {
                get_single_repo_changes(&path)
            }
        } else {
            get_single_repo_changes(&path)
        };

        let change_count = changes.changes.len();

        Ok(GitChangesResponse {
            changes,
            change_count,
        })
    }
}
655
656fn detect_git_remote(path: &Path) -> Option<String> {
662 let repo = git2::Repository::open(path).ok()?;
663 let remote = repo.find_remote("origin").ok()?;
664 remote.url().map(|s| s.to_string())
665}
666
/// Decides whether `repo_path` must be (re)cloned: true when the
/// directory is missing entirely, or exists but has no `.git` entry
/// (i.e. it is not a usable checkout).
fn needs_clone(repo_path: &Path) -> bool {
    // Equivalent to the two-step check "missing dir, or dir without
    // .git": a usable clone requires both the directory and its .git
    // marker to be present.
    !(repo_path.exists() && repo_path.join(".git").exists())
}
674
675fn clone_repository(url: &str, path: &Path, username: &str, token: &str) -> OpsResult<()> {
677 let mut callbacks = RemoteCallbacks::new();
678 callbacks.credentials(|_url, _username_from_url, _allowed_types| {
679 Cred::userpass_plaintext(username, token)
680 });
681
682 let mut fetch_options = git2::FetchOptions::new();
683 fetch_options.remote_callbacks(callbacks);
684
685 git2::build::RepoBuilder::new()
686 .fetch_options(fetch_options)
687 .clone(url, path)
688 .map_err(|e| OpsError::CloneFailed {
689 repo: url.to_string(),
690 message: e.to_string(),
691 })?;
692
693 Ok(())
694}
695
696fn has_inline_tests(content: &str, path: &Path) -> bool {
699 let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
700 match ext {
701 "rs" => content.contains("#[cfg(test)]") || content.contains("#[test]"),
702 "py" => {
703 content.contains("def test_")
704 || content.contains("class Test")
705 || content.contains("unittest.TestCase")
706 }
707 "ts" | "tsx" | "js" | "jsx" => {
708 content.contains("describe(") || content.contains("it(") || content.contains("test(")
709 }
710 "go" => content.contains("func Test"),
711 _ => false,
712 }
713}
714
715fn find_workspace_root(repositories: &[Repository]) -> Option<PathBuf> {
716 if repositories.is_empty() {
717 return None;
718 }
719
720 if repositories.len() == 1 {
721 return Some(repositories[0].local_path.clone());
722 }
723
724 let mut common: Option<PathBuf> = None;
725
726 for repo in repositories {
727 let path = &repo.local_path;
728 match &common {
729 None => {
730 common = path.parent().map(|p| p.to_path_buf());
731 }
732 Some(current_common) => {
733 let mut new_common = PathBuf::new();
734 let common_components: Vec<_> = current_common.components().collect();
735 let path_components: Vec<_> = path.components().collect();
736
737 for (c1, c2) in common_components.iter().zip(path_components.iter()) {
738 if c1 == c2 {
739 new_common.push(c1.as_os_str());
740 } else {
741 break;
742 }
743 }
744
745 if new_common.as_os_str().is_empty() {
746 return None;
747 }
748 common = Some(new_common);
749 }
750 }
751 }
752
753 common
754}
755
756fn git_changes_from_project(project: &Project) -> GitChangeSnapshot {
758 use vibe_graph_core::GitFileChange;
759
760 let mut all_changes: Vec<GitFileChange> = Vec::new();
761
762 for repo in &project.repositories {
763 if let Ok(snapshot) = get_git_changes(&repo.local_path) {
764 for mut change in snapshot.changes {
765 change.path = repo.local_path.join(&change.path);
766 all_changes.push(change);
767 }
768 }
769 }
770
771 GitChangeSnapshot {
772 changes: all_changes,
773 captured_at: Some(std::time::Instant::now()),
774 }
775}
776
777fn get_single_repo_changes(path: &Path) -> GitChangeSnapshot {
779 match get_git_changes(path) {
780 Ok(mut changes) => {
781 for change in &mut changes.changes {
783 change.path = path.join(&change.path);
784 }
785 changes
786 }
787 Err(_) => GitChangeSnapshot::default(),
788 }
789}