1use std::collections::HashMap;
4use std::sync::LazyLock;
5use std::time::{Duration, Instant};
6use tokio::process::Command;
7use tokio::sync::RwLock;
8
// Maximum time to wait for a single `gh` CLI invocation before giving up.
const GH_TIMEOUT: Duration = Duration::from_secs(10);

// How long cached PR/issue listings stay fresh before being refetched.
const CACHE_TTL: Duration = Duration::from_secs(30);
14
/// A cached value together with the time it was fetched, used for TTL checks.
struct CacheEntry<T> {
    data: T,
    // Compared against CACHE_TTL via `fetched_at.elapsed()`.
    fetched_at: Instant,
}
20
/// Process-wide cache of `gh` query results, keyed by repository directory.
struct GhCache {
    // Open PRs per repo dir; the inner map is keyed by head branch name.
    prs: RwLock<HashMap<String, CacheEntry<HashMap<String, PrInfo>>>>,
    // Open issues per repo dir.
    issues: RwLock<HashMap<String, CacheEntry<Vec<IssueInfo>>>>,
}
26
27impl GhCache {
28 fn new() -> Self {
29 Self {
30 prs: RwLock::new(HashMap::new()),
31 issues: RwLock::new(HashMap::new()),
32 }
33 }
34}
35
// Lazily-initialized global cache shared by every gh query in this process.
static GH_CACHE: LazyLock<GhCache> = LazyLock::new(GhCache::new);
38
/// Aggregate review state of a PR, mirroring GitHub's `reviewDecision`
/// values (serialized as SCREAMING_SNAKE_CASE, e.g. "CHANGES_REQUESTED").
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum ReviewDecision {
    Approved,
    ChangesRequested,
    ReviewRequired,
    // Catch-all for values this code does not recognize when deserializing.
    #[serde(other)]
    Unknown,
}
49
/// Aggregate CI check state for a PR or branch
/// (serialized as SCREAMING_SNAKE_CASE, e.g. "SUCCESS").
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum CheckStatus {
    Success,
    Failure,
    Pending,
    // Catch-all for values this code does not recognize when deserializing.
    #[serde(other)]
    Unknown,
}
60
/// Summary of a pull request as surfaced to consumers of this module.
#[derive(Debug, Clone, serde::Serialize)]
pub struct PrInfo {
    pub number: u64,
    pub title: String,
    /// State as reported by `gh` for open PRs; the literal "MERGED" for
    /// results produced by `list_merged_prs`.
    pub state: String,
    pub head_branch: String,
    pub head_sha: String,
    pub base_branch: String,
    pub url: String,
    /// Aggregate review decision; `None` when absent or unrecognized.
    pub review_decision: Option<ReviewDecision>,
    /// Rollup of CI checks; `None` when `gh` reported no rollup data.
    pub check_status: Option<CheckStatus>,
    pub is_draft: bool,
    pub additions: u64,
    pub deletions: u64,
    /// Count of top-level comments on the PR.
    pub comments: u64,
    /// Count of reviews on the PR.
    pub reviews: u64,
    /// SHA of the merge commit; populated only for merged PRs.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub merge_commit_sha: Option<String>,
}
84
/// Raw shape of one element of `gh pr list --json ...` output.
#[derive(Debug, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
struct GhPrEntry {
    number: u64,
    title: String,
    state: String,
    head_ref_name: String,
    head_ref_oid: String,
    base_ref_name: String,
    url: String,
    // Raw decision string (e.g. "APPROVED"); mapped to ReviewDecision later.
    review_decision: Option<String>,
    status_check_rollup: Option<Vec<GhCheckRun>>,
    is_draft: bool,
    additions: Option<u64>,
    deletions: Option<u64>,
    // Only the lengths of these arrays are used; elements stay untyped.
    comments: Option<Vec<serde_json::Value>>,
    reviews: Option<Vec<serde_json::Value>>,
}
104
/// One entry of a PR's `statusCheckRollup` array; only the fields needed to
/// compute an aggregate check status are deserialized.
#[derive(Debug, serde::Deserialize)]
struct GhCheckRun {
    conclusion: Option<String>,
    status: Option<String>,
}
111
/// Lists the open PRs of the repository at `repo_dir`, keyed by head branch
/// name, using `gh pr list`.
///
/// Results are cached per `repo_dir` for `CACHE_TTL`; a fresh cache entry is
/// served without spawning `gh`. Returns `None` on timeout, spawn failure,
/// a non-zero `gh` exit status, or unparsable JSON.
pub async fn list_open_prs(repo_dir: &str) -> Option<HashMap<String, PrInfo>> {
    // Fast path: serve from cache while the entry is still fresh. The read
    // guard is scoped so it is dropped before the slow fetch below.
    {
        let cache_read = GH_CACHE.prs.read().await;
        if let Some(entry) = cache_read.get(repo_dir) {
            if entry.fetched_at.elapsed() < CACHE_TTL {
                return Some(entry.data.clone());
            }
        }
    }

    let output = tokio::time::timeout(
        GH_TIMEOUT,
        Command::new("gh")
            .args([
                "pr",
                "list",
                "--state",
                "open",
                "--json",
                "number,title,state,headRefName,headRefOid,baseRefName,url,reviewDecision,statusCheckRollup,isDraft,additions,deletions,comments,reviews",
                "--limit",
                "50",
            ])
            .current_dir(repo_dir)
            .output(),
    )
    .await
    .ok()
    .and_then(|r| r.ok())?;

    if !output.status.success() {
        return None;
    }

    let entries: Vec<GhPrEntry> = serde_json::from_slice(&output.stdout).ok()?;

    let mut map = HashMap::new();
    for entry in entries {
        // Reduce the per-check rollup array to a single status:
        // any failure wins, then any pending, otherwise success.
        let check_status = entry.status_check_rollup.as_ref().map(|checks| {
            if checks.is_empty() {
                return CheckStatus::Unknown;
            }
            let has_failure = checks.iter().any(|c| {
                c.conclusion.as_deref() == Some("FAILURE")
                    || c.conclusion.as_deref() == Some("TIMED_OUT")
                    || c.conclusion.as_deref() == Some("CANCELLED")
            });
            if has_failure {
                return CheckStatus::Failure;
            }
            let has_pending = checks.iter().any(|c| {
                matches!(
                    c.status.as_deref(),
                    Some("IN_PROGRESS")
                        | Some("QUEUED")
                        | Some("WAITING")
                        | Some("PENDING")
                        | Some("REQUESTED")
                )
            });
            if has_pending {
                return CheckStatus::Pending;
            }
            CheckStatus::Success
        });

        // Unrecognized decision strings map to None here rather than to
        // ReviewDecision::Unknown.
        let review_decision = entry.review_decision.as_deref().and_then(|s| match s {
            "APPROVED" => Some(ReviewDecision::Approved),
            "CHANGES_REQUESTED" => Some(ReviewDecision::ChangesRequested),
            "REVIEW_REQUIRED" => Some(ReviewDecision::ReviewRequired),
            _ => None,
        });

        let pr = PrInfo {
            number: entry.number,
            title: entry.title,
            state: entry.state,
            head_branch: entry.head_ref_name.clone(),
            head_sha: entry.head_ref_oid.clone(),
            url: entry.url,
            base_branch: entry.base_ref_name.clone(),
            review_decision,
            check_status,
            is_draft: entry.is_draft,
            additions: entry.additions.unwrap_or(0),
            deletions: entry.deletions.unwrap_or(0),
            comments: entry.comments.map(|c| c.len() as u64).unwrap_or(0),
            reviews: entry.reviews.map(|r| r.len() as u64).unwrap_or(0),
            merge_commit_sha: None,
        };
        // Keyed by head branch; a later PR with the same branch overwrites.
        map.insert(entry.head_ref_name, pr);
    }

    // Refresh the cache for this repo dir with the newly fetched data.
    {
        let mut cache_write = GH_CACHE.prs.write().await;
        cache_write.insert(
            repo_dir.to_string(),
            CacheEntry {
                data: map.clone(),
                fetched_at: Instant::now(),
            },
        );
    }

    Some(map)
}
223
/// Raw shape of one element of `gh pr list --state merged --json ...` output.
#[derive(Debug, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
struct GhMergedPrEntry {
    number: u64,
    title: String,
    head_ref_name: String,
    head_ref_oid: String,
    base_ref_name: String,
    url: String,
    merge_commit: Option<GhMergeCommit>,
}
236
/// The `mergeCommit` object of a merged PR; only the SHA is needed.
#[derive(Debug, serde::Deserialize)]
struct GhMergeCommit {
    oid: String,
}
242
/// Lists recently merged PRs whose head branch still exists locally, keyed by
/// head branch name, using `gh pr list --state merged`.
///
/// With an empty `local_branches` an empty map is returned without spawning
/// `gh`. Unlike `list_open_prs`, results are not cached. Returns `None` on
/// timeout, spawn failure, a non-zero `gh` exit status, or unparsable JSON.
pub async fn list_merged_prs(
    repo_dir: &str,
    local_branches: &[String],
) -> Option<HashMap<String, PrInfo>> {
    if local_branches.is_empty() {
        return Some(HashMap::new());
    }

    let output = tokio::time::timeout(
        GH_TIMEOUT,
        Command::new("gh")
            .args([
                "pr",
                "list",
                "--state",
                "merged",
                "--json",
                "number,title,headRefName,headRefOid,baseRefName,url,mergeCommit",
                "--limit",
                "30",
            ])
            .current_dir(repo_dir)
            .output(),
    )
    .await
    .ok()
    .and_then(|r| r.ok())?;

    if !output.status.success() {
        return None;
    }

    let entries: Vec<GhMergedPrEntry> = serde_json::from_slice(&output.stdout).ok()?;

    // Set lookup keeps the branch filter O(1) per entry.
    let branch_set: std::collections::HashSet<&str> =
        local_branches.iter().map(|s| s.as_str()).collect();

    let mut map = HashMap::new();
    for entry in entries {
        // Only surface merged PRs whose branch the caller still has locally.
        if !branch_set.contains(entry.head_ref_name.as_str()) {
            continue;
        }

        let merge_commit_sha = entry.merge_commit.map(|mc| mc.oid);

        let pr = PrInfo {
            number: entry.number,
            title: entry.title,
            state: "MERGED".to_string(),
            head_branch: entry.head_ref_name.clone(),
            head_sha: entry.head_ref_oid,
            base_branch: entry.base_ref_name,
            url: entry.url,
            // Review/check/size data is not fetched for merged PRs; the
            // fields below are deliberate placeholders.
            review_decision: None,
            check_status: None,
            is_draft: false,
            additions: 0,
            deletions: 0,
            comments: 0,
            reviews: 0,
            merge_commit_sha,
        };
        map.insert(entry.head_ref_name, pr);
    }

    Some(map)
}
317
/// One CI check (workflow run) attached to a branch.
#[derive(Debug, Clone, serde::Serialize)]
pub struct CiCheck {
    pub name: String,
    pub status: String,
    pub conclusion: Option<String>,
    pub url: String,
    pub started_at: Option<String>,
    pub completed_at: Option<String>,
    /// Numeric run id usable with `gh run view` / `gh run rerun`.
    pub run_id: Option<u64>,
}
329
/// A single comment or review note on a PR, flattened from the various
/// comment shapes `gh pr view` reports.
#[derive(Debug, Clone, serde::Serialize)]
pub struct PrComment {
    pub author: String,
    pub body: String,
    pub created_at: String,
    pub url: String,
    /// Either "comment" (top-level) or "review" (review body/inline comment).
    pub comment_type: String,
    /// File path, for inline review comments only.
    pub path: Option<String>,
    /// Surrounding diff context, for inline review comments only.
    pub diff_hunk: Option<String>,
}
344
/// Per-file change statistics for a PR.
#[derive(Debug, Clone, serde::Serialize)]
pub struct PrChangedFile {
    pub path: String,
    pub additions: u64,
    pub deletions: u64,
}
352
/// Mergeability snapshot of a PR; string fields default to "UNKNOWN" when
/// `gh` does not report them.
#[derive(Debug, Clone, serde::Serialize)]
pub struct PrMergeStatus {
    pub mergeable: String,
    pub merge_state_status: String,
    pub review_decision: Option<String>,
    pub check_status: Option<String>,
}
361
/// Failed-step log text of a GitHub Actions run (possibly truncated).
#[derive(Debug, Clone, serde::Serialize)]
pub struct CiFailureLog {
    pub run_id: u64,
    pub log_text: String,
}
368
/// Recent CI checks for a branch plus their aggregate rollup status.
#[derive(Debug, Clone, serde::Serialize)]
pub struct CiSummary {
    pub branch: String,
    pub checks: Vec<CiCheck>,
    pub rollup: CheckStatus,
}
376
/// Raw shape of one element of `gh run list --json ...` output.
#[derive(Debug, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
struct GhRunEntry {
    name: String,
    status: String,
    conclusion: Option<String>,
    url: String,
    head_branch: String,
    created_at: Option<String>,
    updated_at: Option<String>,
    // Numeric run id usable with `gh run view` / `gh run rerun`.
    database_id: Option<u64>,
}
391
/// Lists recent GitHub Actions runs for `branch` and derives an aggregate
/// rollup status, using `gh run list --branch`.
///
/// Returns `None` for an empty branch name or one starting with '-', and on
/// timeout, spawn failure, non-zero exit, or unparsable JSON.
pub async fn list_checks(repo_dir: &str, branch: &str) -> Option<CiSummary> {
    // Reject names that could be interpreted as extra `gh` flags.
    if branch.is_empty() || branch.starts_with('-') {
        return None;
    }

    let output = tokio::time::timeout(
        GH_TIMEOUT,
        Command::new("gh")
            .args([
                "run",
                "list",
                "--branch",
                branch,
                "--json",
                "name,status,conclusion,url,headBranch,createdAt,updatedAt,databaseId",
                "--limit",
                "10",
            ])
            .current_dir(repo_dir)
            .output(),
    )
    .await
    .ok()
    .and_then(|r| r.ok())?;

    if !output.status.success() {
        return None;
    }

    let entries: Vec<GhRunEntry> = serde_json::from_slice(&output.stdout).ok()?;

    // Keep only the first run seen per workflow name. NOTE(review): this
    // presumes `gh run list` returns newest runs first — confirm ordering.
    let mut seen = std::collections::HashSet::new();
    let checks: Vec<CiCheck> = entries
        .into_iter()
        .filter(|e| seen.insert(e.name.clone()))
        .map(|e| CiCheck {
            name: e.name,
            status: e.status,
            conclusion: e.conclusion,
            url: e.url,
            started_at: e.created_at,
            completed_at: e.updated_at,
            run_id: e.database_id,
        })
        .collect();

    let rollup = compute_rollup(&checks);

    Some(CiSummary {
        branch: branch.to_string(),
        checks,
        rollup,
    })
}
451
452fn compute_rollup(checks: &[CiCheck]) -> CheckStatus {
454 if checks.is_empty() {
455 return CheckStatus::Unknown;
456 }
457 let has_failure = checks.iter().any(|c| {
458 matches!(
459 c.conclusion.as_deref(),
460 Some("failure") | Some("timed_out") | Some("cancelled")
461 )
462 });
463 if has_failure {
464 return CheckStatus::Failure;
465 }
466 let has_pending = checks.iter().any(|c| {
467 matches!(
468 c.status.as_str(),
469 "in_progress" | "queued" | "waiting" | "pending" | "requested"
470 )
471 });
472 if has_pending {
473 return CheckStatus::Pending;
474 }
475 CheckStatus::Success
476}
477
/// A GitHub issue label; deserialized directly from `gh issue list` output.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct IssueLabel {
    pub name: String,
    pub color: String,
}
484
/// Summary of an open issue as surfaced to consumers of this module.
#[derive(Debug, Clone, serde::Serialize)]
pub struct IssueInfo {
    pub number: u64,
    pub title: String,
    pub state: String,
    pub url: String,
    pub labels: Vec<IssueLabel>,
    /// Assignee login names, flattened from the `gh` assignee objects.
    pub assignees: Vec<String>,
}
495
/// An assignee object from `gh issue list`; only the login is needed.
#[derive(Debug, serde::Deserialize)]
struct GhAssignee {
    login: String,
}
501
/// Raw shape of one element of `gh issue list --json ...` output.
#[derive(Debug, serde::Deserialize)]
struct GhIssueEntry {
    number: u64,
    title: String,
    state: String,
    url: String,
    labels: Vec<IssueLabel>,
    // Defaults to empty when `gh` omits the field.
    #[serde(default)]
    assignees: Vec<GhAssignee>,
}
513
/// Lists the open issues of the repository at `repo_dir` using
/// `gh issue list`.
///
/// Results are cached per `repo_dir` for `CACHE_TTL`; a fresh cache entry is
/// served without spawning `gh`. Returns `None` on timeout, spawn failure,
/// a non-zero `gh` exit status, or unparsable JSON.
pub async fn list_issues(repo_dir: &str) -> Option<Vec<IssueInfo>> {
    // Fast path: serve from cache while the entry is still fresh. The read
    // guard is scoped so it is dropped before the slow fetch below.
    {
        let cache_read = GH_CACHE.issues.read().await;
        if let Some(entry) = cache_read.get(repo_dir) {
            if entry.fetched_at.elapsed() < CACHE_TTL {
                return Some(entry.data.clone());
            }
        }
    }

    let output = tokio::time::timeout(
        GH_TIMEOUT,
        Command::new("gh")
            .args([
                "issue",
                "list",
                "--state",
                "open",
                "--json",
                "number,title,state,url,labels,assignees",
                "--limit",
                "50",
            ])
            .current_dir(repo_dir)
            .output(),
    )
    .await
    .ok()
    .and_then(|r| r.ok())?;

    if !output.status.success() {
        return None;
    }

    let entries: Vec<GhIssueEntry> = serde_json::from_slice(&output.stdout).ok()?;

    let issues: Vec<IssueInfo> = entries
        .into_iter()
        .map(|e| IssueInfo {
            number: e.number,
            title: e.title,
            state: e.state,
            url: e.url,
            labels: e.labels,
            assignees: e.assignees.into_iter().map(|a| a.login).collect(),
        })
        .collect();

    // Refresh the cache for this repo dir with the newly fetched data.
    {
        let mut cache_write = GH_CACHE.issues.write().await;
        cache_write.insert(
            repo_dir.to_string(),
            CacheEntry {
                data: issues.clone(),
                fetched_at: Instant::now(),
            },
        );
    }

    Some(issues)
}
578
/// Collects the discussion on PR `pr_number`: top-level comments, review
/// summaries (only those with a non-empty body), and inline review comments,
/// flattened into a single list sorted by `created_at`.
///
/// Uses `gh pr view --json comments,reviews`. Returns `None` on timeout,
/// spawn failure, a non-zero `gh` exit status, or unparsable JSON. Missing
/// fields fall back to "unknown" (author) or the empty string rather than
/// failing the whole call.
pub async fn get_pr_comments(repo_dir: &str, pr_number: u64) -> Option<Vec<PrComment>> {
    let output = tokio::time::timeout(
        GH_TIMEOUT,
        Command::new("gh")
            .args([
                "pr",
                "view",
                &pr_number.to_string(),
                "--json",
                "comments,reviews",
            ])
            .current_dir(repo_dir)
            .output(),
    )
    .await
    .ok()
    .and_then(|r| r.ok())?;

    if !output.status.success() {
        return None;
    }

    // Parsed as untyped JSON; the comment/review shapes vary too much for a
    // fixed struct, and individual missing fields should not abort the call.
    let json: serde_json::Value = serde_json::from_slice(&output.stdout).ok()?;

    let mut result = Vec::new();

    // Top-level (issue-style) PR comments.
    if let Some(comments) = json.get("comments").and_then(|v| v.as_array()) {
        for c in comments {
            let author = c
                .pointer("/author/login")
                .and_then(|v| v.as_str())
                .unwrap_or("unknown")
                .to_string();
            let body = c
                .get("body")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            let created_at = c
                .get("createdAt")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            let url = c
                .get("url")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            result.push(PrComment {
                author,
                body,
                created_at,
                url,
                comment_type: "comment".to_string(),
                path: None,
                diff_hunk: None,
            });
        }
    }

    // Reviews: the review body itself (if any) plus its inline comments.
    if let Some(reviews) = json.get("reviews").and_then(|v| v.as_array()) {
        for r in reviews {
            let review_author = r
                .pointer("/author/login")
                .and_then(|v| v.as_str())
                .unwrap_or("unknown")
                .to_string();

            // Reviews with an empty body (e.g. bare approvals) are skipped;
            // only their inline comments, if any, are kept below.
            let review_body = r
                .get("body")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            if !review_body.is_empty() {
                let review_state = r
                    .get("state")
                    .and_then(|v| v.as_str())
                    .unwrap_or("")
                    .to_string();
                // Prefer submittedAt; fall back to createdAt when absent.
                let created_at = r
                    .get("submittedAt")
                    .or_else(|| r.get("createdAt"))
                    .and_then(|v| v.as_str())
                    .unwrap_or("")
                    .to_string();
                result.push(PrComment {
                    author: review_author.clone(),
                    // Prefix with the review state, e.g. "[APPROVED] LGTM".
                    body: format!("[{}] {}", review_state, review_body),
                    created_at,
                    url: String::new(),
                    comment_type: "review".to_string(),
                    path: None,
                    diff_hunk: None,
                });
            }

            // Inline (file-anchored) review comments.
            if let Some(comments) = r.get("comments").and_then(|v| v.as_array()) {
                for c in comments {
                    let body = c
                        .get("body")
                        .and_then(|v| v.as_str())
                        .unwrap_or("")
                        .to_string();
                    let created_at = c
                        .get("createdAt")
                        .and_then(|v| v.as_str())
                        .unwrap_or("")
                        .to_string();
                    let url = c
                        .get("url")
                        .and_then(|v| v.as_str())
                        .unwrap_or("")
                        .to_string();
                    let path = c
                        .get("path")
                        .and_then(|v| v.as_str())
                        .map(|s| s.to_string());
                    let diff_hunk = c
                        .get("diffHunk")
                        .and_then(|v| v.as_str())
                        .map(|s| s.to_string());
                    result.push(PrComment {
                        author: review_author.clone(),
                        body,
                        created_at,
                        url,
                        comment_type: "review".to_string(),
                        path,
                        diff_hunk,
                    });
                }
            }
        }
    }

    // Lexicographic comparison of the timestamp strings; chronological order
    // presumes `gh` emits ISO-8601 timestamps — confirm if this matters.
    result.sort_by(|a, b| a.created_at.cmp(&b.created_at));

    Some(result)
}
727
728pub async fn get_pr_files(repo_dir: &str, pr_number: u64) -> Option<Vec<PrChangedFile>> {
730 let output = tokio::time::timeout(
731 GH_TIMEOUT,
732 Command::new("gh")
733 .args(["pr", "view", &pr_number.to_string(), "--json", "files"])
734 .current_dir(repo_dir)
735 .output(),
736 )
737 .await
738 .ok()
739 .and_then(|r| r.ok())?;
740
741 if !output.status.success() {
742 return None;
743 }
744
745 #[derive(serde::Deserialize)]
746 struct FilesResponse {
747 files: Vec<GhFileEntry>,
748 }
749
750 #[derive(serde::Deserialize)]
751 struct GhFileEntry {
752 path: String,
753 additions: u64,
754 deletions: u64,
755 }
756
757 let resp: FilesResponse = serde_json::from_slice(&output.stdout).ok()?;
758
759 Some(
760 resp.files
761 .into_iter()
762 .map(|f| PrChangedFile {
763 path: f.path,
764 additions: f.additions,
765 deletions: f.deletions,
766 })
767 .collect(),
768 )
769}
770
/// Fetches mergeability data for PR `pr_number` via
/// `gh pr view --json mergeable,mergeStateStatus,reviewDecision,statusCheckRollup`.
///
/// String fields default to "UNKNOWN" when absent. The check rollup is
/// reduced to "FAILURE"/"PENDING"/"SUCCESS"/"UNKNOWN" with the same
/// precedence as the rollup logic in `list_open_prs`. Returns `None` on
/// timeout, spawn failure, non-zero exit, or unparsable JSON.
pub async fn get_pr_merge_status(repo_dir: &str, pr_number: u64) -> Option<PrMergeStatus> {
    let output = tokio::time::timeout(
        GH_TIMEOUT,
        Command::new("gh")
            .args([
                "pr",
                "view",
                &pr_number.to_string(),
                "--json",
                "mergeable,mergeStateStatus,reviewDecision,statusCheckRollup",
            ])
            .current_dir(repo_dir)
            .output(),
    )
    .await
    .ok()
    .and_then(|r| r.ok())?;

    if !output.status.success() {
        return None;
    }

    let json: serde_json::Value = serde_json::from_slice(&output.stdout).ok()?;

    let mergeable = json
        .get("mergeable")
        .and_then(|v| v.as_str())
        .unwrap_or("UNKNOWN")
        .to_string();

    let merge_state_status = json
        .get("mergeStateStatus")
        .and_then(|v| v.as_str())
        .unwrap_or("UNKNOWN")
        .to_string();

    let review_decision = json
        .get("reviewDecision")
        .and_then(|v| v.as_str())
        .map(|s| s.to_string());

    // Reduce the rollup array: any failure wins, then any pending,
    // otherwise success; an empty array is unknown.
    let check_status = json
        .get("statusCheckRollup")
        .and_then(|v| v.as_array())
        .map(|checks| {
            if checks.is_empty() {
                return "UNKNOWN".to_string();
            }
            let has_failure = checks.iter().any(|c| {
                matches!(
                    c.get("conclusion").and_then(|v| v.as_str()),
                    Some("FAILURE") | Some("TIMED_OUT") | Some("CANCELLED")
                )
            });
            if has_failure {
                return "FAILURE".to_string();
            }
            let has_pending = checks.iter().any(|c| {
                matches!(
                    c.get("status").and_then(|v| v.as_str()),
                    Some("IN_PROGRESS")
                        | Some("QUEUED")
                        | Some("WAITING")
                        | Some("PENDING")
                        | Some("REQUESTED")
                )
            });
            if has_pending {
                return "PENDING".to_string();
            }
            "SUCCESS".to_string()
        });

    Some(PrMergeStatus {
        mergeable,
        merge_state_status,
        review_decision,
        check_status,
    })
}
853
// Cap on the amount of CI log text returned, to bound memory and payload size.
const CI_LOG_MAX_BYTES: usize = 50 * 1024;
856
857pub async fn get_ci_failure_log(repo_dir: &str, run_id: u64) -> Option<CiFailureLog> {
862 let output = tokio::time::timeout(
863 Duration::from_secs(30), Command::new("gh")
865 .args(["run", "view", &run_id.to_string(), "--log-failed"])
866 .current_dir(repo_dir)
867 .output(),
868 )
869 .await
870 .ok()
871 .and_then(|r| r.ok())?;
872
873 if !output.status.success() {
874 return None;
875 }
876
877 if output.stdout.is_empty() {
878 return None;
879 }
880
881 let text = if output.stdout.len() > CI_LOG_MAX_BYTES {
883 let truncated = &output.stdout[..CI_LOG_MAX_BYTES];
884 let s = String::from_utf8_lossy(truncated);
886 format!("{}\n\n... (truncated, showing first 50KB)", s)
887 } else {
888 String::from_utf8_lossy(&output.stdout).to_string()
889 };
890
891 Some(CiFailureLog {
892 run_id,
893 log_text: text,
894 })
895}
896
897pub async fn rerun_failed_checks(repo_dir: &str, run_id: u64) -> Option<()> {
901 let output = tokio::time::timeout(
902 GH_TIMEOUT,
903 Command::new("gh")
904 .args(["run", "rerun", &run_id.to_string(), "--failed"])
905 .current_dir(repo_dir)
906 .output(),
907 )
908 .await
909 .ok()
910 .and_then(|r| r.ok())?;
911
912 if output.status.success() {
913 Some(())
914 } else {
915 None
916 }
917}
918
/// Extracts plausible issue numbers embedded in a branch name.
///
/// The branch is split on '/', '-' and '_'; every segment that parses as a
/// positive integer below 100_000 is collected, in order of appearance.
/// The upper bound filters out values that are unlikely to be issue ids
/// (e.g. dates or SHAs of digits).
pub fn extract_issue_numbers(branch: &str) -> Vec<u64> {
    branch
        .split(['/', '-', '_'])
        .filter_map(|segment| segment.parse::<u64>().ok())
        .filter(|&n| (1..100_000).contains(&n))
        .collect()
}
933
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_extract_issue_numbers() {
        assert_eq!(extract_issue_numbers("fix/123-login-bug"), vec![123]);
        assert_eq!(extract_issue_numbers("feat/42"), vec![42]);
        assert_eq!(extract_issue_numbers("issue-7-auth"), vec![7]);
        assert_eq!(extract_issue_numbers("gh-99"), vec![99]);
        assert_eq!(extract_issue_numbers("main"), Vec::<u64>::new());
        assert_eq!(extract_issue_numbers("feat/no-number"), Vec::<u64>::new());
        assert_eq!(extract_issue_numbers("fix/0-test"), Vec::<u64>::new());
        // Multiple ids in one branch name are all collected, in order.
        assert_eq!(extract_issue_numbers("fix/12-and-34"), vec![12, 34]);
        // Upper bound is exclusive: 100_000 rejected, 99_999 accepted.
        assert_eq!(extract_issue_numbers("pr/100000"), Vec::<u64>::new());
        assert_eq!(extract_issue_numbers("pr/99999"), vec![99999]);
        // Empty input yields no numbers.
        assert_eq!(extract_issue_numbers(""), Vec::<u64>::new());
    }
}
949}