1use async_trait::async_trait;
4use devboy_core::{
5 AssetCapabilities, AssetMeta, CodePosition, Comment, ContextCapabilities, CreateCommentInput,
6 CreateIssueInput, CreateMergeRequestInput, Discussion, Error, FailedJob, FileDiff,
7 GetPipelineInput, Issue, IssueFilter, IssueProvider, JobLogMode, JobLogOptions, JobLogOutput,
8 MergeRequest, MergeRequestProvider, MrFilter, PipelineInfo, PipelineJob, PipelineProvider,
9 PipelineStage, PipelineStatus, PipelineSummary, Provider, ProviderResult, Result,
10 UpdateIssueInput, UpdateMergeRequestInput, User, parse_markdown_attachments,
11};
12use secrecy::{ExposeSecret, SecretString};
13use serde::Deserialize;
14use tracing::{debug, warn};
15
16use crate::DEFAULT_GITHUB_URL;
17use crate::types::{
18 CreateCommentRequest, CreateIssueRequest, CreatePullRequestRequest, CreateReviewCommentRequest,
19 GitHubComment, GitHubFile, GitHubIssue, GitHubLabel, GitHubPullRequest, GitHubReview,
20 GitHubReviewComment, GitHubUser, UpdateIssueRequest, UpdatePullRequestRequest,
21};
22
/// Client for the GitHub REST API, scoped to a single `owner/repo`.
pub struct GitHubClient {
    // API root, e.g. "https://api.github.com"; trailing '/' is stripped
    // in the constructor so endpoint paths can always start with '/'.
    base_url: String,
    // Repository owner (user or organization login).
    owner: String,
    // Repository name.
    repo: String,
    // Personal access token; may be empty, in which case requests are
    // sent without an Authorization header.
    token: SecretString,
    // Shared HTTP client (connection pooling, fixed user agent).
    client: reqwest::Client,
}
30
31impl GitHubClient {
32 pub fn new(owner: impl Into<String>, repo: impl Into<String>, token: SecretString) -> Self {
34 Self::with_base_url(DEFAULT_GITHUB_URL, owner, repo, token)
35 }
36
37 pub fn with_base_url(
39 base_url: impl Into<String>,
40 owner: impl Into<String>,
41 repo: impl Into<String>,
42 token: SecretString,
43 ) -> Self {
44 Self {
45 base_url: base_url.into().trim_end_matches('/').to_string(),
46 owner: owner.into(),
47 repo: repo.into(),
48 token,
49 client: reqwest::Client::builder()
50 .user_agent("devboy-tools")
51 .build()
52 .expect("Failed to create HTTP client"),
53 }
54 }
55
56 fn request(&self, method: reqwest::Method, url: &str) -> reqwest::RequestBuilder {
58 let mut builder = self
59 .client
60 .request(method, url)
61 .header("Accept", "application/vnd.github+json")
62 .header("X-GitHub-Api-Version", "2022-11-28");
63
64 let token = self.token.expose_secret();
65 if !token.is_empty() {
66 builder = builder.header("Authorization", format!("Bearer {}", token));
67 }
68
69 builder
70 }
71
72 async fn get<T: serde::de::DeserializeOwned>(&self, url: &str) -> Result<T> {
74 debug!(url = url, "GitHub GET request");
75
76 let response = self
77 .request(reqwest::Method::GET, url)
78 .send()
79 .await
80 .map_err(|e| Error::Http(e.to_string()))?;
81
82 self.handle_response(response).await
83 }
84
85 async fn post<T: serde::de::DeserializeOwned, B: serde::Serialize>(
87 &self,
88 url: &str,
89 body: &B,
90 ) -> Result<T> {
91 debug!(url = url, "GitHub POST request");
92
93 let response = self
94 .request(reqwest::Method::POST, url)
95 .json(body)
96 .send()
97 .await
98 .map_err(|e| Error::Http(e.to_string()))?;
99
100 self.handle_response(response).await
101 }
102
103 async fn patch<T: serde::de::DeserializeOwned, B: serde::Serialize>(
105 &self,
106 url: &str,
107 body: &B,
108 ) -> Result<T> {
109 debug!(url = url, "GitHub PATCH request");
110
111 let response = self
112 .request(reqwest::Method::PATCH, url)
113 .json(body)
114 .send()
115 .await
116 .map_err(|e| Error::Http(e.to_string()))?;
117
118 self.handle_response(response).await
119 }
120
121 async fn handle_response<T: serde::de::DeserializeOwned>(
123 &self,
124 response: reqwest::Response,
125 ) -> Result<T> {
126 let status = response.status();
127
128 if !status.is_success() {
129 let status_code = status.as_u16();
130 let message = response.text().await.unwrap_or_default();
131 warn!(
132 status = status_code,
133 message = message,
134 "GitHub API error response"
135 );
136 return Err(Error::from_status(status_code, message));
137 }
138
139 response
140 .json()
141 .await
142 .map_err(|e| Error::InvalidData(format!("Failed to parse response: {}", e)))
143 }
144
145 fn repo_url(&self, endpoint: &str) -> String {
147 format!(
148 "{}/repos/{}/{}{}",
149 self.base_url, self.owner, self.repo, endpoint
150 )
151 }
152}
153
154fn map_user(gh_user: Option<&GitHubUser>) -> Option<User> {
159 gh_user.map(|u| User {
160 id: u.id.to_string(),
161 username: u.login.clone(),
162 name: u.name.clone(),
163 email: u.email.clone(),
164 avatar_url: u.avatar_url.clone(),
165 })
166}
167
168fn map_user_required(gh_user: Option<&GitHubUser>) -> User {
169 map_user(gh_user).unwrap_or_else(|| User {
170 id: "unknown".to_string(),
171 username: "unknown".to_string(),
172 name: Some("Unknown".to_string()),
173 ..Default::default()
174 })
175}
176
177fn map_labels(labels: &[GitHubLabel]) -> Vec<String> {
178 labels.iter().map(|l| l.name.clone()).collect()
179}
180
/// Converts a GitHub issue into the provider-neutral [`Issue`] model.
fn map_issue(gh_issue: &GitHubIssue) -> Issue {
    // Count GitHub-hosted attachment links embedded in the body markdown;
    // `None` (rather than Some(0)) when there are none.
    // NOTE(review): the base URL is hard-coded to "https://github.com" here,
    // so on a GitHub Enterprise host this count may miss instance-local
    // attachment URLs — confirm whether that is intended.
    let attachments_count = gh_issue
        .body
        .as_deref()
        .map(|body| {
            parse_markdown_attachments(body)
                .iter()
                .filter(|a| is_github_attachment_url("https://github.com", &a.url))
                .count() as u32
        })
        .filter(|&c| c > 0);

    Issue {
        custom_fields: std::collections::HashMap::new(),
        key: format!("gh#{}", gh_issue.number),
        title: gh_issue.title.clone(),
        description: gh_issue.body.clone(),
        state: gh_issue.state.clone(),
        source: "github".to_string(),
        // GitHub issues have no native priority field.
        priority: None, labels: map_labels(&gh_issue.labels),
        author: map_user(gh_issue.user.as_ref()),
        assignees: gh_issue
            .assignees
            .iter()
            .map(|u| map_user_required(Some(u)))
            .collect(),
        url: Some(gh_issue.html_url.clone()),
        created_at: Some(gh_issue.created_at.clone()),
        updated_at: Some(gh_issue.updated_at.clone()),
        attachments_count,
        // No parent/subtask hierarchy is read from this endpoint.
        parent: None,
        subtasks: vec![],
    }
}
219
/// Converts a GitHub pull request into the provider-neutral [`MergeRequest`].
fn map_pull_request(gh_pr: &GitHubPullRequest) -> MergeRequest {
    // Collapse GitHub's (state, merged, draft) fields into one state string.
    // Order matters: merged wins over closed, and draft only applies to
    // still-open PRs.
    let state = if gh_pr.merged || gh_pr.merged_at.is_some() {
        "merged".to_string()
    } else if gh_pr.state == "closed" {
        "closed".to_string()
    } else if gh_pr.draft {
        "draft".to_string()
    } else {
        "open".to_string()
    };

    MergeRequest {
        key: format!("pr#{}", gh_pr.number),
        title: gh_pr.title.clone(),
        description: gh_pr.body.clone(),
        state,
        source: "github".to_string(),
        source_branch: gh_pr.head.ref_name.clone(),
        target_branch: gh_pr.base.ref_name.clone(),
        author: map_user(gh_pr.user.as_ref()),
        assignees: gh_pr
            .assignees
            .iter()
            .map(|u| map_user_required(Some(u)))
            .collect(),
        reviewers: gh_pr
            .requested_reviewers
            .iter()
            .map(|u| map_user_required(Some(u)))
            .collect(),
        labels: map_labels(&gh_pr.labels),
        draft: gh_pr.draft,
        url: Some(gh_pr.html_url.clone()),
        created_at: Some(gh_pr.created_at.clone()),
        updated_at: Some(gh_pr.updated_at.clone()),
    }
}
258
259fn map_comment(gh_comment: &GitHubComment) -> Comment {
260 Comment {
261 id: gh_comment.id.to_string(),
262 body: gh_comment.body.clone(),
263 author: map_user(gh_comment.user.as_ref()),
264 created_at: Some(gh_comment.created_at.clone()),
265 updated_at: gh_comment.updated_at.clone(),
266 position: None,
267 }
268}
269
/// Converts a GitHub inline review comment into the neutral [`Comment`]
/// model, including its code position when GitHub provides line info.
fn map_review_comment(gh_comment: &GitHubReviewComment) -> Comment {
    // Prefer the current line; fall back to the line as of when the comment
    // was created (GitHub keeps both when the diff shifts).
    let position = gh_comment
        .line
        .or(gh_comment.original_line)
        .map(|line| CodePosition {
            file_path: gh_comment.path.clone(),
            line,
            // GitHub's "LEFT" side is the old version of the file.
            line_type: gh_comment
                .side
                .as_ref()
                .map(|s| if s == "LEFT" { "old" } else { "new" })
                .unwrap_or("new")
                .to_string(),
            commit_sha: gh_comment
                .commit_id
                .clone()
                .or_else(|| gh_comment.original_commit_id.clone()),
        });

    Comment {
        id: gh_comment.id.to_string(),
        body: gh_comment.body.clone(),
        author: map_user(gh_comment.user.as_ref()),
        created_at: Some(gh_comment.created_at.clone()),
        updated_at: gh_comment.updated_at.clone(),
        position,
    }
}
298
299fn map_file(gh_file: &GitHubFile) -> FileDiff {
300 FileDiff {
301 file_path: gh_file.filename.clone(),
302 old_path: gh_file.previous_filename.clone(),
303 new_file: gh_file.status == "added",
304 deleted_file: gh_file.status == "removed",
305 renamed_file: gh_file.status == "renamed",
306 diff: gh_file.patch.clone().unwrap_or_default(),
307 additions: Some(gh_file.additions),
308 deletions: Some(gh_file.deletions),
309 }
310}
311
312#[async_trait]
317impl IssueProvider for GitHubClient {
318 async fn get_issues(&self, filter: IssueFilter) -> Result<ProviderResult<Issue>> {
319 let mut url = self.repo_url("/issues");
320 let mut params = vec![];
321
322 if let Some(state) = &filter.state {
324 let gh_state = match state.as_str() {
325 "opened" | "open" => "open",
326 "closed" => "closed",
327 "all" => "all",
328 _ => "open",
329 };
330 params.push(format!("state={}", gh_state));
331 }
332
333 if let Some(labels) = &filter.labels
334 && !labels.is_empty()
335 {
336 params.push(format!("labels={}", labels.join(",")));
337 }
338
339 if let Some(assignee) = &filter.assignee {
340 params.push(format!("assignee={}", assignee));
341 }
342
343 if let Some(limit) = filter.limit {
344 params.push(format!("per_page={}", limit.min(100)));
345 }
346
347 if let Some(offset) = filter.offset {
348 let per_page = filter.limit.unwrap_or(30);
350 let page = (offset / per_page) + 1;
351 params.push(format!("page={}", page));
352 }
353
354 if let Some(sort_by) = &filter.sort_by {
355 let gh_sort = match sort_by.as_str() {
356 "created_at" | "created" => "created",
357 "updated_at" | "updated" => "updated",
358 _ => "updated",
359 };
360 params.push(format!("sort={}", gh_sort));
361 }
362
363 if let Some(order) = &filter.sort_order {
364 params.push(format!("direction={}", order));
365 }
366
367 if !params.is_empty() {
368 url.push_str(&format!("?{}", params.join("&")));
369 }
370
371 let gh_issues: Vec<GitHubIssue> = self.get(&url).await?;
372
373 let issues: Vec<Issue> = gh_issues
375 .iter()
376 .filter(|i| i.pull_request.is_none())
377 .map(map_issue)
378 .collect();
379
380 Ok(issues.into())
381 }
382
383 async fn get_issue(&self, key: &str) -> Result<Issue> {
384 let number = parse_issue_key(key)?;
385 let url = self.repo_url(&format!("/issues/{}", number));
386 let gh_issue: GitHubIssue = self.get(&url).await?;
387
388 if gh_issue.pull_request.is_some() {
390 return Err(Error::InvalidData(format!(
391 "{} is a pull request, not an issue",
392 key
393 )));
394 }
395
396 Ok(map_issue(&gh_issue))
397 }
398
399 async fn create_issue(&self, input: CreateIssueInput) -> Result<Issue> {
400 let url = self.repo_url("/issues");
401 let request = CreateIssueRequest {
402 title: input.title,
403 body: input.description,
404 labels: input.labels,
405 assignees: input.assignees,
406 };
407
408 let gh_issue: GitHubIssue = self.post(&url, &request).await?;
409 Ok(map_issue(&gh_issue))
410 }
411
412 async fn update_issue(&self, key: &str, input: UpdateIssueInput) -> Result<Issue> {
413 let number = parse_issue_key(key)?;
414 let url = self.repo_url(&format!("/issues/{}", number));
415
416 let state = input.state.map(|s| match s.as_str() {
418 "opened" | "open" => "open".to_string(),
419 "closed" => "closed".to_string(),
420 _ => s,
421 });
422
423 let request = UpdateIssueRequest {
424 title: input.title,
425 body: input.description,
426 state,
427 labels: input.labels,
428 assignees: input.assignees,
429 };
430
431 let gh_issue: GitHubIssue = self.patch(&url, &request).await?;
432 Ok(map_issue(&gh_issue))
433 }
434
435 async fn get_comments(&self, issue_key: &str) -> Result<ProviderResult<Comment>> {
436 let number = parse_issue_key(issue_key)?;
437 let url = self.repo_url(&format!("/issues/{}/comments", number));
438 let gh_comments: Vec<GitHubComment> = self.get(&url).await?;
439 Ok(gh_comments
440 .iter()
441 .map(map_comment)
442 .collect::<Vec<_>>()
443 .into())
444 }
445
446 async fn add_comment(&self, issue_key: &str, body: &str) -> Result<Comment> {
447 let number = parse_issue_key(issue_key)?;
448 let url = self.repo_url(&format!("/issues/{}/comments", number));
449 let request = CreateCommentRequest {
450 body: body.to_string(),
451 };
452
453 let gh_comment: GitHubComment = self.post(&url, &request).await?;
454 Ok(map_comment(&gh_comment))
455 }
456
457 async fn get_issue_attachments(&self, issue_key: &str) -> Result<Vec<AssetMeta>> {
458 let issue = self.get_issue(issue_key).await?;
461 let comments = self.get_comments(issue_key).await?;
462
463 let mut attachments: Vec<AssetMeta> = Vec::new();
464 let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
465 let base = self.base_url.clone();
466 let mut collect = |source: &str| {
467 for att in parse_markdown_attachments(source) {
468 if is_github_attachment_url(&base, &att.url) && seen.insert(att.url.clone()) {
472 attachments.push(markdown_to_meta(&att));
473 }
474 }
475 };
476 if let Some(body) = issue.description.as_deref() {
477 collect(body);
478 }
479 for comment in &comments.items {
480 collect(&comment.body);
481 }
482 Ok(attachments)
483 }
484
485 async fn download_attachment(&self, _issue_key: &str, asset_id: &str) -> Result<Vec<u8>> {
486 download_github_url(&self.client, &self.base_url, &self.token, asset_id).await
487 }
488
489 fn asset_capabilities(&self) -> AssetCapabilities {
490 let caps = ContextCapabilities {
494 upload: false,
495 download: true,
496 delete: false,
497 list: true,
498 max_file_size: None,
499 allowed_types: Vec::new(),
500 };
501 AssetCapabilities {
502 issue: caps.clone(),
503 issue_comment: caps.clone(),
504 merge_request: caps.clone(),
505 mr_comment: caps,
506 }
507 }
508
509 fn provider_name(&self) -> &'static str {
510 "github"
511 }
512}
513
514#[async_trait]
515impl MergeRequestProvider for GitHubClient {
516 async fn get_merge_requests(&self, filter: MrFilter) -> Result<ProviderResult<MergeRequest>> {
517 let mut url = self.repo_url("/pulls");
518 let mut params = vec![];
519
520 if let Some(state) = &filter.state {
522 let gh_state = match state.as_str() {
523 "opened" | "open" => "open",
524 "closed" => "closed",
525 "merged" => "closed", "all" => "all",
527 _ => "open",
528 };
529 params.push(format!("state={}", gh_state));
530 }
531
532 if let Some(source_branch) = &filter.source_branch {
533 params.push(format!("head={}", source_branch));
534 }
535
536 if let Some(target_branch) = &filter.target_branch {
537 params.push(format!("base={}", target_branch));
538 }
539
540 if let Some(limit) = filter.limit {
541 params.push(format!("per_page={}", limit.min(100)));
542 }
543
544 params.push("sort=updated".to_string());
545 params.push("direction=desc".to_string());
546
547 if !params.is_empty() {
548 url.push_str(&format!("?{}", params.join("&")));
549 }
550
551 let gh_prs: Vec<GitHubPullRequest> = self.get(&url).await?;
552
553 let mut prs: Vec<MergeRequest> = gh_prs.iter().map(map_pull_request).collect();
554
555 if filter.state.as_deref() == Some("merged") {
557 prs.retain(|pr| pr.state == "merged");
558 }
559
560 Ok(prs.into())
561 }
562
563 async fn get_merge_request(&self, key: &str) -> Result<MergeRequest> {
564 let number = parse_pr_key(key)?;
565 let url = self.repo_url(&format!("/pulls/{}", number));
566 let gh_pr: GitHubPullRequest = self.get(&url).await?;
567 Ok(map_pull_request(&gh_pr))
568 }
569
570 async fn get_discussions(&self, mr_key: &str) -> Result<ProviderResult<Discussion>> {
571 let number = parse_pr_key(mr_key)?;
572
573 let reviews_url = self.repo_url(&format!("/pulls/{}/reviews", number));
575 let review_comments_url = self.repo_url(&format!("/pulls/{}/comments", number));
576 let issue_comments_url = self.repo_url(&format!("/issues/{}/comments", number));
577
578 let reviews: Vec<GitHubReview> = self.get(&reviews_url).await?;
579 let review_comments: Vec<GitHubReviewComment> = self.get(&review_comments_url).await?;
580 let issue_comments: Vec<GitHubComment> = self.get(&issue_comments_url).await?;
581
582 let mut discussions = Vec::new();
583
584 let mut comment_threads: std::collections::HashMap<u64, Vec<&GitHubReviewComment>> =
586 std::collections::HashMap::new();
587
588 for comment in &review_comments {
589 let thread_id = comment.in_reply_to_id.unwrap_or(comment.id);
590 comment_threads.entry(thread_id).or_default().push(comment);
591 }
592
593 for (thread_id, comments) in comment_threads {
595 let mapped_comments: Vec<Comment> =
596 comments.iter().map(|c| map_review_comment(c)).collect();
597 let position = mapped_comments.first().and_then(|c| c.position.clone());
598
599 discussions.push(Discussion {
600 id: format!("thread-{}", thread_id),
601 resolved: false, resolved_by: None,
603 comments: mapped_comments,
604 position,
605 });
606 }
607
608 for review in &reviews {
610 let mut comments = Vec::new();
611 if let Some(body) = &review.body
612 && !body.is_empty()
613 {
614 comments.push(Comment {
615 id: review.id.to_string(),
616 body: body.clone(),
617 author: map_user(review.user.as_ref()),
618 created_at: review.submitted_at.clone(),
619 updated_at: None,
620 position: None,
621 });
622 }
623
624 if !comments.is_empty() || !review.state.is_empty() {
625 discussions.push(Discussion {
626 id: format!("review-{}", review.id),
627 resolved: false,
628 resolved_by: None,
629 comments,
630 position: None,
631 });
632 }
633 }
634
635 for comment in &issue_comments {
637 discussions.push(Discussion {
638 id: format!("comment-{}", comment.id),
639 resolved: false,
640 resolved_by: None,
641 comments: vec![map_comment(comment)],
642 position: None,
643 });
644 }
645
646 Ok(discussions.into())
647 }
648
649 async fn get_diffs(&self, mr_key: &str) -> Result<ProviderResult<FileDiff>> {
650 let number = parse_pr_key(mr_key)?;
651 let url = self.repo_url(&format!("/pulls/{}/files", number));
652 let gh_files: Vec<GitHubFile> = self.get(&url).await?;
653 Ok(gh_files.iter().map(map_file).collect::<Vec<_>>().into())
654 }
655
656 async fn add_comment(&self, mr_key: &str, input: CreateCommentInput) -> Result<Comment> {
657 let number = parse_pr_key(mr_key)?;
658
659 let pr_url = self.repo_url(&format!("/pulls/{}", number));
661 let pr_result: Result<GitHubPullRequest> = self.get(&pr_url).await;
662
663 if let Err(Error::Http(status)) = &pr_result
664 && status.contains("404")
665 {
666 return Err(Error::InvalidData(format!(
667 "{} is not a valid pull request (it may be an issue)",
668 mr_key
669 )));
670 }
671
672 let pr: GitHubPullRequest = pr_result?;
674
675 if let Some(position) = &input.position {
677 let url = self.repo_url(&format!("/pulls/{}/comments", number));
678
679 let commit_sha = if let Some(sha) = &position.commit_sha {
681 sha.clone()
682 } else {
683 pr.head.sha
685 };
686
687 let request = CreateReviewCommentRequest {
688 body: input.body,
689 commit_id: commit_sha,
690 path: position.file_path.clone(),
691 line: Some(position.line),
692 side: Some(if position.line_type == "old" {
693 "LEFT".to_string()
694 } else {
695 "RIGHT".to_string()
696 }),
697 in_reply_to: input
705 .discussion_id
706 .as_deref()
707 .and_then(parse_discussion_numeric_id),
708 };
709
710 let gh_comment: GitHubReviewComment = self.post(&url, &request).await?;
711 return Ok(map_review_comment(&gh_comment));
712 }
713
714 let url = self.repo_url(&format!("/issues/{}/comments", number));
716 let request = CreateCommentRequest { body: input.body };
717
718 let gh_comment: GitHubComment = self.post(&url, &request).await?;
719 Ok(map_comment(&gh_comment))
720 }
721
722 async fn create_merge_request(&self, input: CreateMergeRequestInput) -> Result<MergeRequest> {
723 let url = self.repo_url("/pulls");
724
725 let request = CreatePullRequestRequest {
726 title: input.title,
727 body: input.description,
728 head: input.source_branch,
729 base: input.target_branch,
730 draft: if input.draft { Some(true) } else { None },
731 };
732
733 let gh_pr: GitHubPullRequest = self.post(&url, &request).await?;
734
735 if !input.labels.is_empty() {
737 let labels_url = self.repo_url(&format!("/issues/{}/labels", gh_pr.number));
738 let result: Result<serde_json::Value> = self
739 .post(&labels_url, &serde_json::json!({ "labels": input.labels }))
740 .await;
741 if let Err(err) = result {
742 warn!(
743 error = ?err,
744 pr_number = gh_pr.number,
745 "Failed to add labels to GitHub pull request"
746 );
747 }
748 }
749
750 if !input.reviewers.is_empty() {
752 let reviewers_url =
753 self.repo_url(&format!("/pulls/{}/requested_reviewers", gh_pr.number));
754 let result: Result<serde_json::Value> = self
755 .post(
756 &reviewers_url,
757 &serde_json::json!({ "reviewers": input.reviewers }),
758 )
759 .await;
760 if let Err(err) = result {
761 warn!(
762 error = ?err,
763 pr_number = gh_pr.number,
764 "Failed to add reviewers to GitHub pull request"
765 );
766 }
767 }
768
769 if !input.labels.is_empty() || !input.reviewers.is_empty() {
771 let pr_url = self.repo_url(&format!("/pulls/{}", gh_pr.number));
772 match self.get::<GitHubPullRequest>(&pr_url).await {
773 Ok(updated_pr) => return Ok(map_pull_request(&updated_pr)),
774 Err(err) => {
775 warn!(
776 error = ?err,
777 pr_number = gh_pr.number,
778 "Failed to re-fetch GitHub pull request"
779 );
780 }
781 }
782 }
783
784 Ok(map_pull_request(&gh_pr))
785 }
786
787 async fn update_merge_request(
788 &self,
789 key: &str,
790 input: UpdateMergeRequestInput,
791 ) -> Result<MergeRequest> {
792 let number = parse_pr_key(key)?;
793 let url = self.repo_url(&format!("/pulls/{}", number));
794
795 let state = input.state.map(|s| match s.as_str() {
797 "opened" | "open" | "reopen" => "open".to_string(),
798 "closed" | "close" => "closed".to_string(),
799 _ => s,
800 });
801
802 let request = UpdatePullRequestRequest {
803 title: input.title,
804 body: input.description,
805 state,
806 draft: input.draft,
807 };
808
809 let gh_pr: GitHubPullRequest = self.patch(&url, &request).await?;
810
811 if let Some(labels) = input.labels {
813 let labels_url = self.repo_url(&format!("/issues/{}/labels", number));
814 let result: Result<serde_json::Value> = self
815 .patch(&labels_url, &serde_json::json!({ "labels": labels }))
816 .await;
817 if let Err(err) = result {
818 warn!(
819 error = ?err,
820 pr_number = number,
821 "Failed to update labels on GitHub pull request"
822 );
823 }
824
825 let pr_url = self.repo_url(&format!("/pulls/{}", number));
827 match self.get::<GitHubPullRequest>(&pr_url).await {
828 Ok(updated_pr) => return Ok(map_pull_request(&updated_pr)),
829 Err(err) => {
830 warn!(
831 error = ?err,
832 pr_number = number,
833 "Failed to re-fetch GitHub pull request"
834 );
835 }
836 }
837 }
838
839 Ok(map_pull_request(&gh_pr))
840 }
841
842 async fn get_mr_attachments(&self, mr_key: &str) -> Result<Vec<AssetMeta>> {
843 let mr = self.get_merge_request(mr_key).await?;
844 let discussions = self.get_discussions(mr_key).await?;
845
846 let mut attachments: Vec<AssetMeta> = Vec::new();
847 let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
848 let base = self.base_url.clone();
849 let mut collect = |source: &str| {
850 for att in parse_markdown_attachments(source) {
851 if is_github_attachment_url(&base, &att.url) && seen.insert(att.url.clone()) {
852 attachments.push(markdown_to_meta(&att));
853 }
854 }
855 };
856 if let Some(body) = mr.description.as_deref() {
857 collect(body);
858 }
859 for discussion in &discussions.items {
860 for comment in &discussion.comments {
861 collect(&comment.body);
862 }
863 }
864 Ok(attachments)
865 }
866
867 async fn download_mr_attachment(&self, _mr_key: &str, asset_id: &str) -> Result<Vec<u8>> {
868 download_github_url(&self.client, &self.base_url, &self.token, asset_id).await
869 }
870
871 fn provider_name(&self) -> &'static str {
872 "github"
873 }
874}
875
/// Hosts (matched exactly or as `.{host}` subdomains by
/// `is_github_trusted_host`) that GitHub serves content from; downloads to
/// any other host are logged as cross-origin.
const GITHUB_TRUSTED_HOSTS: &[&str] = &[
    "github.com",
    "api.github.com",
    "githubusercontent.com",
    "user-images.githubusercontent.com",
    "raw.githubusercontent.com",
    "objects.githubusercontent.com",
    "camo.githubusercontent.com",
];
890
891async fn download_github_url(
896 client: &reqwest::Client,
897 base_url: &str,
898 token: &SecretString,
899 url: &str,
900) -> Result<Vec<u8>> {
901 let needs_auth = is_github_api_host(base_url, url);
902 let mut request = client
903 .get(url)
904 .header("Accept", "application/octet-stream")
905 .header("User-Agent", "devboy-tools");
906 let token_value = token.expose_secret();
907 if needs_auth && !token_value.is_empty() {
908 request = request.header("Authorization", format!("Bearer {token_value}"));
909 } else if !is_github_trusted_host(base_url, url) {
910 tracing::warn!(
911 url,
912 "downloading cross-origin attachment without auth headers"
913 );
914 }
915 let response = request
916 .send()
917 .await
918 .map_err(|e| Error::Http(e.to_string()))?;
919 let status = response.status();
920 if !status.is_success() {
921 let message = response.text().await.unwrap_or_default();
922 return Err(Error::from_status(status.as_u16(), message));
923 }
924 let bytes = response
925 .bytes()
926 .await
927 .map_err(|e| Error::Http(format!("failed to read attachment bytes: {e}")))?;
928 Ok(bytes.to_vec())
929}
930
931fn is_github_api_host(base_url: &str, url: &str) -> bool {
936 let (url_scheme, url_host) = split_scheme_host(url);
937 if url_scheme != "https" {
938 return false;
939 }
940 if url_host == "api.github.com" || url_host == "github.com" {
942 return true;
943 }
944 let (_base_scheme, base_host) = split_scheme_host(base_url);
946 url_host == base_host
947}
948
949fn is_github_trusted_host(base_url: &str, url: &str) -> bool {
955 let (url_scheme, url_host) = split_scheme_host(url);
956 if url_scheme != "https" {
957 return false;
958 }
959
960 for trusted in GITHUB_TRUSTED_HOSTS {
962 if url_host == *trusted || url_host.ends_with(&format!(".{trusted}")) {
963 return true;
964 }
965 }
966
967 let (_base_scheme, base_host) = split_scheme_host(base_url);
969 url_host == base_host
970}
971
/// Splits a URL into its lowercased `(scheme, host)` pair.
///
/// Returns two empty strings when the input has no `scheme://` separator.
/// The host is whatever precedes the first '/' after the separator (any
/// port stays attached to the host).
fn split_scheme_host(url: &str) -> (String, String) {
    match url.split_once("://") {
        None => (String::new(), String::new()),
        Some((scheme, remainder)) => {
            let host = remainder
                .split('/')
                .next()
                .unwrap_or("")
                .to_ascii_lowercase();
            (scheme.to_ascii_lowercase(), host)
        }
    }
}

/// Heuristic for whether a markdown link points at a GitHub-hosted
/// attachment: a githubusercontent.com CDN URL, a github.com
/// `user-attachments` URL, or an assets path on the configured base host.
fn is_github_attachment_url(base_url: &str, url: &str) -> bool {
    /// Path portion after "scheme://host/", or "" when absent.
    fn path_of(url: &str) -> &str {
        url.split("://")
            .nth(1)
            .unwrap_or("")
            .split_once('/')
            .map(|(_, path)| path)
            .unwrap_or("")
    }

    let (scheme, host) = split_scheme_host(url);
    if scheme.is_empty() {
        return false;
    }
    // Exact host or a true `.`-separated subdomain; a bare `ends_with`
    // would also accept lookalike hosts such as "evilgithubusercontent.com".
    if host == "githubusercontent.com" || host.ends_with(".githubusercontent.com") {
        return true;
    }
    if host == "github.com" {
        let path = path_of(url);
        if path.starts_with("user-attachments/assets/")
            || path.starts_with("user-attachments/files/")
        {
            return true;
        }
    }
    let (_base_scheme, base_host) = split_scheme_host(base_url);
    if host == base_host {
        return path_of(url).contains("/assets/");
    }
    false
}
1027
/// Builds an [`AssetMeta`] from a parsed markdown attachment link.
///
/// The URL doubles as the asset id (it is what the download methods receive
/// back as `asset_id`); metadata that would require extra requests
/// (mime type, size, author, ...) is left unset.
fn markdown_to_meta(att: &devboy_core::MarkdownAttachment) -> AssetMeta {
    AssetMeta {
        id: att.url.clone(),
        filename: att.filename.clone(),
        mime_type: None,
        size: None,
        url: Some(att.url.clone()),
        created_at: None,
        author: None,
        cached: false,
        local_path: None,
        checksum_sha256: None,
        analysis: None,
    }
}
1043
/// Minimal deserialization target for a GitHub Actions workflow run;
/// fields not listed here are ignored by serde.
#[derive(Debug, Deserialize)]
struct GhWorkflowRun {
    id: u64,
    name: Option<String>,
    // Run lifecycle status; interpreted by `map_gh_status` together with
    // `conclusion`.
    status: Option<String>,
    conclusion: Option<String>,
    #[allow(dead_code)]
    head_branch: Option<String>,
    head_sha: String,
    html_url: String,
    run_started_at: Option<String>,
    updated_at: Option<String>,
}

/// Envelope matching the `{ "workflow_runs": [...] }` shape of the
/// list-workflow-runs response.
#[derive(Debug, Deserialize)]
struct GhWorkflowRuns {
    workflow_runs: Vec<GhWorkflowRun>,
}

/// Minimal deserialization target for a single workflow job.
#[derive(Debug, Deserialize)]
struct GhJob {
    id: u64,
    name: String,
    // Interpreted by `map_gh_status` together with `conclusion`.
    status: Option<String>,
    conclusion: Option<String>,
    html_url: Option<String>,
    started_at: Option<String>,
    completed_at: Option<String>,
}

/// Envelope matching the `{ "jobs": [...] }` shape of the list-jobs response.
#[derive(Debug, Deserialize)]
struct GhJobs {
    jobs: Vec<GhJob>,
}
1086
1087fn map_gh_status(status: Option<&str>, conclusion: Option<&str>) -> PipelineStatus {
1088 match (status, conclusion) {
1089 (Some("completed"), Some("success")) => PipelineStatus::Success,
1090 (Some("completed"), Some("failure")) => PipelineStatus::Failed,
1091 (Some("completed"), Some("cancelled")) => PipelineStatus::Canceled,
1092 (Some("completed"), Some("skipped")) => PipelineStatus::Skipped,
1093 (Some("in_progress"), _) => PipelineStatus::Running,
1094 (Some("queued"), _) | (Some("waiting"), _) => PipelineStatus::Pending,
1095 _ => PipelineStatus::Unknown,
1096 }
1097}
1098
1099fn estimate_duration(started: Option<&str>, completed: Option<&str>) -> Option<u64> {
1100 let start = started?.parse::<chrono::DateTime<chrono::Utc>>().ok()?;
1101 let end = completed?.parse::<chrono::DateTime<chrono::Utc>>().ok()?;
1102 Some(
1103 end.signed_duration_since(start)
1104 .num_seconds()
1105 .unsigned_abs(),
1106 )
1107}
1108
/// Removes ANSI escape sequences from CI log text.
///
/// On seeing ESC (0x1b), everything up to and including the next ASCII
/// letter is dropped — enough for common SGR sequences like "\x1b[31m".
fn strip_ansi(text: &str) -> String {
    let mut cleaned = String::with_capacity(text.len());
    let mut stream = text.chars();
    while let Some(current) = stream.next() {
        if current != '\x1b' {
            cleaned.push(current);
            continue;
        }
        // Skip the escape payload up to and including its final letter.
        for escaped in stream.by_ref() {
            if escaped.is_ascii_alphabetic() {
                break;
            }
        }
    }
    cleaned
}

/// Pulls the most relevant error lines out of a CI log.
///
/// Scans for common error markers; each hit contributes itself plus up to
/// two lines of context on either side (ANSI-stripped, trimmed, and
/// deduplicated), stopping once roughly `max_lines` have been collected.
/// When nothing matches, falls back to the last 10 non-empty lines.
/// Returns `None` only for an effectively empty log.
fn extract_errors(log: &str, max_lines: usize) -> Option<String> {
    const PATTERNS: [&str; 11] = [
        "error[",
        "error:",
        "FAILED",
        "Error:",
        "panic",
        "FATAL",
        "AssertionError",
        "TypeError",
        "Cannot find",
        "not found",
        "exit code",
    ];

    let all_lines: Vec<&str> = log.lines().collect();
    let mut collected: Vec<String> = Vec::new();

    'scan: for (idx, raw) in all_lines.iter().enumerate() {
        let cleaned = strip_ansi(raw);
        if !PATTERNS.iter().any(|pat| cleaned.contains(pat)) {
            continue;
        }
        let from = idx.saturating_sub(2);
        let to = (idx + 3).min(all_lines.len());
        for context_raw in &all_lines[from..to] {
            let context = strip_ansi(context_raw).trim().to_string();
            if !context.is_empty() && !collected.contains(&context) {
                collected.push(context);
            }
        }
        if collected.len() >= max_lines {
            break 'scan;
        }
    }

    if !collected.is_empty() {
        return Some(collected.join("\n"));
    }

    // Fallback: the last 10 non-empty lines, restored to original order.
    let mut tail: Vec<String> = all_lines
        .iter()
        .rev()
        .filter_map(|raw| {
            let cleaned = strip_ansi(raw).trim().to_string();
            (!cleaned.is_empty()).then_some(cleaned)
        })
        .take(10)
        .collect();
    if tail.is_empty() {
        None
    } else {
        tail.reverse();
        Some(tail.join("\n"))
    }
}
1185
#[async_trait]
impl PipelineProvider for GitHubClient {
    fn provider_name(&self) -> &'static str {
        "github"
    }

    /// Resolve the most relevant GitHub Actions workflow run for a branch (or
    /// for a PR's head branch) and map it into a provider-agnostic
    /// [`PipelineInfo`]: overall status, per-job breakdown, a roll-up summary,
    /// and (optionally) error snippets from up to 5 failed jobs.
    async fn get_pipeline(&self, input: GetPipelineInput) -> Result<PipelineInfo> {
        // Branch resolution precedence: explicit PR key -> explicit branch ->
        // hard-coded "main" fallback.
        // NOTE(review): repos whose default branch is not "main" will get a
        // NotFound here when neither mr_key nor branch is given — consider
        // resolving the default branch from the repo metadata instead.
        let branch = if let Some(ref mr_key) = input.mr_key {
            // "pr#<n>" -> look up the PR to find its head branch.
            let number = parse_pr_key(mr_key)?;
            let pr_url = self.repo_url(&format!("/pulls/{number}"));
            let pr: GitHubPullRequest = self.get(&pr_url).await?;
            pr.head.ref_name
        } else if let Some(ref branch) = input.branch {
            branch.clone()
        } else {
            "main".to_string()
        };

        // Latest *completed* run on the branch (per_page=1 => newest only).
        let runs_url = self.repo_url(&format!(
            "/actions/runs?branch={}&per_page=1&status=completed",
            urlencoding::encode(&branch)
        ));
        let runs: GhWorkflowRuns = self.get(&runs_url).await?;

        // Latest *in-progress* run; failures here degrade to "no active runs"
        // rather than erroring, since an active run is a bonus, not required.
        let active_runs_url = self.repo_url(&format!(
            "/actions/runs?branch={}&per_page=1&status=in_progress",
            urlencoding::encode(&branch)
        ));
        let active_runs: GhWorkflowRuns =
            self.get(&active_runs_url).await.unwrap_or(GhWorkflowRuns {
                workflow_runs: vec![],
            });

        // Prefer an active run over the last completed one (chain puts active
        // first); error only if neither query produced a run.
        let run = active_runs
            .workflow_runs
            .into_iter()
            .chain(runs.workflow_runs)
            .next()
            .ok_or_else(|| {
                Error::NotFound(format!("No workflow runs found for branch '{branch}'"))
            })?;

        let run_status = map_gh_status(run.status.as_deref(), run.conclusion.as_deref());

        // per_page=100 is GitHub's maximum; runs with more jobs than that
        // would be truncated here (no pagination follow-up).
        let jobs_url = self.repo_url(&format!("/actions/runs/{}/jobs?per_page=100", run.id));
        let gh_jobs: GhJobs = self.get(&jobs_url).await?;

        let mut summary = PipelineSummary {
            total: gh_jobs.jobs.len() as u32,
            ..Default::default()
        };

        let mut jobs: Vec<PipelineJob> = Vec::new();
        // (id, name) pairs of failed jobs, kept for the optional log fetch below.
        let mut failed_job_ids: Vec<(u64, String)> = Vec::new();

        for job in &gh_jobs.jobs {
            let status = map_gh_status(job.status.as_deref(), job.conclusion.as_deref());
            // Tally each job into the summary; Unknown is deliberately not
            // counted in any bucket (it still contributes to `total`).
            match status {
                PipelineStatus::Success => summary.success += 1,
                PipelineStatus::Failed => {
                    summary.failed += 1;
                    failed_job_ids.push((job.id, job.name.clone()));
                }
                PipelineStatus::Running => summary.running += 1,
                PipelineStatus::Pending => summary.pending += 1,
                PipelineStatus::Canceled => summary.canceled += 1,
                PipelineStatus::Skipped => summary.skipped += 1,
                PipelineStatus::Unknown => {}
            }

            let duration =
                estimate_duration(job.started_at.as_deref(), job.completed_at.as_deref());

            jobs.push(PipelineJob {
                id: job.id.to_string(),
                name: job.name.clone(),
                status,
                url: job.html_url.clone(),
                duration,
            });
        }

        let mut failed_jobs: Vec<FailedJob> = Vec::new();
        if input.include_failed_logs {
            // Cap log fetches at 5 failed jobs to bound latency and API usage.
            for (job_id, job_name) in failed_job_ids.iter().take(5) {
                let log_url = self.repo_url(&format!("/actions/jobs/{job_id}/logs"));
                // Best-effort: any transport or HTTP failure simply yields no
                // snippet rather than failing the whole pipeline lookup.
                let error_snippet = match self.request(reqwest::Method::GET, &log_url).send().await
                {
                    Ok(resp) if resp.status().is_success() => {
                        let log_text = resp.text().await.unwrap_or_default();
                        extract_errors(&log_text, 20)
                    }
                    _ => None,
                };
                failed_jobs.push(FailedJob {
                    id: job_id.to_string(),
                    name: job_name.clone(),
                    // NOTE(review): the job's html_url is available in
                    // `gh_jobs` — could be threaded through instead of None.
                    url: None,
                    error_snippet,
                });
            }
        }

        // Run-level duration approximated from start to last update; for
        // in-progress runs this is effectively "elapsed so far".
        let duration = estimate_duration(run.run_started_at.as_deref(), run.updated_at.as_deref());

        let stage_name = run.name.unwrap_or_else(|| "CI".to_string());

        Ok(PipelineInfo {
            id: run.id.to_string(),
            status: run_status,
            reference: branch,
            sha: run.head_sha,
            url: Some(run.html_url),
            duration,
            coverage: None,
            summary,
            // GitHub Actions has no stage concept; expose one synthetic stage
            // named after the workflow holding all jobs.
            stages: vec![PipelineStage {
                name: stage_name,
                jobs,
            }],
            failed_jobs,
        })
    }

    /// Fetch the raw log of a single Actions job and render it according to
    /// `options.mode` (smart error extraction, regex search with context,
    /// pagination, or truncated full output). ANSI escape codes are stripped
    /// before any mode-specific processing.
    async fn get_job_logs(&self, job_id: &str, options: JobLogOptions) -> Result<JobLogOutput> {
        let log_url = self.repo_url(&format!("/actions/jobs/{job_id}/logs"));
        let resp = self
            .request(reqwest::Method::GET, &log_url)
            .send()
            .await
            .map_err(|e| Error::Network(e.to_string()))?;

        if !resp.status().is_success() {
            return Err(Error::from_status(
                resp.status().as_u16(),
                format!("Failed to fetch job logs for job {job_id}"),
            ));
        }

        let content_type = resp
            .headers()
            .get("content-type")
            .and_then(|v| v.to_str().ok())
            .unwrap_or("")
            .to_string();

        // GitHub serves very large logs as a ZIP archive; this client does
        // not unpack archives, so surface an actionable error instead.
        let raw_log = if content_type.contains("application/zip")
            || content_type.contains("application/octet-stream")
        {
            return Err(Error::InvalidData(
                "Job logs returned as ZIP archive. This typically happens for large logs. \
                 Try using pattern search mode to find specific errors."
                    .to_string(),
            ));
        } else {
            resp.text()
                .await
                .map_err(|e| Error::Network(e.to_string()))?
        };
        let log = strip_ansi(&raw_log);
        let lines: Vec<&str> = log.lines().collect();
        let total_lines = lines.len();

        let (content, mode_name) = match options.mode {
            JobLogMode::Smart => {
                // Heuristic error extraction; falls back to the last 20
                // non-rendered lines (rev/take/rev preserves original order).
                let extracted = extract_errors(&log, 30).unwrap_or_else(|| {
                    lines
                        .iter()
                        .rev()
                        .take(20)
                        .copied()
                        .collect::<Vec<_>>()
                        .into_iter()
                        .rev()
                        .collect::<Vec<_>>()
                        .join("\n")
                });
                (extracted, "smart")
            }
            JobLogMode::Search {
                ref pattern,
                context,
                max_matches,
            } => {
                // Treat the pattern as a regex; if it fails to compile, fall
                // back to matching it literally (escaped).
                let re = regex::Regex::new(pattern)
                    .unwrap_or_else(|_| regex::Regex::new(&regex::escape(pattern)).unwrap());
                let mut matches = Vec::new();
                for (i, line) in lines.iter().enumerate() {
                    if re.is_match(line) {
                        let start = i.saturating_sub(context);
                        let end = (i + context + 1).min(total_lines);
                        matches.push(format!("--- Match at line {} ---", i + 1));
                        for (j, ctx_line) in lines[start..end].iter().enumerate() {
                            let line_num = start + j;
                            // ">>>" flags the matching line within its context.
                            let marker = if line_num == i { ">>>" } else { "   " };
                            matches.push(format!("{} {}: {}", marker, line_num + 1, ctx_line));
                        }
                        // NOTE(review): max_matches is enforced by dividing
                        // the output-line count by the nominal block size
                        // (context*2 + 2); blocks clipped at the log edges
                        // are shorter, so this can slightly over-report
                        // before stopping. Counting matches directly would
                        // be exact.
                        if matches.len() / (context * 2 + 2) >= max_matches {
                            break;
                        }
                    }
                }
                (matches.join("\n"), "search")
            }
            JobLogMode::Paginated { offset, limit } => {
                let page: Vec<&str> = lines.iter().skip(offset).take(limit).copied().collect();
                (page.join("\n"), "paginated")
            }
            JobLogMode::Full { max_lines } => {
                let truncated: Vec<&str> = lines.iter().take(max_lines).copied().collect();
                (truncated.join("\n"), "full")
            }
        };

        Ok(JobLogOutput {
            job_id: job_id.to_string(),
            job_name: None,
            content,
            mode: mode_name.to_string(),
            total_lines: Some(total_lines),
        })
    }
}
1422
1423#[async_trait]
1424impl Provider for GitHubClient {
1425 async fn get_current_user(&self) -> Result<User> {
1426 let url = format!("{}/user", self.base_url);
1427 let gh_user: GitHubUser = self.get(&url).await?;
1428 Ok(map_user_required(Some(&gh_user)))
1429 }
1430}
1431
1432fn parse_issue_key(key: &str) -> Result<u64> {
1438 key.strip_prefix("gh#")
1439 .and_then(|s| s.parse::<u64>().ok())
1440 .ok_or_else(|| Error::InvalidData(format!("Invalid issue key: {}", key)))
1441}
1442
1443fn parse_pr_key(key: &str) -> Result<u64> {
1445 key.strip_prefix("pr#")
1446 .and_then(|s| s.parse::<u64>().ok())
1447 .ok_or_else(|| Error::InvalidData(format!("Invalid PR key: {}", key)))
1448}
1449
/// Extract the numeric part of a discussion id such as `thread-123`,
/// `review-123`, or `comment-123`; a bare number is accepted as-is.
///
/// Only the first matching prefix is stripped, and the remainder must be a
/// valid `u64`, otherwise `None` is returned.
fn parse_discussion_numeric_id(id: &str) -> Option<u64> {
    for prefix in ["thread-", "review-", "comment-"] {
        if let Some(rest) = id.strip_prefix(prefix) {
            return rest.parse::<u64>().ok();
        }
    }
    id.parse::<u64>().ok()
}
1473
1474#[cfg(test)]
1475mod tests {
1476 use super::*;
1477 use crate::types::GitHubBranchRef;
1478
    // Only the exact "gh#<n>" shape is a valid issue key; PR keys, bare
    // numbers, and an empty suffix must all be rejected.
    #[test]
    fn test_parse_issue_key() {
        assert_eq!(parse_issue_key("gh#123").unwrap(), 123);
        assert_eq!(parse_issue_key("gh#1").unwrap(), 1);
        assert!(parse_issue_key("pr#123").is_err());
        assert!(parse_issue_key("123").is_err());
        assert!(parse_issue_key("gh#").is_err());
    }

    // Symmetric check for PR keys: only "pr#<n>" is accepted.
    #[test]
    fn test_parse_pr_key() {
        assert_eq!(parse_pr_key("pr#456").unwrap(), 456);
        assert_eq!(parse_pr_key("pr#1").unwrap(), 1);
        assert!(parse_pr_key("gh#123").is_err());
        assert!(parse_pr_key("456").is_err());
    }

    // Discussion ids may carry a "thread-"/"review-"/"comment-" prefix or be
    // bare numbers; anything else parses to None.
    #[test]
    fn test_parse_discussion_numeric_id_strips_prefixes() {
        assert_eq!(
            parse_discussion_numeric_id("thread-3694869522"),
            Some(3694869522)
        );
        assert_eq!(
            parse_discussion_numeric_id("review-3694869522"),
            Some(3694869522)
        );
        assert_eq!(
            parse_discussion_numeric_id("comment-4147511088"),
            Some(4147511088)
        );
        assert_eq!(parse_discussion_numeric_id("12345"), Some(12345));
        assert_eq!(parse_discussion_numeric_id("weird-42"), None);
        assert_eq!(parse_discussion_numeric_id("review-notnumeric"), None);
        assert_eq!(parse_discussion_numeric_id(""), None);
    }
1527
    // A full GitHubUser maps field-for-field, with the numeric id stringified.
    #[test]
    fn test_map_user() {
        let gh_user = GitHubUser {
            id: 123,
            login: "testuser".to_string(),
            name: Some("Test User".to_string()),
            email: Some("test@example.com".to_string()),
            avatar_url: Some("https://example.com/avatar.png".to_string()),
        };

        let user = map_user(Some(&gh_user)).unwrap();
        assert_eq!(user.id, "123");
        assert_eq!(user.username, "testuser");
        assert_eq!(user.name, Some("Test User".to_string()));
        assert_eq!(user.email, Some("test@example.com".to_string()));
    }

    // map_user passes None through unchanged.
    #[test]
    fn test_map_user_none() {
        assert!(map_user(None).is_none());
    }

    // map_user_required uses the real user when one is present...
    #[test]
    fn test_map_user_required_with_user() {
        let gh_user = GitHubUser {
            id: 1,
            login: "user1".to_string(),
            name: Some("User One".to_string()),
            email: None,
            avatar_url: None,
        };
        let user = map_user_required(Some(&gh_user));
        assert_eq!(user.username, "user1");
    }

    // ...and substitutes an "unknown" placeholder user when absent.
    #[test]
    fn test_map_user_required_without_user() {
        let user = map_user_required(None);
        assert_eq!(user.id, "unknown");
        assert_eq!(user.username, "unknown");
        assert_eq!(user.name, Some("Unknown".to_string()));
    }

    // Labels map to their names only; color/description are dropped.
    #[test]
    fn test_map_labels() {
        let labels = vec![
            GitHubLabel {
                id: 1,
                name: "bug".to_string(),
                color: None,
                description: None,
            },
            GitHubLabel {
                id: 2,
                name: "feature".to_string(),
                color: Some("00ff00".to_string()),
                description: Some("Feature request".to_string()),
            },
        ];
        let result = map_labels(&labels);
        assert_eq!(result, vec!["bug", "feature"]);
    }

    // An empty label slice maps to an empty vec.
    #[test]
    fn test_map_labels_empty() {
        let result = map_labels(&[]);
        assert!(result.is_empty());
    }
1596
    // A plain issue comment maps author/timestamps and carries no position.
    #[test]
    fn test_map_comment() {
        let gh_comment = GitHubComment {
            id: 42,
            body: "Nice work!".to_string(),
            user: Some(GitHubUser {
                id: 1,
                login: "reviewer".to_string(),
                name: None,
                email: None,
                avatar_url: None,
            }),
            created_at: "2024-01-15T10:00:00Z".to_string(),
            updated_at: Some("2024-01-15T12:00:00Z".to_string()),
        };

        let comment = map_comment(&gh_comment);
        assert_eq!(comment.id, "42");
        assert_eq!(comment.body, "Nice work!");
        assert!(comment.author.is_some());
        assert_eq!(comment.author.unwrap().username, "reviewer");
        assert_eq!(comment.created_at, Some("2024-01-15T10:00:00Z".to_string()));
        assert_eq!(comment.updated_at, Some("2024-01-15T12:00:00Z".to_string()));
        assert!(comment.position.is_none());
    }

    // A review comment with a line and side=RIGHT produces a "new"-side
    // position anchored at commit_id.
    #[test]
    fn test_map_review_comment_with_line() {
        let gh_comment = GitHubReviewComment {
            id: 100,
            body: "Fix this".to_string(),
            user: Some(GitHubUser {
                id: 1,
                login: "reviewer".to_string(),
                name: None,
                email: None,
                avatar_url: None,
            }),
            created_at: "2024-01-15T10:00:00Z".to_string(),
            updated_at: None,
            path: "src/main.rs".to_string(),
            line: Some(42),
            original_line: None,
            position: None,
            side: Some("RIGHT".to_string()),
            diff_hunk: None,
            commit_id: Some("abc123".to_string()),
            original_commit_id: None,
            in_reply_to_id: None,
        };

        let comment = map_review_comment(&gh_comment);
        assert_eq!(comment.id, "100");
        assert_eq!(comment.body, "Fix this");
        let pos = comment.position.unwrap();
        assert_eq!(pos.file_path, "src/main.rs");
        assert_eq!(pos.line, 42);
        assert_eq!(pos.line_type, "new");
        assert_eq!(pos.commit_sha, Some("abc123".to_string()));
    }

    // side=LEFT maps to the "old" side and falls back to original_commit_id.
    #[test]
    fn test_map_review_comment_with_left_side() {
        let gh_comment = GitHubReviewComment {
            id: 101,
            body: "Old code".to_string(),
            user: None,
            created_at: "2024-01-15T10:00:00Z".to_string(),
            updated_at: None,
            path: "src/lib.rs".to_string(),
            line: Some(10),
            original_line: None,
            position: None,
            side: Some("LEFT".to_string()),
            diff_hunk: None,
            commit_id: None,
            original_commit_id: Some("def456".to_string()),
            in_reply_to_id: None,
        };

        let comment = map_review_comment(&gh_comment);
        let pos = comment.position.unwrap();
        assert_eq!(pos.line_type, "old");
        assert_eq!(pos.commit_sha, Some("def456".to_string()));
    }
1682
1683 #[test]
1684 fn test_map_review_comment_with_original_line_fallback() {
1685 let gh_comment = GitHubReviewComment {
1686 id: 102,
1687 body: "Outdated".to_string(),
1688 user: None,
1689 created_at: "2024-01-15T10:00:00Z".to_string(),
1690 updated_at: None,
1691 path: "src/lib.rs".to_string(),
1692 line: None,
1693 original_line: Some(5),
1694 position: None,
1695 side: None,
1696 diff_hunk: None,
1697 commit_id: None,
1698 original_commit_id: None,
1699 in_reply_to_id: None,
1700 };
1701
1702 let comment = map_review_comment(&gh_comment);
1703 let pos = comment.position.unwrap();
1704 assert_eq!(pos.line, 5);
1705 assert_eq!(pos.line_type, "new"); }
1707
    // With neither line nor original_line, no position is produced at all.
    #[test]
    fn test_map_review_comment_without_line() {
        let gh_comment = GitHubReviewComment {
            id: 103,
            body: "General".to_string(),
            user: None,
            created_at: "2024-01-15T10:00:00Z".to_string(),
            updated_at: None,
            path: "src/lib.rs".to_string(),
            line: None,
            original_line: None,
            position: None,
            side: None,
            diff_hunk: None,
            commit_id: None,
            original_commit_id: None,
            in_reply_to_id: None,
        };

        let comment = map_review_comment(&gh_comment);
        assert!(comment.position.is_none());
    }
1730
    // "modified" status: no new/deleted/renamed flags; counts and patch carry
    // through to the FileDiff.
    #[test]
    fn test_map_file() {
        let gh_file = GitHubFile {
            sha: "abc123".to_string(),
            filename: "src/main.rs".to_string(),
            status: "modified".to_string(),
            additions: 10,
            deletions: 3,
            changes: 13,
            patch: Some("@@ -1,3 +1,10 @@\n+new line".to_string()),
            previous_filename: None,
        };

        let diff = map_file(&gh_file);
        assert_eq!(diff.file_path, "src/main.rs");
        assert!(!diff.new_file);
        assert!(!diff.deleted_file);
        assert!(!diff.renamed_file);
        assert_eq!(diff.additions, Some(10));
        assert_eq!(diff.deletions, Some(3));
        assert!(diff.diff.contains("+new line"));
    }

    // "added" status sets new_file; a missing patch yields an empty diff body.
    #[test]
    fn test_map_file_added() {
        let gh_file = GitHubFile {
            sha: "abc".to_string(),
            filename: "new_file.rs".to_string(),
            status: "added".to_string(),
            additions: 50,
            deletions: 0,
            changes: 50,
            patch: None,
            previous_filename: None,
        };

        let diff = map_file(&gh_file);
        assert!(diff.new_file);
        assert!(!diff.deleted_file);
        assert!(diff.diff.is_empty());
    }

    // "removed" status sets deleted_file only.
    #[test]
    fn test_map_file_removed() {
        let gh_file = GitHubFile {
            sha: "abc".to_string(),
            filename: "old_file.rs".to_string(),
            status: "removed".to_string(),
            additions: 0,
            deletions: 30,
            changes: 30,
            patch: None,
            previous_filename: None,
        };

        let diff = map_file(&gh_file);
        assert!(diff.deleted_file);
        assert!(!diff.new_file);
    }

    // "renamed" status sets renamed_file and preserves previous_filename as
    // old_path.
    #[test]
    fn test_map_file_renamed() {
        let gh_file = GitHubFile {
            sha: "abc".to_string(),
            filename: "new_name.rs".to_string(),
            status: "renamed".to_string(),
            additions: 0,
            deletions: 0,
            changes: 0,
            patch: None,
            previous_filename: Some("old_name.rs".to_string()),
        };

        let diff = map_file(&gh_file);
        assert!(diff.renamed_file);
        assert_eq!(diff.old_path, Some("old_name.rs".to_string()));
    }
1808
    // A fully-populated PR maps every field: key format, branches, author,
    // assignees, reviewers, labels, and draft flag.
    #[test]
    fn test_map_pull_request_with_full_data() {
        let pr = GitHubPullRequest {
            id: 1,
            number: 10,
            title: "Add feature".to_string(),
            body: Some("Description".to_string()),
            state: "open".to_string(),
            html_url: "https://github.com/test/repo/pull/10".to_string(),
            draft: false,
            merged: false,
            merged_at: None,
            user: Some(GitHubUser {
                id: 1,
                login: "author".to_string(),
                name: None,
                email: None,
                avatar_url: None,
            }),
            assignees: vec![GitHubUser {
                id: 2,
                login: "assignee".to_string(),
                name: Some("Assignee".to_string()),
                email: None,
                avatar_url: None,
            }],
            requested_reviewers: vec![GitHubUser {
                id: 3,
                login: "reviewer".to_string(),
                name: None,
                email: None,
                avatar_url: None,
            }],
            labels: vec![GitHubLabel {
                id: 1,
                name: "enhancement".to_string(),
                color: None,
                description: None,
            }],
            head: GitHubBranchRef {
                ref_name: "feature-branch".to_string(),
                sha: "abc123".to_string(),
            },
            base: GitHubBranchRef {
                ref_name: "main".to_string(),
                sha: "def456".to_string(),
            },
            created_at: "2024-01-01T00:00:00Z".to_string(),
            updated_at: "2024-01-02T00:00:00Z".to_string(),
        };

        let mr = map_pull_request(&pr);
        assert_eq!(mr.key, "pr#10");
        assert_eq!(mr.title, "Add feature");
        assert_eq!(mr.description, Some("Description".to_string()));
        assert_eq!(mr.state, "open");
        assert_eq!(mr.source, "github");
        assert_eq!(mr.source_branch, "feature-branch");
        assert_eq!(mr.target_branch, "main");
        assert!(mr.author.is_some());
        assert_eq!(mr.assignees.len(), 1);
        assert_eq!(mr.assignees[0].username, "assignee");
        assert_eq!(mr.reviewers.len(), 1);
        assert_eq!(mr.reviewers[0].username, "reviewer");
        assert_eq!(mr.labels, vec!["enhancement"]);
        assert!(!mr.draft);
    }

    // A closed PR with merged_at set (even with merged=false) maps to the
    // "merged" state.
    #[test]
    fn test_map_pull_request_merged_at() {
        let pr = GitHubPullRequest {
            id: 1,
            number: 10,
            title: "Merged PR".to_string(),
            body: None,
            state: "closed".to_string(),
            html_url: "https://github.com/test/repo/pull/10".to_string(),
            draft: false,
            merged: false,
            merged_at: Some("2024-01-03T00:00:00Z".to_string()),
            user: None,
            assignees: vec![],
            requested_reviewers: vec![],
            labels: vec![],
            head: GitHubBranchRef {
                ref_name: "feature".to_string(),
                sha: "abc123".to_string(),
            },
            base: GitHubBranchRef {
                ref_name: "main".to_string(),
                sha: "def456".to_string(),
            },
            created_at: "2024-01-01T00:00:00Z".to_string(),
            updated_at: "2024-01-02T00:00:00Z".to_string(),
        };

        let mr = map_pull_request(&pr);
        assert_eq!(mr.state, "merged");
    }
1908
    // An issue maps to a "gh#<n>" key with source "github" and label names.
    #[test]
    fn test_map_issue() {
        let gh_issue = GitHubIssue {
            id: 1,
            number: 42,
            title: "Test Issue".to_string(),
            body: Some("Issue body".to_string()),
            state: "open".to_string(),
            html_url: "https://github.com/test/repo/issues/42".to_string(),
            user: Some(GitHubUser {
                id: 1,
                login: "author".to_string(),
                name: None,
                email: None,
                avatar_url: None,
            }),
            assignees: vec![],
            labels: vec![GitHubLabel {
                id: 1,
                name: "bug".to_string(),
                color: None,
                description: None,
            }],
            created_at: "2024-01-01T00:00:00Z".to_string(),
            updated_at: "2024-01-02T00:00:00Z".to_string(),
            closed_at: None,
            pull_request: None,
        };

        let issue = map_issue(&gh_issue);
        assert_eq!(issue.key, "gh#42");
        assert_eq!(issue.title, "Test Issue");
        assert_eq!(issue.state, "open");
        assert_eq!(issue.source, "github");
        assert_eq!(issue.labels, vec!["bug"]);
    }

    // Multiple assignees are all mapped, in order.
    #[test]
    fn test_map_issue_with_assignees() {
        let gh_issue = GitHubIssue {
            id: 1,
            number: 1,
            title: "Issue".to_string(),
            body: None,
            state: "open".to_string(),
            html_url: "https://github.com/test/repo/issues/1".to_string(),
            user: None,
            assignees: vec![
                GitHubUser {
                    id: 1,
                    login: "user1".to_string(),
                    name: None,
                    email: None,
                    avatar_url: None,
                },
                GitHubUser {
                    id: 2,
                    login: "user2".to_string(),
                    name: None,
                    email: None,
                    avatar_url: None,
                },
            ],
            labels: vec![],
            created_at: "2024-01-01T00:00:00Z".to_string(),
            updated_at: "2024-01-02T00:00:00Z".to_string(),
            closed_at: None,
            pull_request: None,
        };

        let issue = map_issue(&gh_issue);
        assert_eq!(issue.assignees.len(), 2);
        assert_eq!(issue.assignees[0].username, "user1");
        assert_eq!(issue.assignees[1].username, "user2");
    }
1984
    // State mapping precedence over the same base fixture: open, then draft,
    // merged (via the `merged` flag), and closed.
    #[test]
    fn test_map_pull_request_states() {
        // Fresh fixture per case so each mutation starts from "open".
        let base_pr = || GitHubPullRequest {
            id: 1,
            number: 10,
            title: "Test PR".to_string(),
            body: None,
            state: "open".to_string(),
            html_url: "https://github.com/test/repo/pull/10".to_string(),
            draft: false,
            merged: false,
            merged_at: None,
            user: None,
            assignees: vec![],
            requested_reviewers: vec![],
            labels: vec![],
            head: GitHubBranchRef {
                ref_name: "feature".to_string(),
                sha: "abc123".to_string(),
            },
            base: GitHubBranchRef {
                ref_name: "main".to_string(),
                sha: "def456".to_string(),
            },
            created_at: "2024-01-01T00:00:00Z".to_string(),
            updated_at: "2024-01-02T00:00:00Z".to_string(),
        };

        let pr = map_pull_request(&base_pr());
        assert_eq!(pr.state, "open");

        let mut draft_pr = base_pr();
        draft_pr.draft = true;
        let pr = map_pull_request(&draft_pr);
        assert_eq!(pr.state, "draft");

        let mut merged_pr = base_pr();
        merged_pr.merged = true;
        let pr = map_pull_request(&merged_pr);
        assert_eq!(pr.state, "merged");

        let mut closed_pr = base_pr();
        closed_pr.state = "closed".to_string();
        let pr = map_pull_request(&closed_pr);
        assert_eq!(pr.state, "closed");
    }
2035
    // Convenience: wrap a plain &str in the SecretString the client expects.
    fn token(s: &str) -> SecretString {
        SecretString::from(s.to_string())
    }

    // repo_url prepends "/repos/{owner}/{repo}" to the given path.
    #[test]
    fn test_repo_url() {
        let client =
            GitHubClient::with_base_url("https://api.github.com", "owner", "repo", token("token"));
        assert_eq!(
            client.repo_url("/issues"),
            "https://api.github.com/repos/owner/repo/issues"
        );
        assert_eq!(
            client.repo_url("/pulls/1"),
            "https://api.github.com/repos/owner/repo/pulls/1"
        );
    }

    // A trailing slash on the base URL must not produce a double slash.
    #[test]
    fn test_repo_url_strips_trailing_slash() {
        let client =
            GitHubClient::with_base_url("https://api.github.com/", "owner", "repo", token("token"));
        assert_eq!(
            client.repo_url("/issues"),
            "https://api.github.com/repos/owner/repo/issues"
        );
    }

    // Both provider traits report the same provider name for this client.
    #[test]
    fn test_provider_name() {
        let client = GitHubClient::new("owner", "repo", token("token"));
        assert_eq!(IssueProvider::provider_name(&client), "github");
        assert_eq!(MergeRequestProvider::provider_name(&client), "github");
    }
2070
2071 mod integration {
2076 use super::*;
2077 use httpmock::prelude::*;
2078
        // Client pointed at the local mock server instead of api.github.com.
        fn create_test_client(server: &MockServer) -> GitHubClient {
            GitHubClient::with_base_url(server.base_url(), "owner", "repo", token("test-token"))
        }

        // Minimal issue payload matching what the GitHub REST API returns.
        fn sample_issue_json() -> serde_json::Value {
            serde_json::json!({
                "id": 1,
                "number": 42,
                "title": "Test Issue",
                "body": "Issue body",
                "state": "open",
                "html_url": "https://github.com/owner/repo/issues/42",
                "user": {"id": 1, "login": "author"},
                "assignees": [],
                "labels": [{"id": 1, "name": "bug"}],
                "created_at": "2024-01-01T00:00:00Z",
                "updated_at": "2024-01-02T00:00:00Z"
            })
        }

        // Minimal pull-request payload; note `head`/`base` use the wire name
        // "ref" for what the Rust struct calls `ref_name`.
        fn sample_pr_json() -> serde_json::Value {
            serde_json::json!({
                "id": 1,
                "number": 10,
                "title": "Test PR",
                "body": "PR body",
                "state": "open",
                "html_url": "https://github.com/owner/repo/pull/10",
                "draft": false,
                "merged": false,
                "user": {"id": 1, "login": "author"},
                "assignees": [],
                "requested_reviewers": [],
                "labels": [],
                "head": {"ref": "feature", "sha": "abc123"},
                "base": {"ref": "main", "sha": "def456"},
                "created_at": "2024-01-01T00:00:00Z",
                "updated_at": "2024-01-02T00:00:00Z"
            })
        }
2119
        // Listing issues sends the bearer token and maps each payload.
        #[tokio::test]
        async fn test_get_issues() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(GET)
                    .path("/repos/owner/repo/issues")
                    .header("Authorization", "Bearer test-token");
                then.status(200)
                    .json_body(serde_json::json!([sample_issue_json()]));
            });

            let client = create_test_client(&server);
            let issues = client
                .get_issues(IssueFilter {
                    state: Some("open".to_string()),
                    ..Default::default()
                })
                .await
                .unwrap()
                .items;

            assert_eq!(issues.len(), 1);
            assert_eq!(issues[0].key, "gh#42");
            assert_eq!(issues[0].title, "Test Issue");
        }

        // GitHub's issues endpoint also returns PRs (marked by a
        // "pull_request" field); those must be filtered out client-side.
        #[tokio::test]
        async fn test_get_issues_filters_pull_requests() {
            let server = MockServer::start();

            let mut pr_as_issue = sample_issue_json();
            pr_as_issue["pull_request"] = serde_json::json!({"url": "..."});
            pr_as_issue["number"] = serde_json::json!(99);

            server.mock(|when, then| {
                when.method(GET).path("/repos/owner/repo/issues");
                then.status(200)
                    .json_body(serde_json::json!([sample_issue_json(), pr_as_issue]));
            });

            let client = create_test_client(&server);
            let issues = client
                .get_issues(IssueFilter::default())
                .await
                .unwrap()
                .items;

            assert_eq!(issues.len(), 1);
            assert_eq!(issues[0].key, "gh#42");
        }

        // Every IssueFilter field translates to the expected query params,
        // including offset -> page and sort-field renaming.
        #[tokio::test]
        async fn test_get_issues_with_all_filters() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(GET)
                    .path("/repos/owner/repo/issues")
                    .query_param("state", "closed")
                    .query_param("labels", "bug,feature")
                    .query_param("assignee", "user1")
                    .query_param("per_page", "10")
                    .query_param("page", "2")
                    .query_param("sort", "created")
                    .query_param("direction", "asc");
                then.status(200).json_body(serde_json::json!([]));
            });

            let client = create_test_client(&server);
            let issues = client
                .get_issues(IssueFilter {
                    state: Some("closed".to_string()),
                    labels: Some(vec!["bug".to_string(), "feature".to_string()]),
                    assignee: Some("user1".to_string()),
                    limit: Some(10),
                    offset: Some(10),
                    sort_by: Some("created_at".to_string()),
                    sort_order: Some("asc".to_string()),
                    ..Default::default()
                })
                .await
                .unwrap()
                .items;

            assert!(issues.is_empty());
        }

        // Single-issue fetch by "gh#<n>" key.
        #[tokio::test]
        async fn test_get_issue() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(GET).path("/repos/owner/repo/issues/42");
                then.status(200).json_body(sample_issue_json());
            });

            let client = create_test_client(&server);
            let issue = client.get_issue("gh#42").await.unwrap();

            assert_eq!(issue.key, "gh#42");
            assert_eq!(issue.title, "Test Issue");
        }

        // Fetching a number that is actually a PR must error, not succeed.
        #[tokio::test]
        async fn test_get_issue_rejects_pr() {
            let server = MockServer::start();

            let mut issue_json = sample_issue_json();
            issue_json["pull_request"] = serde_json::json!({"url": "..."});

            server.mock(|when, then| {
                when.method(GET).path("/repos/owner/repo/issues/42");
                then.status(200).json_body(issue_json);
            });

            let client = create_test_client(&server);
            let result = client.get_issue("gh#42").await;
            assert!(result.is_err());
        }
2241
        // Creating an issue POSTs a JSON body containing the title.
        #[tokio::test]
        async fn test_create_issue() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(POST)
                    .path("/repos/owner/repo/issues")
                    .body_includes("\"title\":\"New Issue\"");
                then.status(201).json_body(sample_issue_json());
            });

            let client = create_test_client(&server);
            let issue = client
                .create_issue(CreateIssueInput {
                    title: "New Issue".to_string(),
                    description: Some("Body".to_string()),
                    labels: vec!["bug".to_string()],
                    ..Default::default()
                })
                .await
                .unwrap();

            assert_eq!(issue.key, "gh#42");
        }

        // Updating an issue PATCHes the changed fields.
        #[tokio::test]
        async fn test_update_issue() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(PATCH)
                    .path("/repos/owner/repo/issues/42")
                    .body_includes("\"state\":\"closed\"");
                then.status(200).json_body(sample_issue_json());
            });

            let client = create_test_client(&server);
            let issue = client
                .update_issue(
                    "gh#42",
                    UpdateIssueInput {
                        state: Some("closed".to_string()),
                        ..Default::default()
                    },
                )
                .await
                .unwrap();

            assert_eq!(issue.key, "gh#42");
        }

        // The generic "opened" state is translated to GitHub's "open".
        #[tokio::test]
        async fn test_update_issue_state_mapping() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(PATCH)
                    .path("/repos/owner/repo/issues/42")
                    .body_includes("\"state\":\"open\"");
                then.status(200).json_body(sample_issue_json());
            });

            let client = create_test_client(&server);
            let result = client
                .update_issue(
                    "gh#42",
                    UpdateIssueInput {
                        state: Some("opened".to_string()),
                        ..Default::default()
                    },
                )
                .await;

            assert!(result.is_ok());
        }
2317
        // Issue comments are fetched and mapped from the comments endpoint.
        #[tokio::test]
        async fn test_get_comments() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(GET)
                    .path("/repos/owner/repo/issues/42/comments");
                then.status(200).json_body(serde_json::json!([{
                    "id": 1,
                    "body": "Comment text",
                    "user": {"id": 1, "login": "commenter"},
                    "created_at": "2024-01-15T10:00:00Z"
                }]));
            });

            let client = create_test_client(&server);
            let comments = client.get_comments("gh#42").await.unwrap().items;

            assert_eq!(comments.len(), 1);
            assert_eq!(comments[0].body, "Comment text");
        }

        // Adding a comment POSTs the body; fully-qualified call because both
        // IssueProvider and MergeRequestProvider expose add_comment.
        #[tokio::test]
        async fn test_add_comment() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(POST)
                    .path("/repos/owner/repo/issues/42/comments")
                    .body_includes("\"body\":\"My comment\"");
                then.status(201).json_body(serde_json::json!({
                    "id": 1,
                    "body": "My comment",
                    "user": {"id": 1, "login": "me"},
                    "created_at": "2024-01-15T10:00:00Z"
                }));
            });

            let client = create_test_client(&server);
            let comment = IssueProvider::add_comment(&client, "gh#42", "My comment")
                .await
                .unwrap();

            assert_eq!(comment.body, "My comment");
        }
2363
        // Single PR fetch by "pr#<n>" key maps branches and title.
        #[tokio::test]
        async fn test_get_pull_request() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(GET).path("/repos/owner/repo/pulls/10");
                then.status(200).json_body(sample_pr_json());
            });

            let client = create_test_client(&server);
            let mr = client.get_merge_request("pr#10").await.unwrap();

            assert_eq!(mr.key, "pr#10");
            assert_eq!(mr.title, "Test PR");
            assert_eq!(mr.source_branch, "feature");
            assert_eq!(mr.target_branch, "main");
        }

        // Listing PRs with a default filter maps each payload.
        #[tokio::test]
        async fn test_get_pull_requests() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(GET).path("/repos/owner/repo/pulls");
                then.status(200)
                    .json_body(serde_json::json!([sample_pr_json()]));
            });

            let client = create_test_client(&server);
            let mrs = client
                .get_merge_requests(MrFilter::default())
                .await
                .unwrap()
                .items;

            assert_eq!(mrs.len(), 1);
            assert_eq!(mrs[0].key, "pr#10");
        }

        // MrFilter fields translate to GitHub's state/head/base/per_page
        // query parameters.
        #[tokio::test]
        async fn test_get_pull_requests_with_filters() {
            let server = MockServer::start();

            server.mock(|when, then| {
                when.method(GET)
                    .path("/repos/owner/repo/pulls")
                    .query_param("state", "closed")
                    .query_param("head", "feature")
                    .query_param("base", "main")
                    .query_param("per_page", "5");
                then.status(200).json_body(serde_json::json!([]));
            });

            let client = create_test_client(&server);
            let mrs = client
                .get_merge_requests(MrFilter {
                    state: Some("closed".to_string()),
                    source_branch: Some("feature".to_string()),
                    target_branch: Some("main".to_string()),
                    limit: Some(5),
                    ..Default::default()
                })
                .await
                .unwrap()
                .items;

            assert!(mrs.is_empty());
        }

        // GitHub has no "merged" list state: the client queries state=closed
        // and filters merged PRs locally.
        #[tokio::test]
        async fn test_get_pull_requests_merged_filter() {
            let server = MockServer::start();

            let mut merged_pr = sample_pr_json();
            merged_pr["merged"] = serde_json::json!(true);
            merged_pr["state"] = serde_json::json!("closed");

            let open_pr = sample_pr_json();

            server.mock(|when, then| {
                when.method(GET)
                    .path("/repos/owner/repo/pulls")
                    .query_param("state", "closed");
                then.status(200)
                    .json_body(serde_json::json!([merged_pr, open_pr]));
            });

            let client = create_test_client(&server);
            let mrs = client
                .get_merge_requests(MrFilter {
                    state: Some("merged".to_string()),
                    ..Default::default()
                })
                .await
                .unwrap()
                .items;

            assert_eq!(mrs.len(), 1);
            assert_eq!(mrs[0].state, "merged");
        }
2465
    #[tokio::test]
    async fn test_get_discussions() {
        // get_discussions("pr#10") aggregates three GitHub endpoints:
        // PR reviews, inline review comments, and issue-level comments.
        // One item from each mock should yield three discussions.
        let server = MockServer::start();

        // 1) A review with a top-level verdict ("APPROVED").
        server.mock(|when, then| {
            when.method(GET).path("/repos/owner/repo/pulls/10/reviews");
            then.status(200).json_body(serde_json::json!([{
                "id": 1,
                "user": {"id": 1, "login": "reviewer"},
                "body": "LGTM",
                "state": "APPROVED",
                "submitted_at": "2024-01-15T10:00:00Z"
            }]));
        });

        // 2) An inline review comment anchored to a file/line.
        server.mock(|when, then| {
            when.method(GET).path("/repos/owner/repo/pulls/10/comments");
            then.status(200).json_body(serde_json::json!([{
                "id": 100,
                "body": "Fix this line",
                "user": {"id": 2, "login": "reviewer2"},
                "created_at": "2024-01-15T11:00:00Z",
                "path": "src/main.rs",
                "line": 42,
                "side": "RIGHT"
            }]));
        });

        // 3) A general (issue-style) comment on the PR.
        server.mock(|when, then| {
            when.method(GET)
                .path("/repos/owner/repo/issues/10/comments");
            then.status(200).json_body(serde_json::json!([{
                "id": 200,
                "body": "General comment",
                "user": {"id": 3, "login": "user3"},
                "created_at": "2024-01-15T12:00:00Z"
            }]));
        });

        let client = create_test_client(&server);
        let discussions = client.get_discussions("pr#10").await.unwrap().items;

        // One discussion per source: review + inline comment + issue comment.
        assert_eq!(discussions.len(), 3);
    }
2514
2515 #[tokio::test]
2516 async fn test_get_diffs() {
2517 let server = MockServer::start();
2518
2519 server.mock(|when, then| {
2520 when.method(GET).path("/repos/owner/repo/pulls/10/files");
2521 then.status(200).json_body(serde_json::json!([{
2522 "sha": "abc123",
2523 "filename": "src/main.rs",
2524 "status": "modified",
2525 "additions": 10,
2526 "deletions": 3,
2527 "changes": 13,
2528 "patch": "@@ +new code"
2529 }]));
2530 });
2531
2532 let client = create_test_client(&server);
2533 let diffs = client.get_diffs("pr#10").await.unwrap().items;
2534
2535 assert_eq!(diffs.len(), 1);
2536 assert_eq!(diffs[0].file_path, "src/main.rs");
2537 assert_eq!(diffs[0].additions, Some(10));
2538 }
2539
2540 #[tokio::test]
2541 async fn test_add_mr_comment_general() {
2542 let server = MockServer::start();
2543
2544 server.mock(|when, then| {
2546 when.method(GET).path("/repos/owner/repo/pulls/10");
2547 then.status(200).json_body(sample_pr_json());
2548 });
2549
2550 server.mock(|when, then| {
2552 when.method(POST)
2553 .path("/repos/owner/repo/issues/10/comments");
2554 then.status(201).json_body(serde_json::json!({
2555 "id": 1,
2556 "body": "General comment",
2557 "user": {"id": 1, "login": "me"},
2558 "created_at": "2024-01-15T10:00:00Z"
2559 }));
2560 });
2561
2562 let client = create_test_client(&server);
2563 let comment = MergeRequestProvider::add_comment(
2564 &client,
2565 "pr#10",
2566 CreateCommentInput {
2567 body: "General comment".to_string(),
2568 position: None,
2569 discussion_id: None,
2570 },
2571 )
2572 .await
2573 .unwrap();
2574
2575 assert_eq!(comment.body, "General comment");
2576 }
2577
    #[tokio::test]
    async fn test_add_mr_comment_inline() {
        // A comment WITH a CodePosition must hit the review-comments
        // endpoint and serialize the position into the request body.
        let server = MockServer::start();

        // PR lookup mock — NOTE(review): presumably add_comment resolves the
        // PR before posting; confirm against the client implementation.
        server.mock(|when, then| {
            when.method(GET).path("/repos/owner/repo/pulls/10");
            then.status(200).json_body(sample_pr_json());
        });

        // The inline-comment POST: body_includes checks rely on serde's
        // exact field serialization ("path", "line") in the JSON payload.
        server.mock(|when, then| {
            when.method(POST)
                .path("/repos/owner/repo/pulls/10/comments")
                .body_includes("\"path\":\"src/main.rs\"")
                .body_includes("\"line\":42");
            then.status(201).json_body(serde_json::json!({
                "id": 1,
                "body": "Inline comment",
                "user": {"id": 1, "login": "me"},
                "created_at": "2024-01-15T10:00:00Z",
                "path": "src/main.rs",
                "line": 42,
                "side": "RIGHT"
            }));
        });

        let client = create_test_client(&server);
        let comment = MergeRequestProvider::add_comment(
            &client,
            "pr#10",
            CreateCommentInput {
                body: "Inline comment".to_string(),
                position: Some(CodePosition {
                    file_path: "src/main.rs".to_string(),
                    line: 42,
                    line_type: "new".to_string(),
                    commit_sha: Some("abc123".to_string()),
                }),
                discussion_id: None,
            },
        )
        .await
        .unwrap();

        assert_eq!(comment.body, "Inline comment");
    }
2625
2626 #[tokio::test]
2627 async fn test_handle_response_401() {
2628 let server = MockServer::start();
2629
2630 server.mock(|when, then| {
2631 when.method(GET).path("/repos/owner/repo/issues");
2632 then.status(401).body("Bad credentials");
2633 });
2634
2635 let client = create_test_client(&server);
2636 let result = client.get_issues(IssueFilter::default()).await;
2637
2638 assert!(result.is_err());
2639 let err = result.unwrap_err();
2640 assert!(matches!(err, Error::Unauthorized(_)));
2641 }
2642
2643 #[tokio::test]
2644 async fn test_handle_response_404() {
2645 let server = MockServer::start();
2646
2647 server.mock(|when, then| {
2648 when.method(GET).path("/repos/owner/repo/issues/999");
2649 then.status(404).body("Not Found");
2650 });
2651
2652 let client = create_test_client(&server);
2653 let result = client.get_issue("gh#999").await;
2654
2655 assert!(result.is_err());
2656 let err = result.unwrap_err();
2657 assert!(matches!(err, Error::NotFound(_)));
2658 }
2659
2660 #[tokio::test]
2661 async fn test_handle_response_500() {
2662 let server = MockServer::start();
2663
2664 server.mock(|when, then| {
2665 when.method(GET).path("/repos/owner/repo/issues");
2666 then.status(500).body("Internal Server Error");
2667 });
2668
2669 let client = create_test_client(&server);
2670 let result = client.get_issues(IssueFilter::default()).await;
2671
2672 assert!(result.is_err());
2673 let err = result.unwrap_err();
2674 assert!(matches!(err, Error::ServerError { .. }));
2675 }
2676
2677 #[tokio::test]
2678 async fn test_get_current_user() {
2679 let server = MockServer::start();
2680
2681 server.mock(|when, then| {
2682 when.method(GET).path("/user");
2683 then.status(200).json_body(serde_json::json!({
2684 "id": 1,
2685 "login": "testuser",
2686 "name": "Test User",
2687 "email": "test@example.com"
2688 }));
2689 });
2690
2691 let client = create_test_client(&server);
2692 let user = client.get_current_user().await.unwrap();
2693
2694 assert_eq!(user.username, "testuser");
2695 assert_eq!(user.name, Some("Test User".to_string()));
2696 }
2697
    /// Fixture: a single GitHub Actions workflow run (id 100) that has
    /// completed with conclusion "failure", on branch `feat/test`.
    /// Timestamps are one minute apart for duration-related assertions.
    fn sample_workflow_run_json() -> serde_json::Value {
        serde_json::json!({
            "id": 100,
            "name": "CI",
            "status": "completed",
            "conclusion": "failure",
            "head_branch": "feat/test",
            "head_sha": "abc123def456",
            "html_url": "https://github.com/owner/repo/actions/runs/100",
            "run_started_at": "2024-01-01T00:00:00Z",
            "updated_at": "2024-01-01T00:01:00Z"
        })
    }
2715
    /// Fixture: the jobs list for run 100 — one successful job ("Build",
    /// id 201) and one failed job ("Test", id 202), both completed.
    fn sample_jobs_json() -> serde_json::Value {
        serde_json::json!({
            "jobs": [
                {
                    "id": 201,
                    "name": "Build",
                    "status": "completed",
                    "conclusion": "success",
                    "html_url": "https://github.com/owner/repo/actions/runs/100/job/201",
                    "started_at": "2024-01-01T00:00:00Z",
                    "completed_at": "2024-01-01T00:00:30Z"
                },
                {
                    "id": 202,
                    "name": "Test",
                    "status": "completed",
                    "conclusion": "failure",
                    "html_url": "https://github.com/owner/repo/actions/runs/100/job/202",
                    "started_at": "2024-01-01T00:00:00Z",
                    "completed_at": "2024-01-01T00:00:45Z"
                }
            ]
        })
    }
2740
    #[tokio::test]
    async fn test_get_pipeline_by_branch() {
        // End-to-end pipeline lookup by branch: the client queries both
        // completed and in-progress runs, expands the run's jobs, and
        // (because include_failed_logs=true) fetches logs for the failed
        // job to build an error snippet.
        let server = MockServer::start();

        // Completed runs for branch "main" -> one failed run (id 100).
        // NOTE: same path as the mock below; httpmock disambiguates via
        // the `status` query param.
        server.mock(|when, then| {
            when.method(GET)
                .path("/repos/owner/repo/actions/runs")
                .query_param("branch", "main")
                .query_param("status", "completed");
            then.status(200).json_body(serde_json::json!({
                "workflow_runs": [sample_workflow_run_json()]
            }));
        });

        // No runs currently in progress.
        server.mock(|when, then| {
            when.method(GET)
                .path("/repos/owner/repo/actions/runs")
                .query_param("status", "in_progress");
            then.status(200)
                .json_body(serde_json::json!({ "workflow_runs": [] }));
        });

        // Jobs for run 100: "Build" (success, 201) and "Test" (failure, 202).
        server.mock(|when, then| {
            when.method(GET)
                .path("/repos/owner/repo/actions/runs/100/jobs");
            then.status(200).json_body(sample_jobs_json());
        });

        // Raw log for the failed job; contains an "error:" line for the
        // snippet extractor to find.
        server.mock(|when, then| {
            when.method(GET)
                .path("/repos/owner/repo/actions/jobs/202/logs");
            then.status(200)
                .body("Step 1\nerror: test failed\nStep 3\n");
        });

        let client = create_test_client(&server);
        let input = devboy_core::GetPipelineInput {
            branch: Some("main".into()),
            mr_key: None,
            include_failed_logs: true,
        };

        let result = client.get_pipeline(input).await.unwrap();

        assert_eq!(result.id, "100");
        assert_eq!(result.status, PipelineStatus::Failed);
        assert_eq!(result.reference, "main");
        // Job summary reflects the two jobs from the fixture.
        assert_eq!(result.summary.total, 2);
        assert_eq!(result.summary.success, 1);
        assert_eq!(result.summary.failed, 1);
        // Both jobs are grouped under a single stage named after the workflow.
        assert_eq!(result.stages.len(), 1);
        assert_eq!(result.stages[0].name, "CI");
        assert_eq!(result.stages[0].jobs.len(), 2);
        // The failed job carries an error snippet from the fetched log.
        assert_eq!(result.failed_jobs.len(), 1);
        assert_eq!(result.failed_jobs[0].name, "Test");
        assert!(result.failed_jobs[0].error_snippet.is_some());
    }
2802
    #[tokio::test]
    async fn test_get_pipeline_by_mr_key() {
        // Pipeline lookup via an MR key instead of a branch: the client
        // first resolves the PR, then queries workflow runs. With
        // include_failed_logs=false no per-job log endpoint is needed.
        let server = MockServer::start();

        // Resolve PR #42 (provides the branch to search runs for).
        server.mock(|when, then| {
            when.method(GET).path("/repos/owner/repo/pulls/42");
            then.status(200).json_body(sample_pr_json());
        });

        // Completed runs -> the single failed run (id 100).
        server.mock(|when, then| {
            when.method(GET)
                .path("/repos/owner/repo/actions/runs")
                .query_param("status", "completed");
            then.status(200).json_body(serde_json::json!({
                "workflow_runs": [sample_workflow_run_json()]
            }));
        });

        // No in-progress runs.
        server.mock(|when, then| {
            when.method(GET)
                .path("/repos/owner/repo/actions/runs")
                .query_param("status", "in_progress");
            then.status(200)
                .json_body(serde_json::json!({ "workflow_runs": [] }));
        });

        // Jobs for run 100 (from the shared fixture).
        server.mock(|when, then| {
            when.method(GET)
                .path("/repos/owner/repo/actions/runs/100/jobs");
            then.status(200).json_body(sample_jobs_json());
        });

        let client = create_test_client(&server);
        let input = devboy_core::GetPipelineInput {
            branch: None,
            mr_key: Some("pr#42".into()),
            include_failed_logs: false,
        };

        let result = client.get_pipeline(input).await.unwrap();
        assert_eq!(result.id, "100");
    }
2849
2850 #[tokio::test]
2851 async fn test_get_job_logs_smart_mode() {
2852 let server = MockServer::start();
2853
2854 server.mock(|when, then| {
2855 when.method(GET)
2856 .path("/repos/owner/repo/actions/jobs/202/logs");
2857 then.status(200)
2858 .body("Building...\nCompiling...\nerror: cannot find module 'foo'\nDone.\n");
2859 });
2860
2861 let client = create_test_client(&server);
2862 let options = devboy_core::JobLogOptions {
2863 mode: devboy_core::JobLogMode::Smart,
2864 };
2865
2866 let result = client.get_job_logs("202", options).await.unwrap();
2867 assert_eq!(result.job_id, "202");
2868 assert_eq!(result.mode, "smart");
2869 assert!(result.content.contains("cannot find module"));
2870 }
2871
2872 #[tokio::test]
2873 async fn test_get_job_logs_search_mode() {
2874 let server = MockServer::start();
2875
2876 server.mock(|when, then| {
2877 when.method(GET)
2878 .path("/repos/owner/repo/actions/jobs/202/logs");
2879 then.status(200)
2880 .body("Line 1\nLine 2\nERROR: something broke\nLine 4\nLine 5\n");
2881 });
2882
2883 let client = create_test_client(&server);
2884 let options = devboy_core::JobLogOptions {
2885 mode: devboy_core::JobLogMode::Search {
2886 pattern: "ERROR".into(),
2887 context: 1,
2888 max_matches: 5,
2889 },
2890 };
2891
2892 let result = client.get_job_logs("202", options).await.unwrap();
2893 assert_eq!(result.mode, "search");
2894 assert!(result.content.contains("ERROR: something broke"));
2895 assert!(result.content.contains("Match at line 3"));
2896 }
2897
2898 #[tokio::test]
2899 async fn test_get_job_logs_paginated_mode() {
2900 let server = MockServer::start();
2901
2902 server.mock(|when, then| {
2903 when.method(GET)
2904 .path("/repos/owner/repo/actions/jobs/202/logs");
2905 then.status(200)
2906 .body("Line 1\nLine 2\nLine 3\nLine 4\nLine 5\n");
2907 });
2908
2909 let client = create_test_client(&server);
2910 let options = devboy_core::JobLogOptions {
2911 mode: devboy_core::JobLogMode::Paginated {
2912 offset: 1,
2913 limit: 2,
2914 },
2915 };
2916
2917 let result = client.get_job_logs("202", options).await.unwrap();
2918 assert_eq!(result.mode, "paginated");
2919 assert!(result.content.contains("Line 2"));
2920 assert!(result.content.contains("Line 3"));
2921 assert!(!result.content.contains("Line 1"));
2922 assert!(!result.content.contains("Line 4"));
2923 }
2924
2925 #[tokio::test]
2930 async fn test_get_issue_attachments_parses_body_and_comments() {
2931 let server = MockServer::start();
2932
2933 server.mock(|when, then| {
2934 when.method(GET).path("/repos/owner/repo/issues/42");
2935 then.status(200).json_body(serde_json::json!({
2936 "id": 1,
2937 "number": 42,
2938 "title": "bug",
2939 "body": "Error: ",
2940 "state": "open",
2941 "html_url": "https://github.com/owner/repo/issues/42",
2942 "created_at": "2024-01-01T00:00:00Z",
2943 "updated_at": "2024-01-02T00:00:00Z"
2944 }));
2945 });
2946 server.mock(|when, then| {
2947 when.method(GET)
2948 .path("/repos/owner/repo/issues/42/comments");
2949 then.status(200).json_body(serde_json::json!([
2950 {
2951 "id": 10,
2952 "body": "Log [here](https://user-images.githubusercontent.com/1/log.txt)",
2953 "html_url": "https://github.com/owner/repo/issues/42#issuecomment-10",
2954 "created_at": "2024-01-03T00:00:00Z",
2955 "updated_at": "2024-01-03T00:00:00Z"
2956 }
2957 ]));
2958 });
2959
2960 let client = create_test_client(&server);
2961 let attachments = client.get_issue_attachments("gh#42").await.unwrap();
2962 assert_eq!(attachments.len(), 2);
2963 assert_eq!(attachments[0].filename, "screen");
2964 assert_eq!(attachments[1].filename, "here");
2965 }
2966
2967 #[tokio::test]
2968 async fn test_download_attachment_fetches_url() {
2969 let server = MockServer::start();
2970
2971 server.mock(|when, then| {
2972 when.method(GET).path("/cdn/file.txt");
2973 then.status(200).body("github-bytes");
2974 });
2975
2976 let client = create_test_client(&server);
2977 let url = format!("{}/cdn/file.txt", server.base_url());
2978 let bytes = client.download_attachment("gh#42", &url).await.unwrap();
2979 assert_eq!(bytes, b"github-bytes");
2980 }
2981
2982 #[tokio::test]
2983 async fn test_github_asset_capabilities() {
2984 let server = MockServer::start();
2985 let client = create_test_client(&server);
2986 let caps = client.asset_capabilities();
2987 assert!(!caps.issue.upload, "GitHub has no public upload API");
2988 assert!(caps.issue.download);
2989 assert!(caps.issue.list);
2990 assert!(!caps.issue.delete);
2991 assert!(!caps.merge_request.upload);
2992 assert!(caps.merge_request.download);
2993 }
2994 }
2995
#[test]
fn test_map_gh_status() {
    // Table of (status, conclusion) pairs and the pipeline status each
    // should map to, covering success/failure/running/pending/cancelled
    // and the fully-unknown case.
    let cases = [
        (Some("completed"), Some("success"), PipelineStatus::Success),
        (Some("completed"), Some("failure"), PipelineStatus::Failed),
        (Some("in_progress"), None, PipelineStatus::Running),
        (Some("queued"), None, PipelineStatus::Pending),
        (Some("completed"), Some("cancelled"), PipelineStatus::Canceled),
        (None, None, PipelineStatus::Unknown),
    ];
    for (status, conclusion, expected) in cases {
        assert_eq!(map_gh_status(status, conclusion), expected);
    }
}
3021
#[test]
fn test_strip_ansi() {
    // ANSI escape sequences are removed; plain text passes through.
    let cases = [
        ("\x1b[31merror\x1b[0m", "error"),
        ("no ansi here", "no ansi here"),
        ("\x1b[1m\x1b[32mgreen\x1b[0m", "green"),
    ];
    for (input, expected) in cases {
        assert_eq!(strip_ansi(input), expected);
    }
}
3028
#[test]
fn test_extract_errors_finds_patterns() {
    // A line matching an error pattern is included in the snippet.
    let log = "Step 1: build\nStep 2: test\nerror: test failed at line 42\nStep 4: done\n";
    let snippet = extract_errors(log, 10).expect("non-empty log yields a snippet");
    assert!(snippet.contains("error: test failed"));
}
3035
#[test]
fn test_extract_errors_fallback_to_tail() {
    // With no error-pattern hits, the log tail is returned instead.
    let log = "Line 1\nLine 2\nLine 3\n";
    let snippet = extract_errors(log, 10).expect("non-empty log yields a snippet");
    assert!(snippet.contains("Line 3"));
}
3042
#[test]
fn test_extract_errors_empty_log() {
    // An empty log yields no snippet at all.
    let snippet = extract_errors("", 10);
    assert!(snippet.is_none());
}
3047
#[test]
fn test_estimate_duration() {
    // 90 seconds elapse between the two RFC 3339 timestamps.
    let start = Some("2024-01-01T00:00:00Z");
    let end = Some("2024-01-01T00:01:30Z");
    assert_eq!(estimate_duration(start, end), Some(90));
}
3053
#[test]
fn test_estimate_duration_invalid() {
    // A missing or unparseable start timestamp yields None.
    assert!(estimate_duration(None, Some("2024-01-01T00:00:00Z")).is_none());
    assert!(estimate_duration(Some("not-a-date"), Some("2024-01-01T00:00:00Z")).is_none());
}
3059}