1use async_trait::async_trait;
4use devboy_core::{
5 AssetCapabilities, AssetMeta, CodePosition, Comment, ContextCapabilities, CreateCommentInput,
6 CreateIssueInput, CreateMergeRequestInput, Discussion, Error, FailedJob, FileDiff,
7 GetPipelineInput, Issue, IssueFilter, IssueProvider, JobLogMode, JobLogOptions, JobLogOutput,
8 MergeRequest, MergeRequestProvider, MrFilter, PipelineInfo, PipelineJob, PipelineProvider,
9 PipelineStage, PipelineStatus, PipelineSummary, Provider, ProviderResult, Result,
10 UpdateIssueInput, UpdateMergeRequestInput, User, parse_markdown_attachments,
11};
12use secrecy::{ExposeSecret, SecretString};
13use serde::Deserialize;
14use tracing::{debug, warn};
15
16use crate::DEFAULT_GITHUB_URL;
17use crate::types::{
18 CreateCommentRequest, CreateIssueRequest, CreatePullRequestRequest, CreateReviewCommentRequest,
19 GitHubComment, GitHubFile, GitHubIssue, GitHubLabel, GitHubPullRequest, GitHubReview,
20 GitHubReviewComment, GitHubUser, UpdateIssueRequest, UpdatePullRequestRequest,
21};
22
/// Client for the GitHub REST API, scoped to a single `owner/repo`.
///
/// Implements the provider traits (issues, merge requests, pipelines) by
/// translating between GitHub payloads and the provider-neutral core types.
pub struct GitHubClient {
    // API root, e.g. "https://api.github.com"; stored without a trailing slash.
    base_url: String,
    owner: String,
    repo: String,
    // Access token; may be empty, in which case requests go out unauthenticated.
    token: SecretString,
    client: reqwest::Client,
}
30
31impl GitHubClient {
32 pub fn new(owner: impl Into<String>, repo: impl Into<String>, token: SecretString) -> Self {
34 Self::with_base_url(DEFAULT_GITHUB_URL, owner, repo, token)
35 }
36
37 pub fn with_base_url(
39 base_url: impl Into<String>,
40 owner: impl Into<String>,
41 repo: impl Into<String>,
42 token: SecretString,
43 ) -> Self {
44 Self {
45 base_url: base_url.into().trim_end_matches('/').to_string(),
46 owner: owner.into(),
47 repo: repo.into(),
48 token,
49 client: reqwest::Client::builder()
50 .user_agent("devboy-tools")
51 .build()
52 .expect("Failed to create HTTP client"),
53 }
54 }
55
56 fn request(&self, method: reqwest::Method, url: &str) -> reqwest::RequestBuilder {
58 let mut builder = self
59 .client
60 .request(method, url)
61 .header("Accept", "application/vnd.github+json")
62 .header("X-GitHub-Api-Version", "2022-11-28");
63
64 let token = self.token.expose_secret();
65 if !token.is_empty() {
66 builder = builder.header("Authorization", format!("Bearer {}", token));
67 }
68
69 builder
70 }
71
72 async fn get<T: serde::de::DeserializeOwned>(&self, url: &str) -> Result<T> {
74 debug!(url = url, "GitHub GET request");
75
76 let response = self
77 .request(reqwest::Method::GET, url)
78 .send()
79 .await
80 .map_err(|e| Error::Http(e.to_string()))?;
81
82 self.handle_response(response).await
83 }
84
85 async fn post<T: serde::de::DeserializeOwned, B: serde::Serialize>(
87 &self,
88 url: &str,
89 body: &B,
90 ) -> Result<T> {
91 debug!(url = url, "GitHub POST request");
92
93 let response = self
94 .request(reqwest::Method::POST, url)
95 .json(body)
96 .send()
97 .await
98 .map_err(|e| Error::Http(e.to_string()))?;
99
100 self.handle_response(response).await
101 }
102
103 async fn patch<T: serde::de::DeserializeOwned, B: serde::Serialize>(
105 &self,
106 url: &str,
107 body: &B,
108 ) -> Result<T> {
109 debug!(url = url, "GitHub PATCH request");
110
111 let response = self
112 .request(reqwest::Method::PATCH, url)
113 .json(body)
114 .send()
115 .await
116 .map_err(|e| Error::Http(e.to_string()))?;
117
118 self.handle_response(response).await
119 }
120
121 async fn handle_response<T: serde::de::DeserializeOwned>(
123 &self,
124 response: reqwest::Response,
125 ) -> Result<T> {
126 let status = response.status();
127
128 if !status.is_success() {
129 let status_code = status.as_u16();
130 let message = response.text().await.unwrap_or_default();
131 warn!(
132 status = status_code,
133 message = message,
134 "GitHub API error response"
135 );
136 return Err(Error::from_status(status_code, message));
137 }
138
139 response
140 .json()
141 .await
142 .map_err(|e| Error::InvalidData(format!("Failed to parse response: {}", e)))
143 }
144
145 fn repo_url(&self, endpoint: &str) -> String {
147 format!(
148 "{}/repos/{}/{}{}",
149 self.base_url, self.owner, self.repo, endpoint
150 )
151 }
152}
153
154fn map_user(gh_user: Option<&GitHubUser>) -> Option<User> {
159 gh_user.map(|u| User {
160 id: u.id.to_string(),
161 username: u.login.clone(),
162 name: u.name.clone(),
163 email: u.email.clone(),
164 avatar_url: u.avatar_url.clone(),
165 })
166}
167
168fn map_user_required(gh_user: Option<&GitHubUser>) -> User {
169 map_user(gh_user).unwrap_or_else(|| User {
170 id: "unknown".to_string(),
171 username: "unknown".to_string(),
172 name: Some("Unknown".to_string()),
173 ..Default::default()
174 })
175}
176
177fn map_labels(labels: &[GitHubLabel]) -> Vec<String> {
178 labels.iter().map(|l| l.name.clone()).collect()
179}
180
181fn map_issue(gh_issue: &GitHubIssue) -> Issue {
182 let attachments_count = gh_issue
186 .body
187 .as_deref()
188 .map(|body| {
189 parse_markdown_attachments(body)
190 .iter()
191 .filter(|a| is_github_attachment_url("https://github.com", &a.url))
192 .count() as u32
193 })
194 .filter(|&c| c > 0);
195
196 Issue {
197 key: format!("gh#{}", gh_issue.number),
198 title: gh_issue.title.clone(),
199 description: gh_issue.body.clone(),
200 state: gh_issue.state.clone(),
201 source: "github".to_string(),
202 priority: None, labels: map_labels(&gh_issue.labels),
204 author: map_user(gh_issue.user.as_ref()),
205 assignees: gh_issue
206 .assignees
207 .iter()
208 .map(|u| map_user_required(Some(u)))
209 .collect(),
210 url: Some(gh_issue.html_url.clone()),
211 created_at: Some(gh_issue.created_at.clone()),
212 updated_at: Some(gh_issue.updated_at.clone()),
213 attachments_count,
214 parent: None,
215 subtasks: vec![],
216 }
217}
218
219fn map_pull_request(gh_pr: &GitHubPullRequest) -> MergeRequest {
220 let state = if gh_pr.merged || gh_pr.merged_at.is_some() {
222 "merged".to_string()
223 } else if gh_pr.state == "closed" {
224 "closed".to_string()
225 } else if gh_pr.draft {
226 "draft".to_string()
227 } else {
228 "open".to_string()
229 };
230
231 MergeRequest {
232 key: format!("pr#{}", gh_pr.number),
233 title: gh_pr.title.clone(),
234 description: gh_pr.body.clone(),
235 state,
236 source: "github".to_string(),
237 source_branch: gh_pr.head.ref_name.clone(),
238 target_branch: gh_pr.base.ref_name.clone(),
239 author: map_user(gh_pr.user.as_ref()),
240 assignees: gh_pr
241 .assignees
242 .iter()
243 .map(|u| map_user_required(Some(u)))
244 .collect(),
245 reviewers: gh_pr
246 .requested_reviewers
247 .iter()
248 .map(|u| map_user_required(Some(u)))
249 .collect(),
250 labels: map_labels(&gh_pr.labels),
251 draft: gh_pr.draft,
252 url: Some(gh_pr.html_url.clone()),
253 created_at: Some(gh_pr.created_at.clone()),
254 updated_at: Some(gh_pr.updated_at.clone()),
255 }
256}
257
258fn map_comment(gh_comment: &GitHubComment) -> Comment {
259 Comment {
260 id: gh_comment.id.to_string(),
261 body: gh_comment.body.clone(),
262 author: map_user(gh_comment.user.as_ref()),
263 created_at: Some(gh_comment.created_at.clone()),
264 updated_at: gh_comment.updated_at.clone(),
265 position: None,
266 }
267}
268
269fn map_review_comment(gh_comment: &GitHubReviewComment) -> Comment {
270 let position = gh_comment
271 .line
272 .or(gh_comment.original_line)
273 .map(|line| CodePosition {
274 file_path: gh_comment.path.clone(),
275 line,
276 line_type: gh_comment
277 .side
278 .as_ref()
279 .map(|s| if s == "LEFT" { "old" } else { "new" })
280 .unwrap_or("new")
281 .to_string(),
282 commit_sha: gh_comment
283 .commit_id
284 .clone()
285 .or_else(|| gh_comment.original_commit_id.clone()),
286 });
287
288 Comment {
289 id: gh_comment.id.to_string(),
290 body: gh_comment.body.clone(),
291 author: map_user(gh_comment.user.as_ref()),
292 created_at: Some(gh_comment.created_at.clone()),
293 updated_at: gh_comment.updated_at.clone(),
294 position,
295 }
296}
297
298fn map_file(gh_file: &GitHubFile) -> FileDiff {
299 FileDiff {
300 file_path: gh_file.filename.clone(),
301 old_path: gh_file.previous_filename.clone(),
302 new_file: gh_file.status == "added",
303 deleted_file: gh_file.status == "removed",
304 renamed_file: gh_file.status == "renamed",
305 diff: gh_file.patch.clone().unwrap_or_default(),
306 additions: Some(gh_file.additions),
307 deletions: Some(gh_file.deletions),
308 }
309}
310
311#[async_trait]
316impl IssueProvider for GitHubClient {
317 async fn get_issues(&self, filter: IssueFilter) -> Result<ProviderResult<Issue>> {
318 let mut url = self.repo_url("/issues");
319 let mut params = vec![];
320
321 if let Some(state) = &filter.state {
323 let gh_state = match state.as_str() {
324 "opened" | "open" => "open",
325 "closed" => "closed",
326 "all" => "all",
327 _ => "open",
328 };
329 params.push(format!("state={}", gh_state));
330 }
331
332 if let Some(labels) = &filter.labels
333 && !labels.is_empty()
334 {
335 params.push(format!("labels={}", labels.join(",")));
336 }
337
338 if let Some(assignee) = &filter.assignee {
339 params.push(format!("assignee={}", assignee));
340 }
341
342 if let Some(limit) = filter.limit {
343 params.push(format!("per_page={}", limit.min(100)));
344 }
345
346 if let Some(offset) = filter.offset {
347 let per_page = filter.limit.unwrap_or(30);
349 let page = (offset / per_page) + 1;
350 params.push(format!("page={}", page));
351 }
352
353 if let Some(sort_by) = &filter.sort_by {
354 let gh_sort = match sort_by.as_str() {
355 "created_at" | "created" => "created",
356 "updated_at" | "updated" => "updated",
357 _ => "updated",
358 };
359 params.push(format!("sort={}", gh_sort));
360 }
361
362 if let Some(order) = &filter.sort_order {
363 params.push(format!("direction={}", order));
364 }
365
366 if !params.is_empty() {
367 url.push_str(&format!("?{}", params.join("&")));
368 }
369
370 let gh_issues: Vec<GitHubIssue> = self.get(&url).await?;
371
372 let issues: Vec<Issue> = gh_issues
374 .iter()
375 .filter(|i| i.pull_request.is_none())
376 .map(map_issue)
377 .collect();
378
379 Ok(issues.into())
380 }
381
382 async fn get_issue(&self, key: &str) -> Result<Issue> {
383 let number = parse_issue_key(key)?;
384 let url = self.repo_url(&format!("/issues/{}", number));
385 let gh_issue: GitHubIssue = self.get(&url).await?;
386
387 if gh_issue.pull_request.is_some() {
389 return Err(Error::InvalidData(format!(
390 "{} is a pull request, not an issue",
391 key
392 )));
393 }
394
395 Ok(map_issue(&gh_issue))
396 }
397
398 async fn create_issue(&self, input: CreateIssueInput) -> Result<Issue> {
399 let url = self.repo_url("/issues");
400 let request = CreateIssueRequest {
401 title: input.title,
402 body: input.description,
403 labels: input.labels,
404 assignees: input.assignees,
405 };
406
407 let gh_issue: GitHubIssue = self.post(&url, &request).await?;
408 Ok(map_issue(&gh_issue))
409 }
410
411 async fn update_issue(&self, key: &str, input: UpdateIssueInput) -> Result<Issue> {
412 let number = parse_issue_key(key)?;
413 let url = self.repo_url(&format!("/issues/{}", number));
414
415 let state = input.state.map(|s| match s.as_str() {
417 "opened" | "open" => "open".to_string(),
418 "closed" => "closed".to_string(),
419 _ => s,
420 });
421
422 let request = UpdateIssueRequest {
423 title: input.title,
424 body: input.description,
425 state,
426 labels: input.labels,
427 assignees: input.assignees,
428 };
429
430 let gh_issue: GitHubIssue = self.patch(&url, &request).await?;
431 Ok(map_issue(&gh_issue))
432 }
433
434 async fn get_comments(&self, issue_key: &str) -> Result<ProviderResult<Comment>> {
435 let number = parse_issue_key(issue_key)?;
436 let url = self.repo_url(&format!("/issues/{}/comments", number));
437 let gh_comments: Vec<GitHubComment> = self.get(&url).await?;
438 Ok(gh_comments
439 .iter()
440 .map(map_comment)
441 .collect::<Vec<_>>()
442 .into())
443 }
444
445 async fn add_comment(&self, issue_key: &str, body: &str) -> Result<Comment> {
446 let number = parse_issue_key(issue_key)?;
447 let url = self.repo_url(&format!("/issues/{}/comments", number));
448 let request = CreateCommentRequest {
449 body: body.to_string(),
450 };
451
452 let gh_comment: GitHubComment = self.post(&url, &request).await?;
453 Ok(map_comment(&gh_comment))
454 }
455
456 async fn get_issue_attachments(&self, issue_key: &str) -> Result<Vec<AssetMeta>> {
457 let issue = self.get_issue(issue_key).await?;
460 let comments = self.get_comments(issue_key).await?;
461
462 let mut attachments: Vec<AssetMeta> = Vec::new();
463 let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
464 let base = self.base_url.clone();
465 let mut collect = |source: &str| {
466 for att in parse_markdown_attachments(source) {
467 if is_github_attachment_url(&base, &att.url) && seen.insert(att.url.clone()) {
471 attachments.push(markdown_to_meta(&att));
472 }
473 }
474 };
475 if let Some(body) = issue.description.as_deref() {
476 collect(body);
477 }
478 for comment in &comments.items {
479 collect(&comment.body);
480 }
481 Ok(attachments)
482 }
483
484 async fn download_attachment(&self, _issue_key: &str, asset_id: &str) -> Result<Vec<u8>> {
485 download_github_url(&self.client, &self.base_url, &self.token, asset_id).await
486 }
487
488 fn asset_capabilities(&self) -> AssetCapabilities {
489 let caps = ContextCapabilities {
493 upload: false,
494 download: true,
495 delete: false,
496 list: true,
497 max_file_size: None,
498 allowed_types: Vec::new(),
499 };
500 AssetCapabilities {
501 issue: caps.clone(),
502 issue_comment: caps.clone(),
503 merge_request: caps.clone(),
504 mr_comment: caps,
505 }
506 }
507
508 fn provider_name(&self) -> &'static str {
509 "github"
510 }
511}
512
#[async_trait]
impl MergeRequestProvider for GitHubClient {
    /// Lists pull requests, mapped to provider-neutral `MergeRequest`s.
    ///
    /// GitHub has no native "merged" list state, so that filter is sent as
    /// "closed" and narrowed locally after mapping.
    async fn get_merge_requests(&self, filter: MrFilter) -> Result<ProviderResult<MergeRequest>> {
        let mut url = self.repo_url("/pulls");
        let mut params = vec![];

        if let Some(state) = &filter.state {
            let gh_state = match state.as_str() {
                "opened" | "open" => "open",
                "closed" => "closed",
                // "merged" is fetched as "closed" and filtered below.
                "merged" => "closed", "all" => "all",
                _ => "open",
            };
            params.push(format!("state={}", gh_state));
        }

        if let Some(source_branch) = &filter.source_branch {
            params.push(format!("head={}", source_branch));
        }

        if let Some(target_branch) = &filter.target_branch {
            params.push(format!("base={}", target_branch));
        }

        if let Some(limit) = filter.limit {
            params.push(format!("per_page={}", limit.min(100)));
        }

        // Always most-recently-updated first.
        params.push("sort=updated".to_string());
        params.push("direction=desc".to_string());

        if !params.is_empty() {
            url.push_str(&format!("?{}", params.join("&")));
        }

        let gh_prs: Vec<GitHubPullRequest> = self.get(&url).await?;

        let mut prs: Vec<MergeRequest> = gh_prs.iter().map(map_pull_request).collect();

        // Second half of the "merged" emulation: keep only merged PRs.
        if filter.state.as_deref() == Some("merged") {
            prs.retain(|pr| pr.state == "merged");
        }

        Ok(prs.into())
    }

    /// Fetches a single pull request by key (e.g. "pr#42").
    async fn get_merge_request(&self, key: &str) -> Result<MergeRequest> {
        let number = parse_pr_key(key)?;
        let url = self.repo_url(&format!("/pulls/{}", number));
        let gh_pr: GitHubPullRequest = self.get(&url).await?;
        Ok(map_pull_request(&gh_pr))
    }

    /// Builds discussion threads from three sources: review comments
    /// (grouped into threads via `in_reply_to_id`), review summaries, and
    /// plain issue-level comments. GitHub's REST API does not expose thread
    /// resolution state, so `resolved` is always false here.
    async fn get_discussions(&self, mr_key: &str) -> Result<ProviderResult<Discussion>> {
        let number = parse_pr_key(mr_key)?;

        let reviews_url = self.repo_url(&format!("/pulls/{}/reviews", number));
        let review_comments_url = self.repo_url(&format!("/pulls/{}/comments", number));
        let issue_comments_url = self.repo_url(&format!("/issues/{}/comments", number));

        let reviews: Vec<GitHubReview> = self.get(&reviews_url).await?;
        let review_comments: Vec<GitHubReviewComment> = self.get(&review_comments_url).await?;
        let issue_comments: Vec<GitHubComment> = self.get(&issue_comments_url).await?;

        let mut discussions = Vec::new();

        // Group review comments into threads: a reply belongs to the thread
        // rooted at `in_reply_to_id`; a root comment starts its own thread.
        let mut comment_threads: std::collections::HashMap<u64, Vec<&GitHubReviewComment>> =
            std::collections::HashMap::new();

        for comment in &review_comments {
            let thread_id = comment.in_reply_to_id.unwrap_or(comment.id);
            comment_threads.entry(thread_id).or_default().push(comment);
        }

        for (thread_id, comments) in comment_threads {
            let mapped_comments: Vec<Comment> =
                comments.iter().map(|c| map_review_comment(c)).collect();
            // The thread inherits the code position of its first comment.
            let position = mapped_comments.first().and_then(|c| c.position.clone());

            discussions.push(Discussion {
                id: format!("thread-{}", thread_id),
                resolved: false, resolved_by: None,
                comments: mapped_comments,
                position,
            });
        }

        // Review summaries (approve/request-changes bodies) become their own
        // single-comment discussions.
        for review in &reviews {
            let mut comments = Vec::new();
            if let Some(body) = &review.body
                && !body.is_empty()
            {
                comments.push(Comment {
                    id: review.id.to_string(),
                    body: body.clone(),
                    author: map_user(review.user.as_ref()),
                    created_at: review.submitted_at.clone(),
                    updated_at: None,
                    position: None,
                });
            }

            if !comments.is_empty() || !review.state.is_empty() {
                discussions.push(Discussion {
                    id: format!("review-{}", review.id),
                    resolved: false,
                    resolved_by: None,
                    comments,
                    position: None,
                });
            }
        }

        // Plain PR-level comments (from the issues endpoint) each form a
        // standalone discussion.
        for comment in &issue_comments {
            discussions.push(Discussion {
                id: format!("comment-{}", comment.id),
                resolved: false,
                resolved_by: None,
                comments: vec![map_comment(comment)],
                position: None,
            });
        }

        Ok(discussions.into())
    }

    /// Lists the changed files of a pull request as `FileDiff`s.
    async fn get_diffs(&self, mr_key: &str) -> Result<ProviderResult<FileDiff>> {
        let number = parse_pr_key(mr_key)?;
        let url = self.repo_url(&format!("/pulls/{}/files", number));
        let gh_files: Vec<GitHubFile> = self.get(&url).await?;
        Ok(gh_files.iter().map(map_file).collect::<Vec<_>>().into())
    }

    /// Adds a comment to a pull request: a positioned review comment when
    /// `input.position` is set, otherwise a plain issue-level comment.
    async fn add_comment(&self, mr_key: &str, input: CreateCommentInput) -> Result<Comment> {
        let number = parse_pr_key(mr_key)?;

        // Fetch the PR up front: this both validates that the key refers to
        // a real PR (not an issue) and provides the head SHA for review
        // comments that don't specify a commit.
        let pr_url = self.repo_url(&format!("/pulls/{}", number));
        let pr_result: Result<GitHubPullRequest> = self.get(&pr_url).await;

        // NOTE(review): matching on the stringified status is fragile —
        // assumes Error::Http's Display includes "404"; verify.
        if let Err(Error::Http(status)) = &pr_result
            && status.contains("404")
        {
            return Err(Error::InvalidData(format!(
                "{} is not a valid pull request (it may be an issue)",
                mr_key
            )));
        }

        let pr: GitHubPullRequest = pr_result?;

        if let Some(position) = &input.position {
            let url = self.repo_url(&format!("/pulls/{}/comments", number));

            // Default to the PR's current head commit when no SHA was given.
            let commit_sha = if let Some(sha) = &position.commit_sha {
                sha.clone()
            } else {
                pr.head.sha
            };

            let request = CreateReviewCommentRequest {
                body: input.body,
                commit_id: commit_sha,
                path: position.file_path.clone(),
                line: Some(position.line),
                side: Some(if position.line_type == "old" {
                    "LEFT".to_string()
                } else {
                    "RIGHT".to_string()
                }),
                // Reply threading: only discussion ids that carry a numeric
                // comment id can be threaded.
                in_reply_to: input
                    .discussion_id
                    .as_deref()
                    .and_then(parse_discussion_numeric_id),
            };

            let gh_comment: GitHubReviewComment = self.post(&url, &request).await?;
            return Ok(map_review_comment(&gh_comment));
        }

        // No position: post a plain comment via the issues endpoint.
        let url = self.repo_url(&format!("/issues/{}/comments", number));
        let request = CreateCommentRequest { body: input.body };

        let gh_comment: GitHubComment = self.post(&url, &request).await?;
        Ok(map_comment(&gh_comment))
    }

    /// Creates a pull request, then best-effort applies labels and reviewer
    /// requests (failures there are logged, not fatal). When either was
    /// applied, the PR is re-fetched so the returned value reflects them.
    async fn create_merge_request(&self, input: CreateMergeRequestInput) -> Result<MergeRequest> {
        let url = self.repo_url("/pulls");

        let request = CreatePullRequestRequest {
            title: input.title,
            body: input.description,
            head: input.source_branch,
            base: input.target_branch,
            draft: if input.draft { Some(true) } else { None },
        };

        let gh_pr: GitHubPullRequest = self.post(&url, &request).await?;

        if !input.labels.is_empty() {
            // Labels are managed through the issues API, keyed by PR number.
            let labels_url = self.repo_url(&format!("/issues/{}/labels", gh_pr.number));
            let result: Result<serde_json::Value> = self
                .post(&labels_url, &serde_json::json!({ "labels": input.labels }))
                .await;
            if let Err(err) = result {
                warn!(
                    error = ?err,
                    pr_number = gh_pr.number,
                    "Failed to add labels to GitHub pull request"
                );
            }
        }

        if !input.reviewers.is_empty() {
            let reviewers_url =
                self.repo_url(&format!("/pulls/{}/requested_reviewers", gh_pr.number));
            let result: Result<serde_json::Value> = self
                .post(
                    &reviewers_url,
                    &serde_json::json!({ "reviewers": input.reviewers }),
                )
                .await;
            if let Err(err) = result {
                warn!(
                    error = ?err,
                    pr_number = gh_pr.number,
                    "Failed to add reviewers to GitHub pull request"
                );
            }
        }

        if !input.labels.is_empty() || !input.reviewers.is_empty() {
            // Re-fetch so labels/reviewers show in the returned MergeRequest;
            // on failure, fall back to the original creation payload.
            let pr_url = self.repo_url(&format!("/pulls/{}", gh_pr.number));
            match self.get::<GitHubPullRequest>(&pr_url).await {
                Ok(updated_pr) => return Ok(map_pull_request(&updated_pr)),
                Err(err) => {
                    warn!(
                        error = ?err,
                        pr_number = gh_pr.number,
                        "Failed to re-fetch GitHub pull request"
                    );
                }
            }
        }

        Ok(map_pull_request(&gh_pr))
    }

    /// Updates a pull request; label changes are best-effort and trigger a
    /// re-fetch so the result reflects them.
    async fn update_merge_request(
        &self,
        key: &str,
        input: UpdateMergeRequestInput,
    ) -> Result<MergeRequest> {
        let number = parse_pr_key(key)?;
        let url = self.repo_url(&format!("/pulls/{}", number));

        // Normalize neutral state verbs to GitHub's "open"/"closed".
        let state = input.state.map(|s| match s.as_str() {
            "opened" | "open" | "reopen" => "open".to_string(),
            "closed" | "close" => "closed".to_string(),
            _ => s,
        });

        let request = UpdatePullRequestRequest {
            title: input.title,
            body: input.description,
            state,
            draft: input.draft,
        };

        let gh_pr: GitHubPullRequest = self.patch(&url, &request).await?;

        if let Some(labels) = input.labels {
            // NOTE(review): GitHub documents PUT (replace) and POST (add) for
            // the labels endpoint; PATCH is not listed — confirm this works.
            let labels_url = self.repo_url(&format!("/issues/{}/labels", number));
            let result: Result<serde_json::Value> = self
                .patch(&labels_url, &serde_json::json!({ "labels": labels }))
                .await;
            if let Err(err) = result {
                warn!(
                    error = ?err,
                    pr_number = number,
                    "Failed to update labels on GitHub pull request"
                );
            }

            let pr_url = self.repo_url(&format!("/pulls/{}", number));
            match self.get::<GitHubPullRequest>(&pr_url).await {
                Ok(updated_pr) => return Ok(map_pull_request(&updated_pr)),
                Err(err) => {
                    warn!(
                        error = ?err,
                        pr_number = number,
                        "Failed to re-fetch GitHub pull request"
                    );
                }
            }
        }

        Ok(map_pull_request(&gh_pr))
    }

    /// Collects attachment links referenced in the PR description and in all
    /// discussion comments, de-duplicated by URL.
    async fn get_mr_attachments(&self, mr_key: &str) -> Result<Vec<AssetMeta>> {
        let mr = self.get_merge_request(mr_key).await?;
        let discussions = self.get_discussions(mr_key).await?;

        let mut attachments: Vec<AssetMeta> = Vec::new();
        let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
        let base = self.base_url.clone();
        let mut collect = |source: &str| {
            for att in parse_markdown_attachments(source) {
                if is_github_attachment_url(&base, &att.url) && seen.insert(att.url.clone()) {
                    attachments.push(markdown_to_meta(&att));
                }
            }
        };
        if let Some(body) = mr.description.as_deref() {
            collect(body);
        }
        for discussion in &discussions.items {
            for comment in &discussion.comments {
                collect(&comment.body);
            }
        }
        Ok(attachments)
    }

    /// Downloads an attachment; the asset id is the attachment URL itself.
    async fn download_mr_attachment(&self, _mr_key: &str, asset_id: &str) -> Result<Vec<u8>> {
        download_github_url(&self.client, &self.base_url, &self.token, asset_id).await
    }

    fn provider_name(&self) -> &'static str {
        "github"
    }
}
874
/// Hosts considered safe targets for attachment downloads.
/// `is_github_trusted_host` matches these exactly or as a dot-separated
/// suffix (`*.{host}`).
const GITHUB_TRUSTED_HOSTS: &[&str] = &[
    "github.com",
    "api.github.com",
    "githubusercontent.com",
    "user-images.githubusercontent.com",
    "raw.githubusercontent.com",
    "objects.githubusercontent.com",
    "camo.githubusercontent.com",
];
889
890async fn download_github_url(
895 client: &reqwest::Client,
896 base_url: &str,
897 token: &SecretString,
898 url: &str,
899) -> Result<Vec<u8>> {
900 let needs_auth = is_github_api_host(base_url, url);
901 let mut request = client
902 .get(url)
903 .header("Accept", "application/octet-stream")
904 .header("User-Agent", "devboy-tools");
905 let token_value = token.expose_secret();
906 if needs_auth && !token_value.is_empty() {
907 request = request.header("Authorization", format!("Bearer {token_value}"));
908 } else if !is_github_trusted_host(base_url, url) {
909 tracing::warn!(
910 url,
911 "downloading cross-origin attachment without auth headers"
912 );
913 }
914 let response = request
915 .send()
916 .await
917 .map_err(|e| Error::Http(e.to_string()))?;
918 let status = response.status();
919 if !status.is_success() {
920 let message = response.text().await.unwrap_or_default();
921 return Err(Error::from_status(status.as_u16(), message));
922 }
923 let bytes = response
924 .bytes()
925 .await
926 .map_err(|e| Error::Http(format!("failed to read attachment bytes: {e}")))?;
927 Ok(bytes.to_vec())
928}
929
930fn is_github_api_host(base_url: &str, url: &str) -> bool {
935 let (url_scheme, url_host) = split_scheme_host(url);
936 if url_scheme != "https" {
937 return false;
938 }
939 if url_host == "api.github.com" || url_host == "github.com" {
941 return true;
942 }
943 let (_base_scheme, base_host) = split_scheme_host(base_url);
945 url_host == base_host
946}
947
948fn is_github_trusted_host(base_url: &str, url: &str) -> bool {
954 let (url_scheme, url_host) = split_scheme_host(url);
955 if url_scheme != "https" {
956 return false;
957 }
958
959 for trusted in GITHUB_TRUSTED_HOSTS {
961 if url_host == *trusted || url_host.ends_with(&format!(".{trusted}")) {
962 return true;
963 }
964 }
965
966 let (_base_scheme, base_host) = split_scheme_host(base_url);
968 url_host == base_host
969}
970
/// Splits a URL into `(scheme, host)`, both lowercased; empty strings when
/// there is no `://` separator.
///
/// The host is the authority component terminated at the first `/`, `?`, or
/// `#`, with any userinfo (`user:pass@`) stripped. Previously the authority
/// was only cut at `/`, so `https://evil.com?x=.githubusercontent.com` kept
/// the query inside the "host" and could pass suffix-based trust checks, and
/// `https://trusted.host@evil.com` spoofed the host via userinfo. A port, if
/// present, is retained.
fn split_scheme_host(url: &str) -> (String, String) {
    let (scheme, rest) = match url.split_once("://") {
        Some((s, r)) => (s.to_ascii_lowercase(), r),
        None => return (String::new(), String::new()),
    };
    // Authority ends at the first path, query, or fragment delimiter.
    let authority_end = rest
        .find(|c| c == '/' || c == '?' || c == '#')
        .unwrap_or(rest.len());
    let authority = &rest[..authority_end];
    // Drop any userinfo prefix so it cannot masquerade as the host.
    let host = match authority.rsplit_once('@') {
        Some((_userinfo, h)) => h,
        None => authority,
    };
    (scheme, host.to_ascii_lowercase())
}
980
981fn is_github_attachment_url(base_url: &str, url: &str) -> bool {
989 let (scheme, host) = split_scheme_host(url);
990 if scheme.is_empty() {
991 return false; }
993 if host.ends_with("githubusercontent.com") {
995 return true;
996 }
997 if host == "github.com" {
999 let path = url
1000 .split("://")
1001 .nth(1)
1002 .unwrap_or("")
1003 .split_once('/')
1004 .map(|(_, p)| p)
1005 .unwrap_or("");
1006 if path.starts_with("user-attachments/assets/")
1007 || path.starts_with("user-attachments/files/")
1008 {
1009 return true;
1010 }
1011 }
1012 let (_base_scheme, base_host) = split_scheme_host(base_url);
1014 if host == base_host {
1015 let path = url
1016 .split("://")
1017 .nth(1)
1018 .unwrap_or("")
1019 .split_once('/')
1020 .map(|(_, p)| p)
1021 .unwrap_or("");
1022 return path.contains("/assets/");
1023 }
1024 false
1025}
1026
/// Builds an `AssetMeta` from a markdown attachment link.
///
/// The URL doubles as the asset id (GitHub exposes no separate attachment
/// id), which is what the download methods expect to receive back.
fn markdown_to_meta(att: &devboy_core::MarkdownAttachment) -> AssetMeta {
    AssetMeta {
        id: att.url.clone(),
        filename: att.filename.clone(),
        // Size/type/author are unknown from a markdown link alone.
        mime_type: None,
        size: None,
        url: Some(att.url.clone()),
        created_at: None,
        author: None,
        cached: false,
        local_path: None,
        checksum_sha256: None,
        analysis: None,
    }
}
1042
/// Subset of the GitHub Actions workflow-run payload consumed here.
#[derive(Debug, Deserialize)]
struct GhWorkflowRun {
    id: u64,
    name: Option<String>,
    // Coarse state; values like "completed"/"in_progress"/"queued" are
    // interpreted in `map_gh_status`.
    status: Option<String>,
    // Final result when completed ("success", "failure", ...).
    conclusion: Option<String>,
    #[allow(dead_code)]
    head_branch: Option<String>,
    head_sha: String,
    html_url: String,
    run_started_at: Option<String>,
    updated_at: Option<String>,
}
1061
/// Envelope for the `/actions/runs` list response.
#[derive(Debug, Deserialize)]
struct GhWorkflowRuns {
    workflow_runs: Vec<GhWorkflowRun>,
}
1067
/// Subset of the GitHub Actions job payload consumed here.
#[derive(Debug, Deserialize)]
struct GhJob {
    id: u64,
    name: String,
    // Interpreted together with `conclusion` by `map_gh_status`.
    status: Option<String>,
    conclusion: Option<String>,
    html_url: Option<String>,
    started_at: Option<String>,
    completed_at: Option<String>,
}
1079
/// Envelope for the `/actions/runs/{id}/jobs` list response.
#[derive(Debug, Deserialize)]
struct GhJobs {
    jobs: Vec<GhJob>,
}
1085
1086fn map_gh_status(status: Option<&str>, conclusion: Option<&str>) -> PipelineStatus {
1087 match (status, conclusion) {
1088 (Some("completed"), Some("success")) => PipelineStatus::Success,
1089 (Some("completed"), Some("failure")) => PipelineStatus::Failed,
1090 (Some("completed"), Some("cancelled")) => PipelineStatus::Canceled,
1091 (Some("completed"), Some("skipped")) => PipelineStatus::Skipped,
1092 (Some("in_progress"), _) => PipelineStatus::Running,
1093 (Some("queued"), _) | (Some("waiting"), _) => PipelineStatus::Pending,
1094 _ => PipelineStatus::Unknown,
1095 }
1096}
1097
1098fn estimate_duration(started: Option<&str>, completed: Option<&str>) -> Option<u64> {
1099 let start = started?.parse::<chrono::DateTime<chrono::Utc>>().ok()?;
1100 let end = completed?.parse::<chrono::DateTime<chrono::Utc>>().ok()?;
1101 Some(
1102 end.signed_duration_since(start)
1103 .num_seconds()
1104 .unsigned_abs(),
1105 )
1106}
1107
/// Removes ANSI escape sequences from CI log text.
///
/// On ESC (0x1b), consumes characters up to and including the first ASCII
/// letter (the terminator of CSI-style sequences like `\x1b[31m`); all other
/// characters pass through unchanged.
fn strip_ansi(text: &str) -> String {
    let mut out = String::with_capacity(text.len());
    let mut iter = text.chars().peekable();
    while let Some(c) = iter.next() {
        if c != '\x1b' {
            out.push(c);
            continue;
        }
        // Swallow the escape sequence body, e.g. "[31m".
        while let Some(&next) = iter.peek() {
            iter.next();
            if next.is_ascii_alphabetic() {
                break;
            }
        }
    }
    out
}
1127
1128fn extract_errors(log: &str, max_lines: usize) -> Option<String> {
1130 let patterns = [
1131 "error[",
1132 "error:",
1133 "FAILED",
1134 "Error:",
1135 "panic",
1136 "FATAL",
1137 "AssertionError",
1138 "TypeError",
1139 "Cannot find",
1140 "not found",
1141 "exit code",
1142 ];
1143 let lines: Vec<&str> = log.lines().collect();
1144 let mut error_lines: Vec<String> = Vec::new();
1145
1146 for (i, line) in lines.iter().enumerate() {
1147 let stripped = strip_ansi(line);
1148 if patterns.iter().any(|p| stripped.contains(p)) {
1149 let start = i.saturating_sub(2);
1151 let end = (i + 3).min(lines.len());
1152 for ctx_line_raw in &lines[start..end] {
1153 let ctx_line = strip_ansi(ctx_line_raw).trim().to_string();
1154 if !ctx_line.is_empty() && !error_lines.contains(&ctx_line) {
1155 error_lines.push(ctx_line);
1156 }
1157 }
1158 if error_lines.len() >= max_lines {
1159 break;
1160 }
1161 }
1162 }
1163
1164 if error_lines.is_empty() {
1165 let tail: Vec<String> = lines
1167 .iter()
1168 .rev()
1169 .filter_map(|l| {
1170 let s = strip_ansi(l).trim().to_string();
1171 if s.is_empty() { None } else { Some(s) }
1172 })
1173 .take(10)
1174 .collect();
1175 if tail.is_empty() {
1176 None
1177 } else {
1178 Some(tail.into_iter().rev().collect::<Vec<_>>().join("\n"))
1179 }
1180 } else {
1181 Some(error_lines.join("\n"))
1182 }
1183}
1184
#[async_trait]
impl PipelineProvider for GitHubClient {
    /// Identifier reported for this pipeline backend.
    fn provider_name(&self) -> &'static str {
        "github"
    }

    /// Assemble a provider-agnostic `PipelineInfo` from GitHub Actions.
    ///
    /// Branch selection order: `input.mr_key` (resolved to the PR's head
    /// branch) → `input.branch` → the literal `"main"`. The newest
    /// in-progress run is preferred over the newest completed run; if the
    /// branch has no runs at all, `Error::NotFound` is returned.
    ///
    /// When `input.include_failed_logs` is set, logs for up to 5 failed jobs
    /// are fetched and condensed into error snippets via `extract_errors`.
    async fn get_pipeline(&self, input: GetPipelineInput) -> Result<PipelineInfo> {
        // 1. Resolve which branch's workflow runs to inspect.
        let branch = if let Some(ref mr_key) = input.mr_key {
            // A PR key wins: look the PR up and use its head branch.
            let number = parse_pr_key(mr_key)?;
            let pr_url = self.repo_url(&format!("/pulls/{number}"));
            let pr: GitHubPullRequest = self.get(&pr_url).await?;
            pr.head.ref_name
        } else if let Some(ref branch) = input.branch {
            branch.clone()
        } else {
            // NOTE(review): hard-coded fallback; repositories whose default
            // branch is not `main` are not covered here — confirm intended.
            "main".to_string()
        };

        // 2. Newest completed run on the branch (per_page=1, newest first).
        let runs_url = self.repo_url(&format!(
            "/actions/runs?branch={}&per_page=1&status=completed",
            urlencoding::encode(&branch)
        ));
        let runs: GhWorkflowRuns = self.get(&runs_url).await?;

        // 3. Newest in-progress run. Errors here degrade to "no active run"
        //    instead of failing the call, so finished pipelines still report.
        let active_runs_url = self.repo_url(&format!(
            "/actions/runs?branch={}&per_page=1&status=in_progress",
            urlencoding::encode(&branch)
        ));
        let active_runs: GhWorkflowRuns =
            self.get(&active_runs_url).await.unwrap_or(GhWorkflowRuns {
                workflow_runs: vec![],
            });

        // Prefer the active run; otherwise fall back to the completed one.
        let run = active_runs
            .workflow_runs
            .into_iter()
            .chain(runs.workflow_runs)
            .next()
            .ok_or_else(|| {
                Error::NotFound(format!("No workflow runs found for branch '{branch}'"))
            })?;

        let run_status = map_gh_status(run.status.as_deref(), run.conclusion.as_deref());

        // 4. Jobs for the chosen run. Single page of up to 100 jobs; runs
        //    with more jobs than that are silently truncated.
        let jobs_url = self.repo_url(&format!("/actions/runs/{}/jobs?per_page=100", run.id));
        let gh_jobs: GhJobs = self.get(&jobs_url).await?;

        let mut summary = PipelineSummary {
            total: gh_jobs.jobs.len() as u32,
            ..Default::default()
        };

        let mut jobs: Vec<PipelineJob> = Vec::new();
        // (id, name) of failed jobs, kept for the optional log fetch below.
        let mut failed_job_ids: Vec<(u64, String)> = Vec::new();

        // 5. Map each GitHub job to a PipelineJob and tally the summary.
        for job in &gh_jobs.jobs {
            let status = map_gh_status(job.status.as_deref(), job.conclusion.as_deref());
            match status {
                PipelineStatus::Success => summary.success += 1,
                PipelineStatus::Failed => {
                    summary.failed += 1;
                    failed_job_ids.push((job.id, job.name.clone()));
                }
                PipelineStatus::Running => summary.running += 1,
                PipelineStatus::Pending => summary.pending += 1,
                PipelineStatus::Canceled => summary.canceled += 1,
                PipelineStatus::Skipped => summary.skipped += 1,
                // Unknown statuses are listed but not counted in any bucket.
                PipelineStatus::Unknown => {}
            }

            let duration =
                estimate_duration(job.started_at.as_deref(), job.completed_at.as_deref());

            jobs.push(PipelineJob {
                id: job.id.to_string(),
                name: job.name.clone(),
                status,
                url: job.html_url.clone(),
                duration,
            });
        }

        // 6. Optionally fetch logs for the first 5 failed jobs and distill
        //    them into short error snippets. Fetch errors are tolerated: the
        //    job is still listed, just without a snippet.
        let mut failed_jobs: Vec<FailedJob> = Vec::new();
        if input.include_failed_logs {
            for (job_id, job_name) in failed_job_ids.iter().take(5) {
                let log_url = self.repo_url(&format!("/actions/jobs/{job_id}/logs"));
                let error_snippet = match self.request(reqwest::Method::GET, &log_url).send().await
                {
                    Ok(resp) if resp.status().is_success() => {
                        let log_text = resp.text().await.unwrap_or_default();
                        extract_errors(&log_text, 20)
                    }
                    _ => None,
                };
                failed_jobs.push(FailedJob {
                    id: job_id.to_string(),
                    name: job_name.clone(),
                    // NOTE(review): job.html_url is available in the loop
                    // above; consider carrying it through instead of None.
                    url: None,
                    error_snippet,
                });
            }
        }

        // Run-level duration approximated as start → last update.
        let duration = estimate_duration(run.run_started_at.as_deref(), run.updated_at.as_deref());

        let stage_name = run.name.unwrap_or_else(|| "CI".to_string());

        // GitHub Actions has no stage concept, so all jobs are grouped into
        // a single synthetic stage named after the workflow.
        Ok(PipelineInfo {
            id: run.id.to_string(),
            status: run_status,
            reference: branch,
            sha: run.head_sha,
            url: Some(run.html_url),
            duration,
            coverage: None, // GitHub Actions does not expose coverage here
            summary,
            stages: vec![PipelineStage {
                name: stage_name,
                jobs,
            }],
            failed_jobs,
        })
    }

    /// Fetch the raw log for `job_id` and post-process it per `options.mode`:
    /// smart error extraction, regex search with context, pagination, or a
    /// truncated full dump. ANSI escape codes are stripped before any mode
    /// is applied.
    async fn get_job_logs(&self, job_id: &str, options: JobLogOptions) -> Result<JobLogOutput> {
        let log_url = self.repo_url(&format!("/actions/jobs/{job_id}/logs"));
        let resp = self
            .request(reqwest::Method::GET, &log_url)
            .send()
            .await
            .map_err(|e| Error::Network(e.to_string()))?;

        if !resp.status().is_success() {
            return Err(Error::from_status(
                resp.status().as_u16(),
                format!("Failed to fetch job logs for job {job_id}"),
            ));
        }

        let content_type = resp
            .headers()
            .get("content-type")
            .and_then(|v| v.to_str().ok())
            .unwrap_or("")
            .to_string();

        // Large logs come back as a ZIP archive; those are rejected rather
        // than unpacked.
        let raw_log = if content_type.contains("application/zip")
            || content_type.contains("application/octet-stream")
        {
            return Err(Error::InvalidData(
                "Job logs returned as ZIP archive. This typically happens for large logs. \
                Try using pattern search mode to find specific errors."
                    .to_string(),
            ));
        } else {
            resp.text()
                .await
                .map_err(|e| Error::Network(e.to_string()))?
        };
        // Normalize once: every mode below operates on ANSI-stripped text.
        let log = strip_ansi(&raw_log);
        let lines: Vec<&str> = log.lines().collect();
        let total_lines = lines.len();

        let (content, mode_name) = match options.mode {
            JobLogMode::Smart => {
                let extracted = extract_errors(&log, 30).unwrap_or_else(|| {
                    // No recognizable errors: fall back to the last 20
                    // lines, restored to their original order.
                    lines
                        .iter()
                        .rev()
                        .take(20)
                        .copied()
                        .collect::<Vec<_>>()
                        .into_iter()
                        .rev()
                        .collect::<Vec<_>>()
                        .join("\n")
                });
                (extracted, "smart")
            }
            JobLogMode::Search {
                ref pattern,
                context,
                max_matches,
            } => {
                // An invalid regex degrades to a literal (escaped) search
                // instead of erroring out.
                let re = regex::Regex::new(pattern)
                    .unwrap_or_else(|_| regex::Regex::new(&regex::escape(pattern)).unwrap());
                let mut matches = Vec::new();
                for (i, line) in lines.iter().enumerate() {
                    if re.is_match(line) {
                        let start = i.saturating_sub(context);
                        let end = (i + context + 1).min(total_lines);
                        matches.push(format!("--- Match at line {} ---", i + 1));
                        for (j, ctx_line) in lines[start..end].iter().enumerate() {
                            let line_num = start + j;
                            // ">>>" marks the matching line within its window.
                            let marker = if line_num == i { ">>>" } else { "   " };
                            matches.push(format!("{} {}: {}", marker, line_num + 1, ctx_line));
                        }
                        // NOTE(review): the match count is inferred from the
                        // emitted line count assuming a full context window
                        // per match; windows clipped at the log edges make
                        // this undercount, so slightly more than
                        // `max_matches` matches can be emitted — confirm
                        // whether an exact cap is required.
                        if matches.len() / (context * 2 + 2) >= max_matches {
                            break;
                        }
                    }
                }
                (matches.join("\n"), "search")
            }
            JobLogMode::Paginated { offset, limit } => {
                let page: Vec<&str> = lines.iter().skip(offset).take(limit).copied().collect();
                (page.join("\n"), "paginated")
            }
            JobLogMode::Full { max_lines } => {
                let truncated: Vec<&str> = lines.iter().take(max_lines).copied().collect();
                (truncated.join("\n"), "full")
            }
        };

        Ok(JobLogOutput {
            job_id: job_id.to_string(),
            job_name: None, // the logs endpoint does not return the job name
            content,
            mode: mode_name.to_string(),
            total_lines: Some(total_lines),
        })
    }
}
1421
1422#[async_trait]
1423impl Provider for GitHubClient {
1424 async fn get_current_user(&self) -> Result<User> {
1425 let url = format!("{}/user", self.base_url);
1426 let gh_user: GitHubUser = self.get(&url).await?;
1427 Ok(map_user_required(Some(&gh_user)))
1428 }
1429}
1430
1431fn parse_issue_key(key: &str) -> Result<u64> {
1437 key.strip_prefix("gh#")
1438 .and_then(|s| s.parse::<u64>().ok())
1439 .ok_or_else(|| Error::InvalidData(format!("Invalid issue key: {}", key)))
1440}
1441
1442fn parse_pr_key(key: &str) -> Result<u64> {
1444 key.strip_prefix("pr#")
1445 .and_then(|s| s.parse::<u64>().ok())
1446 .ok_or_else(|| Error::InvalidData(format!("Invalid PR key: {}", key)))
1447}
1448
/// Extract the numeric part of a discussion id such as `thread-123`,
/// `review-123`, `comment-123`, or a bare `123`.
///
/// Returns `None` when the remainder after a recognized prefix (or the whole
/// id, if no known prefix matches) is not a valid `u64`.
fn parse_discussion_numeric_id(id: &str) -> Option<u64> {
    for prefix in ["thread-", "review-", "comment-"] {
        if let Some(rest) = id.strip_prefix(prefix) {
            return rest.parse().ok();
        }
    }
    id.parse().ok()
}
1472
1473#[cfg(test)]
1474mod tests {
1475 use super::*;
1476 use crate::types::GitHubBranchRef;
1477
1478 #[test]
1479 fn test_parse_issue_key() {
1480 assert_eq!(parse_issue_key("gh#123").unwrap(), 123);
1481 assert_eq!(parse_issue_key("gh#1").unwrap(), 1);
1482 assert!(parse_issue_key("pr#123").is_err());
1483 assert!(parse_issue_key("123").is_err());
1484 assert!(parse_issue_key("gh#").is_err());
1485 }
1486
1487 #[test]
1488 fn test_parse_pr_key() {
1489 assert_eq!(parse_pr_key("pr#456").unwrap(), 456);
1490 assert_eq!(parse_pr_key("pr#1").unwrap(), 1);
1491 assert!(parse_pr_key("gh#123").is_err());
1492 assert!(parse_pr_key("456").is_err());
1493 }
1494
1495 #[test]
1496 fn test_parse_discussion_numeric_id_strips_prefixes() {
1497 assert_eq!(
1506 parse_discussion_numeric_id("thread-3694869522"),
1507 Some(3694869522)
1508 );
1509 assert_eq!(
1510 parse_discussion_numeric_id("review-3694869522"),
1511 Some(3694869522)
1512 );
1513 assert_eq!(
1514 parse_discussion_numeric_id("comment-4147511088"),
1515 Some(4147511088)
1516 );
1517 assert_eq!(parse_discussion_numeric_id("12345"), Some(12345));
1519 assert_eq!(parse_discussion_numeric_id("weird-42"), None);
1523 assert_eq!(parse_discussion_numeric_id("review-notnumeric"), None);
1524 assert_eq!(parse_discussion_numeric_id(""), None);
1525 }
1526
1527 #[test]
1528 fn test_map_user() {
1529 let gh_user = GitHubUser {
1530 id: 123,
1531 login: "testuser".to_string(),
1532 name: Some("Test User".to_string()),
1533 email: Some("test@example.com".to_string()),
1534 avatar_url: Some("https://example.com/avatar.png".to_string()),
1535 };
1536
1537 let user = map_user(Some(&gh_user)).unwrap();
1538 assert_eq!(user.id, "123");
1539 assert_eq!(user.username, "testuser");
1540 assert_eq!(user.name, Some("Test User".to_string()));
1541 assert_eq!(user.email, Some("test@example.com".to_string()));
1542 }
1543
1544 #[test]
1545 fn test_map_user_none() {
1546 assert!(map_user(None).is_none());
1547 }
1548
1549 #[test]
1550 fn test_map_user_required_with_user() {
1551 let gh_user = GitHubUser {
1552 id: 1,
1553 login: "user1".to_string(),
1554 name: Some("User One".to_string()),
1555 email: None,
1556 avatar_url: None,
1557 };
1558 let user = map_user_required(Some(&gh_user));
1559 assert_eq!(user.username, "user1");
1560 }
1561
1562 #[test]
1563 fn test_map_user_required_without_user() {
1564 let user = map_user_required(None);
1565 assert_eq!(user.id, "unknown");
1566 assert_eq!(user.username, "unknown");
1567 assert_eq!(user.name, Some("Unknown".to_string()));
1568 }
1569
1570 #[test]
1571 fn test_map_labels() {
1572 let labels = vec![
1573 GitHubLabel {
1574 id: 1,
1575 name: "bug".to_string(),
1576 color: None,
1577 description: None,
1578 },
1579 GitHubLabel {
1580 id: 2,
1581 name: "feature".to_string(),
1582 color: Some("00ff00".to_string()),
1583 description: Some("Feature request".to_string()),
1584 },
1585 ];
1586 let result = map_labels(&labels);
1587 assert_eq!(result, vec!["bug", "feature"]);
1588 }
1589
1590 #[test]
1591 fn test_map_labels_empty() {
1592 let result = map_labels(&[]);
1593 assert!(result.is_empty());
1594 }
1595
1596 #[test]
1597 fn test_map_comment() {
1598 let gh_comment = GitHubComment {
1599 id: 42,
1600 body: "Nice work!".to_string(),
1601 user: Some(GitHubUser {
1602 id: 1,
1603 login: "reviewer".to_string(),
1604 name: None,
1605 email: None,
1606 avatar_url: None,
1607 }),
1608 created_at: "2024-01-15T10:00:00Z".to_string(),
1609 updated_at: Some("2024-01-15T12:00:00Z".to_string()),
1610 };
1611
1612 let comment = map_comment(&gh_comment);
1613 assert_eq!(comment.id, "42");
1614 assert_eq!(comment.body, "Nice work!");
1615 assert!(comment.author.is_some());
1616 assert_eq!(comment.author.unwrap().username, "reviewer");
1617 assert_eq!(comment.created_at, Some("2024-01-15T10:00:00Z".to_string()));
1618 assert_eq!(comment.updated_at, Some("2024-01-15T12:00:00Z".to_string()));
1619 assert!(comment.position.is_none());
1620 }
1621
1622 #[test]
1623 fn test_map_review_comment_with_line() {
1624 let gh_comment = GitHubReviewComment {
1625 id: 100,
1626 body: "Fix this".to_string(),
1627 user: Some(GitHubUser {
1628 id: 1,
1629 login: "reviewer".to_string(),
1630 name: None,
1631 email: None,
1632 avatar_url: None,
1633 }),
1634 created_at: "2024-01-15T10:00:00Z".to_string(),
1635 updated_at: None,
1636 path: "src/main.rs".to_string(),
1637 line: Some(42),
1638 original_line: None,
1639 position: None,
1640 side: Some("RIGHT".to_string()),
1641 diff_hunk: None,
1642 commit_id: Some("abc123".to_string()),
1643 original_commit_id: None,
1644 in_reply_to_id: None,
1645 };
1646
1647 let comment = map_review_comment(&gh_comment);
1648 assert_eq!(comment.id, "100");
1649 assert_eq!(comment.body, "Fix this");
1650 let pos = comment.position.unwrap();
1651 assert_eq!(pos.file_path, "src/main.rs");
1652 assert_eq!(pos.line, 42);
1653 assert_eq!(pos.line_type, "new");
1654 assert_eq!(pos.commit_sha, Some("abc123".to_string()));
1655 }
1656
1657 #[test]
1658 fn test_map_review_comment_with_left_side() {
1659 let gh_comment = GitHubReviewComment {
1660 id: 101,
1661 body: "Old code".to_string(),
1662 user: None,
1663 created_at: "2024-01-15T10:00:00Z".to_string(),
1664 updated_at: None,
1665 path: "src/lib.rs".to_string(),
1666 line: Some(10),
1667 original_line: None,
1668 position: None,
1669 side: Some("LEFT".to_string()),
1670 diff_hunk: None,
1671 commit_id: None,
1672 original_commit_id: Some("def456".to_string()),
1673 in_reply_to_id: None,
1674 };
1675
1676 let comment = map_review_comment(&gh_comment);
1677 let pos = comment.position.unwrap();
1678 assert_eq!(pos.line_type, "old");
1679 assert_eq!(pos.commit_sha, Some("def456".to_string()));
1680 }
1681
1682 #[test]
1683 fn test_map_review_comment_with_original_line_fallback() {
1684 let gh_comment = GitHubReviewComment {
1685 id: 102,
1686 body: "Outdated".to_string(),
1687 user: None,
1688 created_at: "2024-01-15T10:00:00Z".to_string(),
1689 updated_at: None,
1690 path: "src/lib.rs".to_string(),
1691 line: None,
1692 original_line: Some(5),
1693 position: None,
1694 side: None,
1695 diff_hunk: None,
1696 commit_id: None,
1697 original_commit_id: None,
1698 in_reply_to_id: None,
1699 };
1700
1701 let comment = map_review_comment(&gh_comment);
1702 let pos = comment.position.unwrap();
1703 assert_eq!(pos.line, 5);
1704 assert_eq!(pos.line_type, "new"); }
1706
1707 #[test]
1708 fn test_map_review_comment_without_line() {
1709 let gh_comment = GitHubReviewComment {
1710 id: 103,
1711 body: "General".to_string(),
1712 user: None,
1713 created_at: "2024-01-15T10:00:00Z".to_string(),
1714 updated_at: None,
1715 path: "src/lib.rs".to_string(),
1716 line: None,
1717 original_line: None,
1718 position: None,
1719 side: None,
1720 diff_hunk: None,
1721 commit_id: None,
1722 original_commit_id: None,
1723 in_reply_to_id: None,
1724 };
1725
1726 let comment = map_review_comment(&gh_comment);
1727 assert!(comment.position.is_none());
1728 }
1729
1730 #[test]
1731 fn test_map_file() {
1732 let gh_file = GitHubFile {
1733 sha: "abc123".to_string(),
1734 filename: "src/main.rs".to_string(),
1735 status: "modified".to_string(),
1736 additions: 10,
1737 deletions: 3,
1738 changes: 13,
1739 patch: Some("@@ -1,3 +1,10 @@\n+new line".to_string()),
1740 previous_filename: None,
1741 };
1742
1743 let diff = map_file(&gh_file);
1744 assert_eq!(diff.file_path, "src/main.rs");
1745 assert!(!diff.new_file);
1746 assert!(!diff.deleted_file);
1747 assert!(!diff.renamed_file);
1748 assert_eq!(diff.additions, Some(10));
1749 assert_eq!(diff.deletions, Some(3));
1750 assert!(diff.diff.contains("+new line"));
1751 }
1752
1753 #[test]
1754 fn test_map_file_added() {
1755 let gh_file = GitHubFile {
1756 sha: "abc".to_string(),
1757 filename: "new_file.rs".to_string(),
1758 status: "added".to_string(),
1759 additions: 50,
1760 deletions: 0,
1761 changes: 50,
1762 patch: None,
1763 previous_filename: None,
1764 };
1765
1766 let diff = map_file(&gh_file);
1767 assert!(diff.new_file);
1768 assert!(!diff.deleted_file);
1769 assert!(diff.diff.is_empty());
1770 }
1771
1772 #[test]
1773 fn test_map_file_removed() {
1774 let gh_file = GitHubFile {
1775 sha: "abc".to_string(),
1776 filename: "old_file.rs".to_string(),
1777 status: "removed".to_string(),
1778 additions: 0,
1779 deletions: 30,
1780 changes: 30,
1781 patch: None,
1782 previous_filename: None,
1783 };
1784
1785 let diff = map_file(&gh_file);
1786 assert!(diff.deleted_file);
1787 assert!(!diff.new_file);
1788 }
1789
1790 #[test]
1791 fn test_map_file_renamed() {
1792 let gh_file = GitHubFile {
1793 sha: "abc".to_string(),
1794 filename: "new_name.rs".to_string(),
1795 status: "renamed".to_string(),
1796 additions: 0,
1797 deletions: 0,
1798 changes: 0,
1799 patch: None,
1800 previous_filename: Some("old_name.rs".to_string()),
1801 };
1802
1803 let diff = map_file(&gh_file);
1804 assert!(diff.renamed_file);
1805 assert_eq!(diff.old_path, Some("old_name.rs".to_string()));
1806 }
1807
1808 #[test]
1809 fn test_map_pull_request_with_full_data() {
1810 let pr = GitHubPullRequest {
1811 id: 1,
1812 number: 10,
1813 title: "Add feature".to_string(),
1814 body: Some("Description".to_string()),
1815 state: "open".to_string(),
1816 html_url: "https://github.com/test/repo/pull/10".to_string(),
1817 draft: false,
1818 merged: false,
1819 merged_at: None,
1820 user: Some(GitHubUser {
1821 id: 1,
1822 login: "author".to_string(),
1823 name: None,
1824 email: None,
1825 avatar_url: None,
1826 }),
1827 assignees: vec![GitHubUser {
1828 id: 2,
1829 login: "assignee".to_string(),
1830 name: Some("Assignee".to_string()),
1831 email: None,
1832 avatar_url: None,
1833 }],
1834 requested_reviewers: vec![GitHubUser {
1835 id: 3,
1836 login: "reviewer".to_string(),
1837 name: None,
1838 email: None,
1839 avatar_url: None,
1840 }],
1841 labels: vec![GitHubLabel {
1842 id: 1,
1843 name: "enhancement".to_string(),
1844 color: None,
1845 description: None,
1846 }],
1847 head: GitHubBranchRef {
1848 ref_name: "feature-branch".to_string(),
1849 sha: "abc123".to_string(),
1850 },
1851 base: GitHubBranchRef {
1852 ref_name: "main".to_string(),
1853 sha: "def456".to_string(),
1854 },
1855 created_at: "2024-01-01T00:00:00Z".to_string(),
1856 updated_at: "2024-01-02T00:00:00Z".to_string(),
1857 };
1858
1859 let mr = map_pull_request(&pr);
1860 assert_eq!(mr.key, "pr#10");
1861 assert_eq!(mr.title, "Add feature");
1862 assert_eq!(mr.description, Some("Description".to_string()));
1863 assert_eq!(mr.state, "open");
1864 assert_eq!(mr.source, "github");
1865 assert_eq!(mr.source_branch, "feature-branch");
1866 assert_eq!(mr.target_branch, "main");
1867 assert!(mr.author.is_some());
1868 assert_eq!(mr.assignees.len(), 1);
1869 assert_eq!(mr.assignees[0].username, "assignee");
1870 assert_eq!(mr.reviewers.len(), 1);
1871 assert_eq!(mr.reviewers[0].username, "reviewer");
1872 assert_eq!(mr.labels, vec!["enhancement"]);
1873 assert!(!mr.draft);
1874 }
1875
1876 #[test]
1877 fn test_map_pull_request_merged_at() {
1878 let pr = GitHubPullRequest {
1879 id: 1,
1880 number: 10,
1881 title: "Merged PR".to_string(),
1882 body: None,
1883 state: "closed".to_string(),
1884 html_url: "https://github.com/test/repo/pull/10".to_string(),
1885 draft: false,
1886 merged: false,
1887 merged_at: Some("2024-01-03T00:00:00Z".to_string()),
1888 user: None,
1889 assignees: vec![],
1890 requested_reviewers: vec![],
1891 labels: vec![],
1892 head: GitHubBranchRef {
1893 ref_name: "feature".to_string(),
1894 sha: "abc123".to_string(),
1895 },
1896 base: GitHubBranchRef {
1897 ref_name: "main".to_string(),
1898 sha: "def456".to_string(),
1899 },
1900 created_at: "2024-01-01T00:00:00Z".to_string(),
1901 updated_at: "2024-01-02T00:00:00Z".to_string(),
1902 };
1903
1904 let mr = map_pull_request(&pr);
1905 assert_eq!(mr.state, "merged");
1906 }
1907
1908 #[test]
1909 fn test_map_issue() {
1910 let gh_issue = GitHubIssue {
1911 id: 1,
1912 number: 42,
1913 title: "Test Issue".to_string(),
1914 body: Some("Issue body".to_string()),
1915 state: "open".to_string(),
1916 html_url: "https://github.com/test/repo/issues/42".to_string(),
1917 user: Some(GitHubUser {
1918 id: 1,
1919 login: "author".to_string(),
1920 name: None,
1921 email: None,
1922 avatar_url: None,
1923 }),
1924 assignees: vec![],
1925 labels: vec![GitHubLabel {
1926 id: 1,
1927 name: "bug".to_string(),
1928 color: None,
1929 description: None,
1930 }],
1931 created_at: "2024-01-01T00:00:00Z".to_string(),
1932 updated_at: "2024-01-02T00:00:00Z".to_string(),
1933 closed_at: None,
1934 pull_request: None,
1935 };
1936
1937 let issue = map_issue(&gh_issue);
1938 assert_eq!(issue.key, "gh#42");
1939 assert_eq!(issue.title, "Test Issue");
1940 assert_eq!(issue.state, "open");
1941 assert_eq!(issue.source, "github");
1942 assert_eq!(issue.labels, vec!["bug"]);
1943 }
1944
1945 #[test]
1946 fn test_map_issue_with_assignees() {
1947 let gh_issue = GitHubIssue {
1948 id: 1,
1949 number: 1,
1950 title: "Issue".to_string(),
1951 body: None,
1952 state: "open".to_string(),
1953 html_url: "https://github.com/test/repo/issues/1".to_string(),
1954 user: None,
1955 assignees: vec![
1956 GitHubUser {
1957 id: 1,
1958 login: "user1".to_string(),
1959 name: None,
1960 email: None,
1961 avatar_url: None,
1962 },
1963 GitHubUser {
1964 id: 2,
1965 login: "user2".to_string(),
1966 name: None,
1967 email: None,
1968 avatar_url: None,
1969 },
1970 ],
1971 labels: vec![],
1972 created_at: "2024-01-01T00:00:00Z".to_string(),
1973 updated_at: "2024-01-02T00:00:00Z".to_string(),
1974 closed_at: None,
1975 pull_request: None,
1976 };
1977
1978 let issue = map_issue(&gh_issue);
1979 assert_eq!(issue.assignees.len(), 2);
1980 assert_eq!(issue.assignees[0].username, "user1");
1981 assert_eq!(issue.assignees[1].username, "user2");
1982 }
1983
1984 #[test]
1985 fn test_map_pull_request_states() {
1986 let base_pr = || GitHubPullRequest {
1987 id: 1,
1988 number: 10,
1989 title: "Test PR".to_string(),
1990 body: None,
1991 state: "open".to_string(),
1992 html_url: "https://github.com/test/repo/pull/10".to_string(),
1993 draft: false,
1994 merged: false,
1995 merged_at: None,
1996 user: None,
1997 assignees: vec![],
1998 requested_reviewers: vec![],
1999 labels: vec![],
2000 head: GitHubBranchRef {
2001 ref_name: "feature".to_string(),
2002 sha: "abc123".to_string(),
2003 },
2004 base: GitHubBranchRef {
2005 ref_name: "main".to_string(),
2006 sha: "def456".to_string(),
2007 },
2008 created_at: "2024-01-01T00:00:00Z".to_string(),
2009 updated_at: "2024-01-02T00:00:00Z".to_string(),
2010 };
2011
2012 let pr = map_pull_request(&base_pr());
2014 assert_eq!(pr.state, "open");
2015
2016 let mut draft_pr = base_pr();
2018 draft_pr.draft = true;
2019 let pr = map_pull_request(&draft_pr);
2020 assert_eq!(pr.state, "draft");
2021
2022 let mut merged_pr = base_pr();
2024 merged_pr.merged = true;
2025 let pr = map_pull_request(&merged_pr);
2026 assert_eq!(pr.state, "merged");
2027
2028 let mut closed_pr = base_pr();
2030 closed_pr.state = "closed".to_string();
2031 let pr = map_pull_request(&closed_pr);
2032 assert_eq!(pr.state, "closed");
2033 }
2034
2035 fn token(s: &str) -> SecretString {
2036 SecretString::from(s.to_string())
2037 }
2038
2039 #[test]
2040 fn test_repo_url() {
2041 let client =
2042 GitHubClient::with_base_url("https://api.github.com", "owner", "repo", token("token"));
2043 assert_eq!(
2044 client.repo_url("/issues"),
2045 "https://api.github.com/repos/owner/repo/issues"
2046 );
2047 assert_eq!(
2048 client.repo_url("/pulls/1"),
2049 "https://api.github.com/repos/owner/repo/pulls/1"
2050 );
2051 }
2052
2053 #[test]
2054 fn test_repo_url_strips_trailing_slash() {
2055 let client =
2056 GitHubClient::with_base_url("https://api.github.com/", "owner", "repo", token("token"));
2057 assert_eq!(
2058 client.repo_url("/issues"),
2059 "https://api.github.com/repos/owner/repo/issues"
2060 );
2061 }
2062
2063 #[test]
2064 fn test_provider_name() {
2065 let client = GitHubClient::new("owner", "repo", token("token"));
2066 assert_eq!(IssueProvider::provider_name(&client), "github");
2067 assert_eq!(MergeRequestProvider::provider_name(&client), "github");
2068 }
2069
2070 mod integration {
2075 use super::*;
2076 use httpmock::prelude::*;
2077
2078 fn create_test_client(server: &MockServer) -> GitHubClient {
2079 GitHubClient::with_base_url(server.base_url(), "owner", "repo", token("test-token"))
2080 }
2081
2082 fn sample_issue_json() -> serde_json::Value {
2083 serde_json::json!({
2084 "id": 1,
2085 "number": 42,
2086 "title": "Test Issue",
2087 "body": "Issue body",
2088 "state": "open",
2089 "html_url": "https://github.com/owner/repo/issues/42",
2090 "user": {"id": 1, "login": "author"},
2091 "assignees": [],
2092 "labels": [{"id": 1, "name": "bug"}],
2093 "created_at": "2024-01-01T00:00:00Z",
2094 "updated_at": "2024-01-02T00:00:00Z"
2095 })
2096 }
2097
2098 fn sample_pr_json() -> serde_json::Value {
2099 serde_json::json!({
2100 "id": 1,
2101 "number": 10,
2102 "title": "Test PR",
2103 "body": "PR body",
2104 "state": "open",
2105 "html_url": "https://github.com/owner/repo/pull/10",
2106 "draft": false,
2107 "merged": false,
2108 "user": {"id": 1, "login": "author"},
2109 "assignees": [],
2110 "requested_reviewers": [],
2111 "labels": [],
2112 "head": {"ref": "feature", "sha": "abc123"},
2113 "base": {"ref": "main", "sha": "def456"},
2114 "created_at": "2024-01-01T00:00:00Z",
2115 "updated_at": "2024-01-02T00:00:00Z"
2116 })
2117 }
2118
2119 #[tokio::test]
2120 async fn test_get_issues() {
2121 let server = MockServer::start();
2122
2123 server.mock(|when, then| {
2124 when.method(GET)
2125 .path("/repos/owner/repo/issues")
2126 .header("Authorization", "Bearer test-token");
2127 then.status(200)
2128 .json_body(serde_json::json!([sample_issue_json()]));
2129 });
2130
2131 let client = create_test_client(&server);
2132 let issues = client
2133 .get_issues(IssueFilter {
2134 state: Some("open".to_string()),
2135 ..Default::default()
2136 })
2137 .await
2138 .unwrap()
2139 .items;
2140
2141 assert_eq!(issues.len(), 1);
2142 assert_eq!(issues[0].key, "gh#42");
2143 assert_eq!(issues[0].title, "Test Issue");
2144 }
2145
2146 #[tokio::test]
2147 async fn test_get_issues_filters_pull_requests() {
2148 let server = MockServer::start();
2149
2150 let mut pr_as_issue = sample_issue_json();
2151 pr_as_issue["pull_request"] = serde_json::json!({"url": "..."});
2152 pr_as_issue["number"] = serde_json::json!(99);
2153
2154 server.mock(|when, then| {
2155 when.method(GET).path("/repos/owner/repo/issues");
2156 then.status(200)
2157 .json_body(serde_json::json!([sample_issue_json(), pr_as_issue]));
2158 });
2159
2160 let client = create_test_client(&server);
2161 let issues = client
2162 .get_issues(IssueFilter::default())
2163 .await
2164 .unwrap()
2165 .items;
2166
2167 assert_eq!(issues.len(), 1);
2169 assert_eq!(issues[0].key, "gh#42");
2170 }
2171
2172 #[tokio::test]
2173 async fn test_get_issues_with_all_filters() {
2174 let server = MockServer::start();
2175
2176 server.mock(|when, then| {
2177 when.method(GET)
2178 .path("/repos/owner/repo/issues")
2179 .query_param("state", "closed")
2180 .query_param("labels", "bug,feature")
2181 .query_param("assignee", "user1")
2182 .query_param("per_page", "10")
2183 .query_param("page", "2")
2184 .query_param("sort", "created")
2185 .query_param("direction", "asc");
2186 then.status(200).json_body(serde_json::json!([]));
2187 });
2188
2189 let client = create_test_client(&server);
2190 let issues = client
2191 .get_issues(IssueFilter {
2192 state: Some("closed".to_string()),
2193 labels: Some(vec!["bug".to_string(), "feature".to_string()]),
2194 assignee: Some("user1".to_string()),
2195 limit: Some(10),
2196 offset: Some(10),
2197 sort_by: Some("created_at".to_string()),
2198 sort_order: Some("asc".to_string()),
2199 ..Default::default()
2200 })
2201 .await
2202 .unwrap()
2203 .items;
2204
2205 assert!(issues.is_empty());
2206 }
2207
2208 #[tokio::test]
2209 async fn test_get_issue() {
2210 let server = MockServer::start();
2211
2212 server.mock(|when, then| {
2213 when.method(GET).path("/repos/owner/repo/issues/42");
2214 then.status(200).json_body(sample_issue_json());
2215 });
2216
2217 let client = create_test_client(&server);
2218 let issue = client.get_issue("gh#42").await.unwrap();
2219
2220 assert_eq!(issue.key, "gh#42");
2221 assert_eq!(issue.title, "Test Issue");
2222 }
2223
2224 #[tokio::test]
2225 async fn test_get_issue_rejects_pr() {
2226 let server = MockServer::start();
2227
2228 let mut issue_json = sample_issue_json();
2229 issue_json["pull_request"] = serde_json::json!({"url": "..."});
2230
2231 server.mock(|when, then| {
2232 when.method(GET).path("/repos/owner/repo/issues/42");
2233 then.status(200).json_body(issue_json);
2234 });
2235
2236 let client = create_test_client(&server);
2237 let result = client.get_issue("gh#42").await;
2238 assert!(result.is_err());
2239 }
2240
2241 #[tokio::test]
2242 async fn test_create_issue() {
2243 let server = MockServer::start();
2244
2245 server.mock(|when, then| {
2246 when.method(POST)
2247 .path("/repos/owner/repo/issues")
2248 .body_includes("\"title\":\"New Issue\"");
2249 then.status(201).json_body(sample_issue_json());
2250 });
2251
2252 let client = create_test_client(&server);
2253 let issue = client
2254 .create_issue(CreateIssueInput {
2255 title: "New Issue".to_string(),
2256 description: Some("Body".to_string()),
2257 labels: vec!["bug".to_string()],
2258 ..Default::default()
2259 })
2260 .await
2261 .unwrap();
2262
2263 assert_eq!(issue.key, "gh#42");
2264 }
2265
2266 #[tokio::test]
2267 async fn test_update_issue() {
2268 let server = MockServer::start();
2269
2270 server.mock(|when, then| {
2271 when.method(PATCH)
2272 .path("/repos/owner/repo/issues/42")
2273 .body_includes("\"state\":\"closed\"");
2274 then.status(200).json_body(sample_issue_json());
2275 });
2276
2277 let client = create_test_client(&server);
2278 let issue = client
2279 .update_issue(
2280 "gh#42",
2281 UpdateIssueInput {
2282 state: Some("closed".to_string()),
2283 ..Default::default()
2284 },
2285 )
2286 .await
2287 .unwrap();
2288
2289 assert_eq!(issue.key, "gh#42");
2290 }
2291
2292 #[tokio::test]
2293 async fn test_update_issue_state_mapping() {
2294 let server = MockServer::start();
2295
2296 server.mock(|when, then| {
2297 when.method(PATCH)
2298 .path("/repos/owner/repo/issues/42")
2299 .body_includes("\"state\":\"open\"");
2300 then.status(200).json_body(sample_issue_json());
2301 });
2302
2303 let client = create_test_client(&server);
2304 let result = client
2305 .update_issue(
2306 "gh#42",
2307 UpdateIssueInput {
2308 state: Some("opened".to_string()),
2309 ..Default::default()
2310 },
2311 )
2312 .await;
2313
2314 assert!(result.is_ok());
2315 }
2316
2317 #[tokio::test]
2318 async fn test_get_comments() {
2319 let server = MockServer::start();
2320
2321 server.mock(|when, then| {
2322 when.method(GET)
2323 .path("/repos/owner/repo/issues/42/comments");
2324 then.status(200).json_body(serde_json::json!([{
2325 "id": 1,
2326 "body": "Comment text",
2327 "user": {"id": 1, "login": "commenter"},
2328 "created_at": "2024-01-15T10:00:00Z"
2329 }]));
2330 });
2331
2332 let client = create_test_client(&server);
2333 let comments = client.get_comments("gh#42").await.unwrap().items;
2334
2335 assert_eq!(comments.len(), 1);
2336 assert_eq!(comments[0].body, "Comment text");
2337 }
2338
2339 #[tokio::test]
2340 async fn test_add_comment() {
2341 let server = MockServer::start();
2342
2343 server.mock(|when, then| {
2344 when.method(POST)
2345 .path("/repos/owner/repo/issues/42/comments")
2346 .body_includes("\"body\":\"My comment\"");
2347 then.status(201).json_body(serde_json::json!({
2348 "id": 1,
2349 "body": "My comment",
2350 "user": {"id": 1, "login": "me"},
2351 "created_at": "2024-01-15T10:00:00Z"
2352 }));
2353 });
2354
2355 let client = create_test_client(&server);
2356 let comment = IssueProvider::add_comment(&client, "gh#42", "My comment")
2357 .await
2358 .unwrap();
2359
2360 assert_eq!(comment.body, "My comment");
2361 }
2362
2363 #[tokio::test]
2364 async fn test_get_pull_request() {
2365 let server = MockServer::start();
2366
2367 server.mock(|when, then| {
2368 when.method(GET).path("/repos/owner/repo/pulls/10");
2369 then.status(200).json_body(sample_pr_json());
2370 });
2371
2372 let client = create_test_client(&server);
2373 let mr = client.get_merge_request("pr#10").await.unwrap();
2374
2375 assert_eq!(mr.key, "pr#10");
2376 assert_eq!(mr.title, "Test PR");
2377 assert_eq!(mr.source_branch, "feature");
2378 assert_eq!(mr.target_branch, "main");
2379 }
2380
2381 #[tokio::test]
2382 async fn test_get_pull_requests() {
2383 let server = MockServer::start();
2384
2385 server.mock(|when, then| {
2386 when.method(GET).path("/repos/owner/repo/pulls");
2387 then.status(200)
2388 .json_body(serde_json::json!([sample_pr_json()]));
2389 });
2390
2391 let client = create_test_client(&server);
2392 let mrs = client
2393 .get_merge_requests(MrFilter::default())
2394 .await
2395 .unwrap()
2396 .items;
2397
2398 assert_eq!(mrs.len(), 1);
2399 assert_eq!(mrs[0].key, "pr#10");
2400 }
2401
2402 #[tokio::test]
2403 async fn test_get_pull_requests_with_filters() {
2404 let server = MockServer::start();
2405
2406 server.mock(|when, then| {
2407 when.method(GET)
2408 .path("/repos/owner/repo/pulls")
2409 .query_param("state", "closed")
2410 .query_param("head", "feature")
2411 .query_param("base", "main")
2412 .query_param("per_page", "5");
2413 then.status(200).json_body(serde_json::json!([]));
2414 });
2415
2416 let client = create_test_client(&server);
2417 let mrs = client
2418 .get_merge_requests(MrFilter {
2419 state: Some("closed".to_string()),
2420 source_branch: Some("feature".to_string()),
2421 target_branch: Some("main".to_string()),
2422 limit: Some(5),
2423 ..Default::default()
2424 })
2425 .await
2426 .unwrap()
2427 .items;
2428
2429 assert!(mrs.is_empty());
2430 }
2431
2432 #[tokio::test]
2433 async fn test_get_pull_requests_merged_filter() {
2434 let server = MockServer::start();
2435
2436 let mut merged_pr = sample_pr_json();
2437 merged_pr["merged"] = serde_json::json!(true);
2438 merged_pr["state"] = serde_json::json!("closed");
2439
2440 let open_pr = sample_pr_json();
2441
2442 server.mock(|when, then| {
2443 when.method(GET)
2444 .path("/repos/owner/repo/pulls")
2445 .query_param("state", "closed");
2446 then.status(200)
2447 .json_body(serde_json::json!([merged_pr, open_pr]));
2448 });
2449
2450 let client = create_test_client(&server);
2451 let mrs = client
2452 .get_merge_requests(MrFilter {
2453 state: Some("merged".to_string()),
2454 ..Default::default()
2455 })
2456 .await
2457 .unwrap()
2458 .items;
2459
2460 assert_eq!(mrs.len(), 1);
2462 assert_eq!(mrs[0].state, "merged");
2463 }
2464
2465 #[tokio::test]
2466 async fn test_get_discussions() {
2467 let server = MockServer::start();
2468
2469 server.mock(|when, then| {
2471 when.method(GET).path("/repos/owner/repo/pulls/10/reviews");
2472 then.status(200).json_body(serde_json::json!([{
2473 "id": 1,
2474 "user": {"id": 1, "login": "reviewer"},
2475 "body": "LGTM",
2476 "state": "APPROVED",
2477 "submitted_at": "2024-01-15T10:00:00Z"
2478 }]));
2479 });
2480
2481 server.mock(|when, then| {
2483 when.method(GET).path("/repos/owner/repo/pulls/10/comments");
2484 then.status(200).json_body(serde_json::json!([{
2485 "id": 100,
2486 "body": "Fix this line",
2487 "user": {"id": 2, "login": "reviewer2"},
2488 "created_at": "2024-01-15T11:00:00Z",
2489 "path": "src/main.rs",
2490 "line": 42,
2491 "side": "RIGHT"
2492 }]));
2493 });
2494
2495 server.mock(|when, then| {
2497 when.method(GET)
2498 .path("/repos/owner/repo/issues/10/comments");
2499 then.status(200).json_body(serde_json::json!([{
2500 "id": 200,
2501 "body": "General comment",
2502 "user": {"id": 3, "login": "user3"},
2503 "created_at": "2024-01-15T12:00:00Z"
2504 }]));
2505 });
2506
2507 let client = create_test_client(&server);
2508 let discussions = client.get_discussions("pr#10").await.unwrap().items;
2509
2510 assert_eq!(discussions.len(), 3);
2512 }
2513
2514 #[tokio::test]
2515 async fn test_get_diffs() {
2516 let server = MockServer::start();
2517
2518 server.mock(|when, then| {
2519 when.method(GET).path("/repos/owner/repo/pulls/10/files");
2520 then.status(200).json_body(serde_json::json!([{
2521 "sha": "abc123",
2522 "filename": "src/main.rs",
2523 "status": "modified",
2524 "additions": 10,
2525 "deletions": 3,
2526 "changes": 13,
2527 "patch": "@@ +new code"
2528 }]));
2529 });
2530
2531 let client = create_test_client(&server);
2532 let diffs = client.get_diffs("pr#10").await.unwrap().items;
2533
2534 assert_eq!(diffs.len(), 1);
2535 assert_eq!(diffs[0].file_path, "src/main.rs");
2536 assert_eq!(diffs[0].additions, Some(10));
2537 }
2538
2539 #[tokio::test]
2540 async fn test_add_mr_comment_general() {
2541 let server = MockServer::start();
2542
2543 server.mock(|when, then| {
2545 when.method(GET).path("/repos/owner/repo/pulls/10");
2546 then.status(200).json_body(sample_pr_json());
2547 });
2548
2549 server.mock(|when, then| {
2551 when.method(POST)
2552 .path("/repos/owner/repo/issues/10/comments");
2553 then.status(201).json_body(serde_json::json!({
2554 "id": 1,
2555 "body": "General comment",
2556 "user": {"id": 1, "login": "me"},
2557 "created_at": "2024-01-15T10:00:00Z"
2558 }));
2559 });
2560
2561 let client = create_test_client(&server);
2562 let comment = MergeRequestProvider::add_comment(
2563 &client,
2564 "pr#10",
2565 CreateCommentInput {
2566 body: "General comment".to_string(),
2567 position: None,
2568 discussion_id: None,
2569 },
2570 )
2571 .await
2572 .unwrap();
2573
2574 assert_eq!(comment.body, "General comment");
2575 }
2576
2577 #[tokio::test]
2578 async fn test_add_mr_comment_inline() {
2579 let server = MockServer::start();
2580
2581 server.mock(|when, then| {
2583 when.method(GET).path("/repos/owner/repo/pulls/10");
2584 then.status(200).json_body(sample_pr_json());
2585 });
2586
2587 server.mock(|when, then| {
2589 when.method(POST)
2590 .path("/repos/owner/repo/pulls/10/comments")
2591 .body_includes("\"path\":\"src/main.rs\"")
2592 .body_includes("\"line\":42");
2593 then.status(201).json_body(serde_json::json!({
2594 "id": 1,
2595 "body": "Inline comment",
2596 "user": {"id": 1, "login": "me"},
2597 "created_at": "2024-01-15T10:00:00Z",
2598 "path": "src/main.rs",
2599 "line": 42,
2600 "side": "RIGHT"
2601 }));
2602 });
2603
2604 let client = create_test_client(&server);
2605 let comment = MergeRequestProvider::add_comment(
2606 &client,
2607 "pr#10",
2608 CreateCommentInput {
2609 body: "Inline comment".to_string(),
2610 position: Some(CodePosition {
2611 file_path: "src/main.rs".to_string(),
2612 line: 42,
2613 line_type: "new".to_string(),
2614 commit_sha: Some("abc123".to_string()),
2615 }),
2616 discussion_id: None,
2617 },
2618 )
2619 .await
2620 .unwrap();
2621
2622 assert_eq!(comment.body, "Inline comment");
2623 }
2624
2625 #[tokio::test]
2626 async fn test_handle_response_401() {
2627 let server = MockServer::start();
2628
2629 server.mock(|when, then| {
2630 when.method(GET).path("/repos/owner/repo/issues");
2631 then.status(401).body("Bad credentials");
2632 });
2633
2634 let client = create_test_client(&server);
2635 let result = client.get_issues(IssueFilter::default()).await;
2636
2637 assert!(result.is_err());
2638 let err = result.unwrap_err();
2639 assert!(matches!(err, Error::Unauthorized(_)));
2640 }
2641
2642 #[tokio::test]
2643 async fn test_handle_response_404() {
2644 let server = MockServer::start();
2645
2646 server.mock(|when, then| {
2647 when.method(GET).path("/repos/owner/repo/issues/999");
2648 then.status(404).body("Not Found");
2649 });
2650
2651 let client = create_test_client(&server);
2652 let result = client.get_issue("gh#999").await;
2653
2654 assert!(result.is_err());
2655 let err = result.unwrap_err();
2656 assert!(matches!(err, Error::NotFound(_)));
2657 }
2658
2659 #[tokio::test]
2660 async fn test_handle_response_500() {
2661 let server = MockServer::start();
2662
2663 server.mock(|when, then| {
2664 when.method(GET).path("/repos/owner/repo/issues");
2665 then.status(500).body("Internal Server Error");
2666 });
2667
2668 let client = create_test_client(&server);
2669 let result = client.get_issues(IssueFilter::default()).await;
2670
2671 assert!(result.is_err());
2672 let err = result.unwrap_err();
2673 assert!(matches!(err, Error::ServerError { .. }));
2674 }
2675
2676 #[tokio::test]
2677 async fn test_get_current_user() {
2678 let server = MockServer::start();
2679
2680 server.mock(|when, then| {
2681 when.method(GET).path("/user");
2682 then.status(200).json_body(serde_json::json!({
2683 "id": 1,
2684 "login": "testuser",
2685 "name": "Test User",
2686 "email": "test@example.com"
2687 }));
2688 });
2689
2690 let client = create_test_client(&server);
2691 let user = client.get_current_user().await.unwrap();
2692
2693 assert_eq!(user.username, "testuser");
2694 assert_eq!(user.name, Some("Test User".to_string()));
2695 }
2696
    /// Fixture: a single GitHub Actions workflow run (id 100) that completed
    /// with a `failure` conclusion, as returned by the runs API.
    fn sample_workflow_run_json() -> serde_json::Value {
        serde_json::json!({
            "id": 100,
            "name": "CI",
            "status": "completed",
            "conclusion": "failure",
            "head_branch": "feat/test",
            "head_sha": "abc123def456",
            "html_url": "https://github.com/owner/repo/actions/runs/100",
            "run_started_at": "2024-01-01T00:00:00Z",
            "updated_at": "2024-01-01T00:01:00Z"
        })
    }
2714
    /// Fixture: the jobs list for run 100 — job 201 ("Build") succeeded,
    /// job 202 ("Test") failed. Job 202's id is reused by the log tests below.
    fn sample_jobs_json() -> serde_json::Value {
        serde_json::json!({
            "jobs": [
                {
                    "id": 201,
                    "name": "Build",
                    "status": "completed",
                    "conclusion": "success",
                    "html_url": "https://github.com/owner/repo/actions/runs/100/job/201",
                    "started_at": "2024-01-01T00:00:00Z",
                    "completed_at": "2024-01-01T00:00:30Z"
                },
                {
                    "id": 202,
                    "name": "Test",
                    "status": "completed",
                    "conclusion": "failure",
                    "html_url": "https://github.com/owner/repo/actions/runs/100/job/202",
                    "started_at": "2024-01-01T00:00:00Z",
                    "completed_at": "2024-01-01T00:00:45Z"
                }
            ]
        })
    }
2739
2740 #[tokio::test]
2741 async fn test_get_pipeline_by_branch() {
2742 let server = MockServer::start();
2743
2744 server.mock(|when, then| {
2746 when.method(GET)
2747 .path("/repos/owner/repo/actions/runs")
2748 .query_param("branch", "main")
2749 .query_param("status", "completed");
2750 then.status(200).json_body(serde_json::json!({
2751 "workflow_runs": [sample_workflow_run_json()]
2752 }));
2753 });
2754
2755 server.mock(|when, then| {
2757 when.method(GET)
2758 .path("/repos/owner/repo/actions/runs")
2759 .query_param("status", "in_progress");
2760 then.status(200)
2761 .json_body(serde_json::json!({ "workflow_runs": [] }));
2762 });
2763
2764 server.mock(|when, then| {
2766 when.method(GET)
2767 .path("/repos/owner/repo/actions/runs/100/jobs");
2768 then.status(200).json_body(sample_jobs_json());
2769 });
2770
2771 server.mock(|when, then| {
2773 when.method(GET)
2774 .path("/repos/owner/repo/actions/jobs/202/logs");
2775 then.status(200)
2776 .body("Step 1\nerror: test failed\nStep 3\n");
2777 });
2778
2779 let client = create_test_client(&server);
2780 let input = devboy_core::GetPipelineInput {
2781 branch: Some("main".into()),
2782 mr_key: None,
2783 include_failed_logs: true,
2784 };
2785
2786 let result = client.get_pipeline(input).await.unwrap();
2787
2788 assert_eq!(result.id, "100");
2789 assert_eq!(result.status, PipelineStatus::Failed);
2790 assert_eq!(result.reference, "main");
2791 assert_eq!(result.summary.total, 2);
2792 assert_eq!(result.summary.success, 1);
2793 assert_eq!(result.summary.failed, 1);
2794 assert_eq!(result.stages.len(), 1);
2795 assert_eq!(result.stages[0].name, "CI");
2796 assert_eq!(result.stages[0].jobs.len(), 2);
2797 assert_eq!(result.failed_jobs.len(), 1);
2798 assert_eq!(result.failed_jobs[0].name, "Test");
2799 assert!(result.failed_jobs[0].error_snippet.is_some());
2800 }
2801
2802 #[tokio::test]
2803 async fn test_get_pipeline_by_mr_key() {
2804 let server = MockServer::start();
2805
2806 server.mock(|when, then| {
2808 when.method(GET).path("/repos/owner/repo/pulls/42");
2809 then.status(200).json_body(sample_pr_json());
2810 });
2811
2812 server.mock(|when, then| {
2814 when.method(GET)
2815 .path("/repos/owner/repo/actions/runs")
2816 .query_param("status", "completed");
2817 then.status(200).json_body(serde_json::json!({
2818 "workflow_runs": [sample_workflow_run_json()]
2819 }));
2820 });
2821
2822 server.mock(|when, then| {
2824 when.method(GET)
2825 .path("/repos/owner/repo/actions/runs")
2826 .query_param("status", "in_progress");
2827 then.status(200)
2828 .json_body(serde_json::json!({ "workflow_runs": [] }));
2829 });
2830
2831 server.mock(|when, then| {
2833 when.method(GET)
2834 .path("/repos/owner/repo/actions/runs/100/jobs");
2835 then.status(200).json_body(sample_jobs_json());
2836 });
2837
2838 let client = create_test_client(&server);
2839 let input = devboy_core::GetPipelineInput {
2840 branch: None,
2841 mr_key: Some("pr#42".into()),
2842 include_failed_logs: false,
2843 };
2844
2845 let result = client.get_pipeline(input).await.unwrap();
2846 assert_eq!(result.id, "100");
2847 }
2848
2849 #[tokio::test]
2850 async fn test_get_job_logs_smart_mode() {
2851 let server = MockServer::start();
2852
2853 server.mock(|when, then| {
2854 when.method(GET)
2855 .path("/repos/owner/repo/actions/jobs/202/logs");
2856 then.status(200)
2857 .body("Building...\nCompiling...\nerror: cannot find module 'foo'\nDone.\n");
2858 });
2859
2860 let client = create_test_client(&server);
2861 let options = devboy_core::JobLogOptions {
2862 mode: devboy_core::JobLogMode::Smart,
2863 };
2864
2865 let result = client.get_job_logs("202", options).await.unwrap();
2866 assert_eq!(result.job_id, "202");
2867 assert_eq!(result.mode, "smart");
2868 assert!(result.content.contains("cannot find module"));
2869 }
2870
2871 #[tokio::test]
2872 async fn test_get_job_logs_search_mode() {
2873 let server = MockServer::start();
2874
2875 server.mock(|when, then| {
2876 when.method(GET)
2877 .path("/repos/owner/repo/actions/jobs/202/logs");
2878 then.status(200)
2879 .body("Line 1\nLine 2\nERROR: something broke\nLine 4\nLine 5\n");
2880 });
2881
2882 let client = create_test_client(&server);
2883 let options = devboy_core::JobLogOptions {
2884 mode: devboy_core::JobLogMode::Search {
2885 pattern: "ERROR".into(),
2886 context: 1,
2887 max_matches: 5,
2888 },
2889 };
2890
2891 let result = client.get_job_logs("202", options).await.unwrap();
2892 assert_eq!(result.mode, "search");
2893 assert!(result.content.contains("ERROR: something broke"));
2894 assert!(result.content.contains("Match at line 3"));
2895 }
2896
2897 #[tokio::test]
2898 async fn test_get_job_logs_paginated_mode() {
2899 let server = MockServer::start();
2900
2901 server.mock(|when, then| {
2902 when.method(GET)
2903 .path("/repos/owner/repo/actions/jobs/202/logs");
2904 then.status(200)
2905 .body("Line 1\nLine 2\nLine 3\nLine 4\nLine 5\n");
2906 });
2907
2908 let client = create_test_client(&server);
2909 let options = devboy_core::JobLogOptions {
2910 mode: devboy_core::JobLogMode::Paginated {
2911 offset: 1,
2912 limit: 2,
2913 },
2914 };
2915
2916 let result = client.get_job_logs("202", options).await.unwrap();
2917 assert_eq!(result.mode, "paginated");
2918 assert!(result.content.contains("Line 2"));
2919 assert!(result.content.contains("Line 3"));
2920 assert!(!result.content.contains("Line 1"));
2921 assert!(!result.content.contains("Line 4"));
2922 }
2923
2924 #[tokio::test]
2929 async fn test_get_issue_attachments_parses_body_and_comments() {
2930 let server = MockServer::start();
2931
2932 server.mock(|when, then| {
2933 when.method(GET).path("/repos/owner/repo/issues/42");
2934 then.status(200).json_body(serde_json::json!({
2935 "id": 1,
2936 "number": 42,
2937 "title": "bug",
2938 "body": "Error: ",
2939 "state": "open",
2940 "html_url": "https://github.com/owner/repo/issues/42",
2941 "created_at": "2024-01-01T00:00:00Z",
2942 "updated_at": "2024-01-02T00:00:00Z"
2943 }));
2944 });
2945 server.mock(|when, then| {
2946 when.method(GET)
2947 .path("/repos/owner/repo/issues/42/comments");
2948 then.status(200).json_body(serde_json::json!([
2949 {
2950 "id": 10,
2951 "body": "Log [here](https://user-images.githubusercontent.com/1/log.txt)",
2952 "html_url": "https://github.com/owner/repo/issues/42#issuecomment-10",
2953 "created_at": "2024-01-03T00:00:00Z",
2954 "updated_at": "2024-01-03T00:00:00Z"
2955 }
2956 ]));
2957 });
2958
2959 let client = create_test_client(&server);
2960 let attachments = client.get_issue_attachments("gh#42").await.unwrap();
2961 assert_eq!(attachments.len(), 2);
2962 assert_eq!(attachments[0].filename, "screen");
2963 assert_eq!(attachments[1].filename, "here");
2964 }
2965
2966 #[tokio::test]
2967 async fn test_download_attachment_fetches_url() {
2968 let server = MockServer::start();
2969
2970 server.mock(|when, then| {
2971 when.method(GET).path("/cdn/file.txt");
2972 then.status(200).body("github-bytes");
2973 });
2974
2975 let client = create_test_client(&server);
2976 let url = format!("{}/cdn/file.txt", server.base_url());
2977 let bytes = client.download_attachment("gh#42", &url).await.unwrap();
2978 assert_eq!(bytes, b"github-bytes");
2979 }
2980
2981 #[tokio::test]
2982 async fn test_github_asset_capabilities() {
2983 let server = MockServer::start();
2984 let client = create_test_client(&server);
2985 let caps = client.asset_capabilities();
2986 assert!(!caps.issue.upload, "GitHub has no public upload API");
2987 assert!(caps.issue.download);
2988 assert!(caps.issue.list);
2989 assert!(!caps.issue.delete);
2990 assert!(!caps.merge_request.upload);
2991 assert!(caps.merge_request.download);
2992 }
2993 }
2994
2995 #[test]
3000 fn test_map_gh_status() {
3001 assert_eq!(
3002 map_gh_status(Some("completed"), Some("success")),
3003 PipelineStatus::Success
3004 );
3005 assert_eq!(
3006 map_gh_status(Some("completed"), Some("failure")),
3007 PipelineStatus::Failed
3008 );
3009 assert_eq!(
3010 map_gh_status(Some("in_progress"), None),
3011 PipelineStatus::Running
3012 );
3013 assert_eq!(map_gh_status(Some("queued"), None), PipelineStatus::Pending);
3014 assert_eq!(
3015 map_gh_status(Some("completed"), Some("cancelled")),
3016 PipelineStatus::Canceled
3017 );
3018 assert_eq!(map_gh_status(None, None), PipelineStatus::Unknown);
3019 }
3020
3021 #[test]
3022 fn test_strip_ansi() {
3023 assert_eq!(strip_ansi("\x1b[31merror\x1b[0m"), "error");
3024 assert_eq!(strip_ansi("no ansi here"), "no ansi here");
3025 assert_eq!(strip_ansi("\x1b[1m\x1b[32mgreen\x1b[0m"), "green");
3026 }
3027
3028 #[test]
3029 fn test_extract_errors_finds_patterns() {
3030 let log = "Step 1: build\nStep 2: test\nerror: test failed at line 42\nStep 4: done\n";
3031 let result = extract_errors(log, 10).unwrap();
3032 assert!(result.contains("error: test failed"));
3033 }
3034
3035 #[test]
3036 fn test_extract_errors_fallback_to_tail() {
3037 let log = "Line 1\nLine 2\nLine 3\n";
3038 let result = extract_errors(log, 10).unwrap();
3039 assert!(result.contains("Line 3"));
3040 }
3041
3042 #[test]
3043 fn test_extract_errors_empty_log() {
3044 assert!(extract_errors("", 10).is_none());
3045 }
3046
3047 #[test]
3048 fn test_estimate_duration() {
3049 let d = estimate_duration(Some("2024-01-01T00:00:00Z"), Some("2024-01-01T00:01:30Z"));
3050 assert_eq!(d, Some(90));
3051 }
3052
3053 #[test]
3054 fn test_estimate_duration_invalid() {
3055 assert!(estimate_duration(None, Some("2024-01-01T00:00:00Z")).is_none());
3056 assert!(estimate_duration(Some("not-a-date"), Some("2024-01-01T00:00:00Z")).is_none());
3057 }
3058}