1use anyhow::{Context, Result};
9use octocrab::Octocrab;
10use tracing::{debug, instrument};
11
12use super::{ReferenceKind, parse_github_reference};
13use crate::ai::types::{PrDetails, PrFile, PrReviewComment, ReviewEvent};
14use crate::error::{AptuError, ResourceType};
15use crate::triage::render_pr_review_comment_body;
16
/// Summary of a newly created pull request, suitable for serialized
/// command output.
#[derive(Debug, serde::Serialize)]
pub struct PrCreateResult {
    /// PR number assigned by GitHub.
    pub pr_number: u64,
    /// Web URL of the PR; empty string when GitHub returned none.
    pub url: String,
    /// Head (source) branch name.
    pub branch: String,
    /// Base (target) branch name.
    pub base: String,
    /// PR title as echoed back by the API.
    pub title: String,
    /// Whether the PR was created as a draft.
    pub draft: bool,
    /// Number of changed files, saturated to `u32::MAX` on overflow.
    pub files_changed: u32,
    /// Total lines added.
    pub additions: u64,
    /// Total lines deleted.
    pub deletions: u64,
}
39
/// Parses a pull-request reference (e.g. a full GitHub PR URL; other forms
/// are handled by [`parse_github_reference`]) into `(owner, repo, number)`.
///
/// `repo_context` supplies the repository when the reference itself does not
/// name one — see [`parse_github_reference`] for the accepted formats.
///
/// # Errors
/// Propagates any parse failure from [`parse_github_reference`].
pub fn parse_pr_reference(
    reference: &str,
    repo_context: Option<&str>,
) -> Result<(String, String, u64)> {
    parse_github_reference(ReferenceKind::Pull, reference, repo_context)
}
65
/// Fetches full details for a pull request: metadata, the changed-file list,
/// and (subject to the caps in `review_config`) each file's full content at
/// the head commit.
///
/// # Errors
/// - [`AptuError::TypeMismatch`] when `number` resolves to an issue rather
///   than a pull request (checked only after a 404 on the PR endpoint).
/// - Any other GitHub API failure, wrapped with request context.
#[instrument(skip(client), fields(owner = %owner, repo = %repo, number = number))]
pub async fn fetch_pr_details(
    client: &Octocrab,
    owner: &str,
    repo: &str,
    number: u64,
    review_config: &crate::config::ReviewConfig,
) -> Result<PrDetails> {
    debug!("Fetching PR details");

    let pr = match client.pulls(owner, repo).get(number).await {
        Ok(pr) => pr,
        Err(e) => {
            // A 404 may mean "wrong resource type": if the same number
            // resolves as an issue, surface a TypeMismatch so the caller can
            // point the user at the right command instead of "not found".
            if let octocrab::Error::GitHub { source, .. } = &e
                && source.status_code == 404
            {
                if (client.issues(owner, repo).get(number).await).is_ok() {
                    return Err(AptuError::TypeMismatch {
                        number,
                        expected: ResourceType::PullRequest,
                        actual: ResourceType::Issue,
                    }
                    .into());
                }
            }
            return Err(e)
                .with_context(|| format!("Failed to fetch PR #{number} from {owner}/{repo}"));
        }
    };

    let files = client
        .pulls(owner, repo)
        .list_files(number)
        .await
        .with_context(|| format!("Failed to fetch files for PR #{number}"))?;

    // First pass: diff-level metadata only; full_content is filled in below.
    let pr_files: Vec<PrFile> = files
        .items
        .into_iter()
        .map(|f| PrFile {
            filename: f.filename,
            status: format!("{:?}", f.status),
            additions: f.additions,
            deletions: f.deletions,
            patch: f.patch,
            full_content: None,
        })
        .collect();

    // Best-effort: failures inside fetch_file_contents degrade to None
    // entries rather than aborting the whole PR fetch.
    let file_contents = fetch_file_contents(
        client,
        owner,
        repo,
        &pr_files,
        &pr.head.sha,
        review_config.max_full_content_files,
        review_config.max_chars_per_file,
    )
    .await;

    // The zip below would silently drop extras if the one-entry-per-file
    // invariant ever broke, hence the debug assertion.
    debug_assert_eq!(
        pr_files.len(),
        file_contents.len(),
        "fetch_file_contents must return one entry per file"
    );
    let pr_files: Vec<PrFile> = pr_files
        .into_iter()
        .zip(file_contents)
        .map(|(mut file, content)| {
            file.full_content = content;
            file
        })
        .collect();

    // `pr.labels` is optional; flat_map treats a missing list as empty.
    let labels: Vec<String> = pr
        .labels
        .iter()
        .flat_map(|labels_vec| labels_vec.iter().map(|l| l.name.clone()))
        .collect();

    let details = PrDetails {
        owner: owner.to_string(),
        repo: repo.to_string(),
        number,
        title: pr.title.unwrap_or_default(),
        body: pr.body.unwrap_or_default(),
        base_branch: pr.base.ref_field,
        head_branch: pr.head.ref_field,
        head_sha: pr.head.sha,
        files: pr_files,
        url: pr.html_url.map_or_else(String::new, |u| u.to_string()),
        labels,
    };

    debug!(
        file_count = details.files.len(),
        "PR details fetched successfully"
    );

    Ok(details)
}
193
194#[instrument(skip(client, files), fields(owner = %owner, repo = %repo, max_files = max_files))]
216async fn fetch_file_contents(
217 client: &Octocrab,
218 owner: &str,
219 repo: &str,
220 files: &[PrFile],
221 head_sha: &str,
222 max_files: usize,
223 max_chars_per_file: usize,
224) -> Vec<Option<String>> {
225 let mut results = Vec::with_capacity(files.len());
226 let mut fetched_count = 0usize;
227
228 for file in files {
229 if should_skip_file(&file.filename, &file.status, file.patch.as_ref()) {
230 results.push(None);
231 continue;
232 }
233
234 if fetched_count >= max_files {
236 debug!(
237 file = %file.filename,
238 fetched_count = fetched_count,
239 max_files = max_files,
240 "Fetched file count exceeds max_files cap"
241 );
242 results.push(None);
243 continue;
244 }
245
246 match client
248 .repos(owner, repo)
249 .get_content()
250 .path(&file.filename)
251 .r#ref(head_sha)
252 .send()
253 .await
254 {
255 Ok(content) => {
256 if let Some(item) = content.items.first() {
258 if let Some(decoded) = item.decoded_content() {
259 let truncated = if decoded.len() > max_chars_per_file {
260 decoded.chars().take(max_chars_per_file).collect::<String>()
261 } else {
262 decoded
263 };
264 debug!(
265 file = %file.filename,
266 content_len = truncated.len(),
267 "File content fetched and truncated"
268 );
269 results.push(Some(truncated));
270 fetched_count += 1;
271 } else {
272 tracing::warn!(
273 file = %file.filename,
274 "Failed to decode file content; skipping"
275 );
276 results.push(None);
277 }
278 } else {
279 tracing::warn!(
280 file = %file.filename,
281 "File content response was empty; skipping"
282 );
283 results.push(None);
284 }
285 }
286 Err(e) => {
287 tracing::warn!(
288 file = %file.filename,
289 err = %e,
290 "Failed to fetch file content; skipping"
291 );
292 results.push(None);
293 }
294 }
295 }
296
297 results
298}
299
300#[allow(clippy::too_many_arguments)]
324#[instrument(skip(client, comments), fields(owner = %owner, repo = %repo, number = number, event = %event))]
325pub async fn post_pr_review(
326 client: &Octocrab,
327 owner: &str,
328 repo: &str,
329 number: u64,
330 body: &str,
331 event: ReviewEvent,
332 comments: &[PrReviewComment],
333 commit_id: &str,
334) -> Result<u64> {
335 debug!("Posting PR review");
336
337 let route = format!("/repos/{owner}/{repo}/pulls/{number}/reviews");
338
339 let inline_comments: Vec<serde_json::Value> = comments
341 .iter()
342 .filter_map(|c| {
344 c.line.map(|line| {
345 serde_json::json!({
346 "path": c.file,
347 "line": line,
348 "side": "RIGHT",
352 "body": render_pr_review_comment_body(c),
353 })
354 })
355 })
356 .collect();
357
358 let mut payload = serde_json::json!({
359 "body": body,
360 "event": event.to_string(),
361 "comments": inline_comments,
362 });
363
364 if !commit_id.is_empty() {
366 payload["commit_id"] = serde_json::Value::String(commit_id.to_string());
367 }
368
369 #[derive(serde::Deserialize)]
370 struct ReviewResponse {
371 id: u64,
372 }
373
374 let response: ReviewResponse = client.post(route, Some(&payload)).await.with_context(|| {
375 format!(
376 "Failed to post review to PR #{number} in {owner}/{repo}. \
377 Check that you have write access to the repository."
378 )
379 })?;
380
381 debug!(review_id = response.id, "PR review posted successfully");
382
383 Ok(response.id)
384}
385
/// Derives GitHub labels from a PR title's conventional-commit type prefix
/// and the paths of its changed files.
///
/// Duplicates are collapsed; the order of the returned labels is
/// unspecified (the set is backed by a `HashSet`).
#[must_use]
pub fn labels_from_pr_metadata(title: &str, file_paths: &[String]) -> Vec<String> {
    // Area label derived from where a changed file lives in the repository.
    fn area_label(path: &str) -> Option<&'static str> {
        if path.starts_with("crates/aptu-cli/") {
            Some("cli")
        } else if path.starts_with("crates/aptu-ffi/") || path.starts_with("AptuApp/") {
            Some("ios")
        } else if path.starts_with("docs/") {
            Some("documentation")
        } else {
            None
        }
    }

    // Conventional-commit type with any "(scope)" suffix stripped:
    // "feat(cli): x" -> "feat".
    let commit_type = title
        .split(':')
        .next()
        .unwrap_or("")
        .split('(')
        .next()
        .unwrap_or("")
        .trim();

    let type_label = match commit_type {
        "feat" | "perf" => Some("enhancement"),
        "fix" => Some("bug"),
        "docs" => Some("documentation"),
        "refactor" => Some("refactor"),
        _ => None,
    };

    // A set keeps labels unique when the title and a path map to the same one.
    let mut unique: std::collections::HashSet<String> =
        type_label.into_iter().map(str::to_string).collect();
    unique.extend(
        file_paths
            .iter()
            .filter_map(|p| area_label(p))
            .map(str::to_string),
    );

    unique.into_iter().collect()
}
444
445#[instrument(skip(client), fields(owner = %owner, repo = %repo, head = %head_branch, base = %base_branch))]
465pub async fn create_pull_request(
466 client: &Octocrab,
467 owner: &str,
468 repo: &str,
469 title: &str,
470 head_branch: &str,
471 base_branch: &str,
472 body: Option<&str>,
473) -> anyhow::Result<PrCreateResult> {
474 debug!("Creating pull request");
475
476 let pr = client
477 .pulls(owner, repo)
478 .create(title, head_branch, base_branch)
479 .body(body.unwrap_or_default())
480 .draft(false)
481 .send()
482 .await
483 .with_context(|| {
484 format!("Failed to create PR in {owner}/{repo} ({head_branch} -> {base_branch})")
485 })?;
486
487 let result = PrCreateResult {
488 pr_number: pr.number,
489 url: pr.html_url.map_or_else(String::new, |u| u.to_string()),
490 branch: pr.head.ref_field,
491 base: pr.base.ref_field,
492 title: pr.title.unwrap_or_default(),
493 draft: pr.draft.unwrap_or(false),
494 files_changed: u32::try_from(pr.changed_files.unwrap_or_default()).unwrap_or(u32::MAX),
495 additions: pr.additions.unwrap_or_default(),
496 deletions: pr.deletions.unwrap_or_default(),
497 };
498
499 debug!(
500 pr_number = result.pr_number,
501 "Pull request created successfully"
502 );
503
504 Ok(result)
505}
506
507fn should_skip_file(filename: &str, status: &str, patch: Option<&String>) -> bool {
511 if status.to_lowercase().contains("removed") {
512 debug!(file = %filename, "Skipping removed file");
513 return true;
514 }
515 if patch.is_none_or(String::is_empty) {
516 debug!(file = %filename, "Skipping file with empty patch");
517 return true;
518 }
519 false
520}
521
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ai::types::CommentSeverity;

    // Test-local re-implementation of the decode-and-truncate step applied to
    // fetched file contents. NOTE(review): this duplicates logic rather than
    // calling into `fetch_file_contents` (which requires an Octocrab client);
    // keep it in sync with the production truncation behavior.
    fn decode_content(encoded: &str, max_chars: usize) -> Option<String> {
        use base64::Engine;
        let engine = base64::engine::general_purpose::STANDARD;
        let decoded_bytes = engine.decode(encoded).ok()?;
        let decoded_str = String::from_utf8(decoded_bytes).ok()?;

        if decoded_str.len() <= max_chars {
            Some(decoded_str)
        } else {
            Some(decoded_str.chars().take(max_chars).collect::<String>())
        }
    }

    #[test]
    fn test_pr_create_result_fields() {
        let result = PrCreateResult {
            pr_number: 42,
            url: "https://github.com/owner/repo/pull/42".to_string(),
            branch: "feat/my-feature".to_string(),
            base: "main".to_string(),
            title: "feat: add feature".to_string(),
            draft: false,
            files_changed: 3,
            additions: 100,
            deletions: 10,
        };

        assert_eq!(result.pr_number, 42);
        assert_eq!(result.url, "https://github.com/owner/repo/pull/42");
        assert_eq!(result.branch, "feat/my-feature");
        assert_eq!(result.base, "main");
        assert_eq!(result.title, "feat: add feature");
        assert!(!result.draft);
        assert_eq!(result.files_changed, 3);
        assert_eq!(result.additions, 100);
        assert_eq!(result.deletions, 10);
    }

    // Mirrors the inline-comment JSON construction in `post_pr_review`
    // (duplicated here because the real function needs a live client).
    fn build_inline_comments(comments: &[PrReviewComment]) -> Vec<serde_json::Value> {
        comments
            .iter()
            .filter_map(|c| {
                c.line.map(|line| {
                    serde_json::json!({
                        "path": c.file,
                        "line": line,
                        "side": "RIGHT",
                        "body": render_pr_review_comment_body(c),
                    })
                })
            })
            .collect()
    }

    #[test]
    fn test_post_pr_review_payload_with_comments() {
        let comments = vec![PrReviewComment {
            file: "src/main.rs".to_string(),
            line: Some(42),
            comment: "Consider using a match here.".to_string(),
            severity: CommentSeverity::Suggestion,
            suggested_code: None,
        }];

        let inline = build_inline_comments(&comments);

        assert_eq!(inline.len(), 1);
        assert_eq!(inline[0]["path"], "src/main.rs");
        assert_eq!(inline[0]["line"], 42);
        assert_eq!(inline[0]["side"], "RIGHT");
        assert_eq!(inline[0]["body"], "Consider using a match here.");
    }

    #[test]
    fn test_post_pr_review_skips_none_line_comments() {
        let comments = vec![
            PrReviewComment {
                file: "src/lib.rs".to_string(),
                line: None,
                comment: "General file comment.".to_string(),
                severity: CommentSeverity::Info,
                suggested_code: None,
            },
            PrReviewComment {
                file: "src/lib.rs".to_string(),
                line: Some(10),
                comment: "Inline comment.".to_string(),
                severity: CommentSeverity::Warning,
                suggested_code: None,
            },
        ];

        let inline = build_inline_comments(&comments);

        assert_eq!(inline.len(), 1);
        assert_eq!(inline[0]["line"], 10);
    }

    #[test]
    fn test_post_pr_review_empty_comments() {
        let comments: Vec<PrReviewComment> = vec![];

        let inline = build_inline_comments(&comments);

        assert!(inline.is_empty());
        // An empty comments array must serialize as `[]`, not be dropped.
        let serialized = serde_json::to_string(&inline).unwrap();
        assert_eq!(serialized, "[]");
    }

    #[test]
    fn test_parse_pr_reference_delegates_to_shared() {
        let (owner, repo, number) =
            parse_pr_reference("https://github.com/block/goose/pull/123", None).unwrap();
        assert_eq!(owner, "block");
        assert_eq!(repo, "goose");
        assert_eq!(number, 123);
    }

    #[test]
    fn test_title_prefix_to_label_mapping() {
        let cases = vec![
            (
                "feat: add new feature",
                vec!["enhancement"],
                "feat should map to enhancement",
            ),
            ("fix: resolve bug", vec!["bug"], "fix should map to bug"),
            (
                "docs: update readme",
                vec!["documentation"],
                "docs should map to documentation",
            ),
            (
                "refactor: improve code",
                vec!["refactor"],
                "refactor should map to refactor",
            ),
            (
                "perf: optimize",
                vec!["enhancement"],
                "perf should map to enhancement",
            ),
            (
                "chore: update deps",
                vec![],
                "chore should produce no labels",
            ),
        ];

        for (title, expected_labels, msg) in cases {
            let labels = labels_from_pr_metadata(title, &[]);
            for expected in &expected_labels {
                assert!(
                    labels.contains(&expected.to_string()),
                    "{msg}: expected '{expected}' in {labels:?}",
                );
            }
            if expected_labels.is_empty() {
                assert!(labels.is_empty(), "{msg}: expected empty, got {labels:?}",);
            }
        }
    }

    #[test]
    fn test_file_path_to_scope_mapping() {
        let cases = vec![
            (
                "feat: cli",
                vec!["crates/aptu-cli/src/main.rs"],
                vec!["enhancement", "cli"],
                "cli path should map to cli scope",
            ),
            (
                "feat: ios",
                vec!["crates/aptu-ffi/src/lib.rs"],
                vec!["enhancement", "ios"],
                "ffi path should map to ios scope",
            ),
            (
                "feat: ios",
                vec!["AptuApp/ContentView.swift"],
                vec!["enhancement", "ios"],
                "app path should map to ios scope",
            ),
            (
                "feat: docs",
                vec!["docs/GITHUB_ACTION.md"],
                vec!["enhancement", "documentation"],
                "docs path should map to documentation scope",
            ),
            (
                "feat: workflow",
                vec![".github/workflows/test.yml"],
                vec!["enhancement"],
                "workflow path should be ignored",
            ),
        ];

        for (title, paths, expected_labels, msg) in cases {
            let labels = labels_from_pr_metadata(
                title,
                &paths
                    .iter()
                    .map(std::string::ToString::to_string)
                    .collect::<Vec<_>>(),
            );
            for expected in expected_labels {
                assert!(
                    labels.contains(&expected.to_string()),
                    "{msg}: expected '{expected}' in {labels:?}",
                );
            }
        }
    }

    #[test]
    fn test_combined_title_and_paths() {
        let labels = labels_from_pr_metadata(
            "feat: multi",
            &[
                "crates/aptu-cli/src/main.rs".to_string(),
                "docs/README.md".to_string(),
            ],
        );
        assert!(
            labels.contains(&"enhancement".to_string()),
            "should include enhancement from feat prefix"
        );
        assert!(
            labels.contains(&"cli".to_string()),
            "should include cli from path"
        );
        assert!(
            labels.contains(&"documentation".to_string()),
            "should include documentation from path"
        );
    }

    #[test]
    fn test_no_match_returns_empty() {
        let cases = vec![
            (
                "Random title",
                vec![],
                "unrecognized prefix should return empty",
            ),
            (
                "chore: update",
                vec![],
                "ignored prefix should return empty",
            ),
        ];

        for (title, paths, msg) in cases {
            let labels = labels_from_pr_metadata(title, &paths);
            assert!(labels.is_empty(), "{msg}: got {labels:?}");
        }
    }

    #[test]
    fn test_scoped_prefix_extracts_type() {
        let labels = labels_from_pr_metadata("feat(cli): add new feature", &[]);
        assert!(
            labels.contains(&"enhancement".to_string()),
            "scoped prefix should extract type from feat(cli)"
        );
    }

    #[test]
    fn test_duplicate_labels_deduplicated() {
        let labels = labels_from_pr_metadata("docs: update", &["docs/README.md".to_string()]);
        assert_eq!(
            labels.len(),
            1,
            "should have exactly one label when title and path both map to documentation"
        );
        assert!(
            labels.contains(&"documentation".to_string()),
            "should contain documentation label"
        );
    }

    #[test]
    fn test_should_skip_file_respects_fetched_count_cap() {
        let removed_file = PrFile {
            filename: "removed.rs".to_string(),
            status: "removed".to_string(),
            additions: 0,
            deletions: 5,
            patch: None,
            full_content: None,
        };
        let modified_file = PrFile {
            filename: "file_0.rs".to_string(),
            status: "modified".to_string(),
            additions: 1,
            deletions: 0,
            patch: Some("+ new code".to_string()),
            full_content: None,
        };
        let no_patch_file = PrFile {
            filename: "file_1.rs".to_string(),
            status: "modified".to_string(),
            additions: 1,
            deletions: 0,
            patch: None,
            full_content: None,
        };

        assert!(
            should_skip_file(
                &removed_file.filename,
                &removed_file.status,
                removed_file.patch.as_ref()
            ),
            "removed files should be skipped"
        );

        assert!(
            !should_skip_file(
                &modified_file.filename,
                &modified_file.status,
                modified_file.patch.as_ref()
            ),
            "modified files with patch should not be skipped"
        );

        assert!(
            should_skip_file(
                &no_patch_file.filename,
                &no_patch_file.status,
                no_patch_file.patch.as_ref()
            ),
            "files without patch should be skipped"
        );
    }

    #[test]
    fn test_decode_content_valid_base64() {
        use base64::Engine;
        let engine = base64::engine::general_purpose::STANDARD;
        let original = "Hello, World!";
        let encoded = engine.encode(original);

        let result = decode_content(&encoded, 1000);

        assert_eq!(
            result,
            Some(original.to_string()),
            "valid base64 should decode successfully"
        );
    }

    #[test]
    fn test_decode_content_invalid_base64() {
        let invalid_base64 = "!!!invalid!!!";

        let result = decode_content(invalid_base64, 1000);

        assert_eq!(result, None, "invalid base64 should return None");
    }

    #[test]
    fn test_decode_content_truncates_at_max_chars() {
        use base64::Engine;
        let engine = base64::engine::general_purpose::STANDARD;
        // Multibyte input: 50 chars but 150 bytes, so truncation must count
        // characters, not bytes, to stay on a valid UTF-8 boundary.
        let original = "こんにちは".repeat(10);
        let encoded = engine.encode(&original);
        let max_chars = 10;

        let result = decode_content(&encoded, max_chars);

        assert!(result.is_some(), "decoding should succeed");
        let decoded = result.unwrap();
        assert_eq!(
            decoded.chars().count(),
            max_chars,
            "output should be truncated to max_chars on character boundary"
        );
        assert!(
            decoded.is_char_boundary(decoded.len()),
            "output should be valid UTF-8 (truncated on char boundary)"
        );
    }
}
948}