// aptu_core/github/pulls.rs

1// SPDX-License-Identifier: Apache-2.0
2
3//! Pull request fetching via Octocrab.
4//!
5//! Provides functions to parse PR references and fetch PR details
6//! including file diffs for AI review.
7
8use anyhow::{Context, Result};
9use octocrab::Octocrab;
10use tracing::{debug, instrument};
11
12use super::{ReferenceKind, parse_github_reference};
13use crate::ai::types::{PrDetails, PrFile, PrReviewComment, ReviewEvent};
14use crate::error::{AptuError, ResourceType};
15use crate::triage::render_pr_review_comment_body;
16
/// Result from creating a pull request.
///
/// Carries the subset of GitHub's PR-creation response that callers report
/// back; derives `serde::Serialize` so it can be emitted as structured output.
#[derive(Debug, serde::Serialize)]
pub struct PrCreateResult {
    /// PR number assigned by GitHub.
    pub pr_number: u64,
    /// Web URL of the created PR (empty string if GitHub returned none).
    pub url: String,
    /// Head branch (the branch containing the changes).
    pub branch: String,
    /// Base branch (the branch being merged into).
    pub base: String,
    /// PR title.
    pub title: String,
    /// Whether the PR is a draft.
    pub draft: bool,
    /// Number of files changed (saturated to `u32::MAX` if larger).
    pub files_changed: u32,
    /// Number of added lines.
    pub additions: u64,
    /// Number of deleted lines.
    pub deletions: u64,
}
39
/// Parses a PR reference into (owner, repo, number).
///
/// Supports multiple formats:
/// - Full URL: `https://github.com/owner/repo/pull/123`
/// - Short form: `owner/repo#123`
/// - Bare number: `123` (requires `repo_context`)
///
/// # Arguments
///
/// * `reference` - PR reference string
/// * `repo_context` - Optional repository context for bare numbers (e.g., "owner/repo")
///
/// # Returns
///
/// Tuple of (owner, repo, number)
///
/// # Errors
///
/// Returns an error if the reference format is invalid or `repo_context` is missing for bare numbers.
pub fn parse_pr_reference(
    reference: &str,
    repo_context: Option<&str>,
) -> Result<(String, String, u64)> {
    // Thin wrapper: all parsing lives in the shared GitHub reference parser;
    // this only pins the reference kind to pull requests.
    parse_github_reference(ReferenceKind::Pull, reference, repo_context)
}
65
/// Fetches PR details including file diffs from GitHub.
///
/// Uses Octocrab to fetch PR metadata and file changes, then augments each
/// eligible file with its full content (subject to the limits in
/// `review_config`).
///
/// # Arguments
///
/// * `client` - Authenticated Octocrab client
/// * `owner` - Repository owner
/// * `repo` - Repository name
/// * `number` - PR number
/// * `review_config` - Limits for full-content fetching
///   (`max_full_content_files`, `max_chars_per_file`)
///
/// # Returns
///
/// `PrDetails` struct with PR metadata and file diffs.
///
/// # Errors
///
/// Returns an error if the API call fails or PR is not found.
#[instrument(skip(client), fields(owner = %owner, repo = %repo, number = number))]
pub async fn fetch_pr_details(
    client: &Octocrab,
    owner: &str,
    repo: &str,
    number: u64,
    review_config: &crate::config::ReviewConfig,
) -> Result<PrDetails> {
    debug!("Fetching PR details");

    // Fetch PR metadata
    let pr = match client.pulls(owner, repo).get(number).await {
        Ok(pr) => pr,
        Err(e) => {
            // A 404 from the pulls endpoint can mean either "nothing with this
            // number" or "this number belongs to an issue". Probe the Issues
            // API so the latter case gets a precise TypeMismatch error.
            if let octocrab::Error::GitHub { source, .. } = &e
                && source.status_code == 404
            {
                // Try to fetch as an issue to provide a better error message
                if (client.issues(owner, repo).get(number).await).is_ok() {
                    return Err(AptuError::TypeMismatch {
                        number,
                        expected: ResourceType::PullRequest,
                        actual: ResourceType::Issue,
                    }
                    .into());
                }
                // Issue check failed, fall back to original error
            }
            return Err(e)
                .with_context(|| format!("Failed to fetch PR #{number} from {owner}/{repo}"));
        }
    };

    // Fetch PR files (diffs)
    let files = client
        .pulls(owner, repo)
        .list_files(number)
        .await
        .with_context(|| format!("Failed to fetch files for PR #{number}"))?;

    // Convert octocrab file entries to our PrFile type; full_content starts
    // as None and is filled in after the content fetch below.
    let pr_files: Vec<PrFile> = files
        .items
        .into_iter()
        .map(|f| PrFile {
            filename: f.filename,
            // Debug-format of octocrab's status enum; should_skip_file later
            // matches on the lowercased text of this string.
            status: format!("{:?}", f.status),
            additions: f.additions,
            deletions: f.deletions,
            patch: f.patch,
            full_content: None,
        })
        .collect();

    // Fetch full file contents for eligible files (default: up to 10 files, max 4000 chars each)
    let file_contents = fetch_file_contents(
        client,
        owner,
        repo,
        &pr_files,
        &pr.head.sha,
        review_config.max_full_content_files,
        review_config.max_chars_per_file,
    )
    .await;

    // Merge file contents back into pr_files; the zip below silently drops
    // extras, so assert the 1:1 invariant in debug builds.
    debug_assert_eq!(
        pr_files.len(),
        file_contents.len(),
        "fetch_file_contents must return one entry per file"
    );
    let pr_files: Vec<PrFile> = pr_files
        .into_iter()
        .zip(file_contents)
        .map(|(mut file, content)| {
            file.full_content = content;
            file
        })
        .collect();

    // pr.labels is optional; iterating the Option yields zero or one Vec,
    // so flat_map flattens "no labels" to an empty list.
    let labels: Vec<String> = pr
        .labels
        .iter()
        .flat_map(|labels_vec| labels_vec.iter().map(|l| l.name.clone()))
        .collect();

    let details = PrDetails {
        owner: owner.to_string(),
        repo: repo.to_string(),
        number,
        title: pr.title.unwrap_or_default(),
        body: pr.body.unwrap_or_default(),
        base_branch: pr.base.ref_field,
        head_branch: pr.head.ref_field,
        head_sha: pr.head.sha,
        files: pr_files,
        url: pr.html_url.map_or_else(String::new, |u| u.to_string()),
        labels,
    };

    debug!(
        file_count = details.files.len(),
        "PR details fetched successfully"
    );

    Ok(details)
}
193
/// Fetches full file contents for PR files from GitHub Contents API.
///
/// Fetches content for eligible files up to a specified limit and truncates each to a character limit.
/// Skips deleted files and files with empty patches. Per-file errors are non-fatal: they produce
/// `None` entries and log warnings.
///
/// # Arguments
///
/// * `client` - Authenticated Octocrab client
/// * `owner` - Repository owner
/// * `repo` - Repository name
/// * `files` - Slice of PR files to fetch
/// * `head_sha` - PR head commit SHA to fetch from
/// * `max_files` - Maximum number of files to fetch content for
/// * `max_chars_per_file` - Truncate each file's content at this character limit
///
/// # Returns
///
/// Vector of `Option<String>` with one entry per input file (in order):
/// - `Some(content)` if fetch succeeded
/// - `None` if fetch failed, file was skipped, or file index exceeded `max_files`
#[instrument(skip(client, files), fields(owner = %owner, repo = %repo, max_files = max_files))]
async fn fetch_file_contents(
    client: &Octocrab,
    owner: &str,
    repo: &str,
    files: &[PrFile],
    head_sha: &str,
    max_files: usize,
    max_chars_per_file: usize,
) -> Vec<Option<String>> {
    // One result slot per input file keeps the caller's zip-merge aligned.
    let mut results = Vec::with_capacity(files.len());
    let mut fetched_count = 0usize;

    for file in files {
        if should_skip_file(&file.filename, &file.status, file.patch.as_ref()) {
            results.push(None);
            continue;
        }

        // Skip if beyond max_files cap (count only successfully-fetched files)
        if fetched_count >= max_files {
            debug!(
                file = %file.filename,
                fetched_count = fetched_count,
                max_files = max_files,
                "Fetched file count exceeds max_files cap"
            );
            results.push(None);
            continue;
        }

        // Attempt to fetch file content pinned to the PR head commit.
        match client
            .repos(owner, repo)
            .get_content()
            .path(&file.filename)
            .r#ref(head_sha)
            .send()
            .await
        {
            Ok(content) => {
                // Try to decode the first item (should be the file, not a directory listing)
                if let Some(item) = content.items.first() {
                    if let Some(decoded) = item.decoded_content() {
                        // NOTE: the guard compares byte length while the
                        // truncation counts chars. Since char count <= byte
                        // count, anything under the byte limit is also under
                        // the char limit, so the output is unaffected (at
                        // worst an unnecessary re-collect for multi-byte
                        // content that fits within max_chars_per_file chars).
                        let truncated = if decoded.len() > max_chars_per_file {
                            decoded.chars().take(max_chars_per_file).collect::<String>()
                        } else {
                            decoded
                        };
                        debug!(
                            file = %file.filename,
                            content_len = truncated.len(),
                            "File content fetched and truncated"
                        );
                        results.push(Some(truncated));
                        fetched_count += 1;
                    } else {
                        // Decode failure (e.g. content not decodable); non-fatal.
                        tracing::warn!(
                            file = %file.filename,
                            "Failed to decode file content; skipping"
                        );
                        results.push(None);
                    }
                } else {
                    tracing::warn!(
                        file = %file.filename,
                        "File content response was empty; skipping"
                    );
                    results.push(None);
                }
            }
            Err(e) => {
                // Per-file API errors are non-fatal by design: log and move on.
                tracing::warn!(
                    file = %file.filename,
                    err = %e,
                    "Failed to fetch file content; skipping"
                );
                results.push(None);
            }
        }
    }

    results
}
299
300/// Posts a PR review to GitHub.
301///
302/// Uses Octocrab's custom HTTP POST to create a review with the specified event type.
303/// Requires write access to the repository.
304///
305/// # Arguments
306///
307/// * `client` - Authenticated Octocrab client
308/// * `owner` - Repository owner
309/// * `repo` - Repository name
310/// * `number` - PR number
311/// * `body` - Review comment text
312/// * `event` - Review event type (Comment, Approve, or `RequestChanges`)
313/// * `comments` - Inline review comments to attach; entries with `line = None` are silently skipped
314/// * `commit_id` - Head commit SHA to associate with the review; omitted from payload if empty
315///
316/// # Returns
317///
318/// Review ID on success.
319///
320/// # Errors
321///
322/// Returns an error if the API call fails, user lacks write access, or PR is not found.
323#[allow(clippy::too_many_arguments)]
324#[instrument(skip(client, comments), fields(owner = %owner, repo = %repo, number = number, event = %event))]
325pub async fn post_pr_review(
326    client: &Octocrab,
327    owner: &str,
328    repo: &str,
329    number: u64,
330    body: &str,
331    event: ReviewEvent,
332    comments: &[PrReviewComment],
333    commit_id: &str,
334) -> Result<u64> {
335    debug!("Posting PR review");
336
337    let route = format!("/repos/{owner}/{repo}/pulls/{number}/reviews");
338
339    // Build inline comments array; skip entries without a line number.
340    let inline_comments: Vec<serde_json::Value> = comments
341        .iter()
342        // Comments without a line number cannot be anchored to the diff; skip silently.
343        .filter_map(|c| {
344            c.line.map(|line| {
345                serde_json::json!({
346                    "path": c.file,
347                    "line": line,
348                    // RIGHT = new version of the file (added/changed lines).
349                    // Use line (file line number) rather than the deprecated
350                    // position (diff hunk offset) so no hunk parsing is needed.
351                    "side": "RIGHT",
352                    "body": render_pr_review_comment_body(c),
353                })
354            })
355        })
356        .collect();
357
358    let mut payload = serde_json::json!({
359        "body": body,
360        "event": event.to_string(),
361        "comments": inline_comments,
362    });
363
364    // commit_id is optional; include only when non-empty.
365    if !commit_id.is_empty() {
366        payload["commit_id"] = serde_json::Value::String(commit_id.to_string());
367    }
368
369    #[derive(serde::Deserialize)]
370    struct ReviewResponse {
371        id: u64,
372    }
373
374    let response: ReviewResponse = client.post(route, Some(&payload)).await.with_context(|| {
375        format!(
376            "Failed to post review to PR #{number} in {owner}/{repo}. \
377                 Check that you have write access to the repository."
378        )
379    })?;
380
381    debug!(review_id = response.id, "PR review posted successfully");
382
383    Ok(response.id)
384}
385
/// Extract labels from PR metadata (title and file paths).
///
/// Parses the conventional commit prefix from the PR title — handling
/// `type: ...`, `type(scope): ...`, and breaking-change markers such as
/// `type!: ...` / `type(scope)!: ...` — and maps changed file paths to
/// scope labels.
/// Returns a vector of label names to apply to the PR.
///
/// # Arguments
/// * `title` - PR title (may contain conventional commit prefix)
/// * `file_paths` - List of file paths changed in the PR
///
/// # Returns
/// Vector of label names to apply (deduplicated; order unspecified)
#[must_use]
pub fn labels_from_pr_metadata(title: &str, file_paths: &[String]) -> Vec<String> {
    // HashSet dedupes labels when title and paths map to the same label.
    let mut labels = std::collections::HashSet::new();

    // Extract conventional commit type from title.
    // Handle "feat: ...", "feat(scope): ...", and the Conventional Commits
    // breaking-change marker "feat!: ..." / "feat(scope)!: ..." (the `!`
    // immediately precedes the colon, so strip it after isolating the type).
    let prefix = title
        .split(':')
        .next()
        .unwrap_or("")
        .split('(')
        .next()
        .unwrap_or("")
        .trim()
        .trim_end_matches('!');

    // Map conventional commit type to label
    let type_label = match prefix {
        "feat" | "perf" => Some("enhancement"),
        "fix" => Some("bug"),
        "docs" => Some("documentation"),
        "refactor" => Some("refactor"),
        _ => None,
    };

    if let Some(label) = type_label {
        labels.insert(label.to_string());
    }

    // Map file paths to scope labels
    for path in file_paths {
        let scope = if path.starts_with("crates/aptu-cli/") {
            Some("cli")
        } else if path.starts_with("crates/aptu-ffi/") || path.starts_with("AptuApp/") {
            Some("ios")
        } else if path.starts_with("docs/") {
            Some("documentation")
        } else {
            None
        };

        if let Some(label) = scope {
            labels.insert(label.to_string());
        }
    }

    labels.into_iter().collect()
}
444
445/// Creates a pull request on GitHub.
446///
447/// # Arguments
448///
449/// * `client` - Authenticated Octocrab client
450/// * `owner` - Repository owner
451/// * `repo` - Repository name
452/// * `title` - PR title
453/// * `head_branch` - Head branch (the branch with changes)
454/// * `base_branch` - Base branch (the branch to merge into)
455/// * `body` - Optional PR body text
456///
457/// # Returns
458///
459/// `PrCreateResult` with PR metadata.
460///
461/// # Errors
462///
463/// Returns an error if the API call fails or the user lacks write access.
464#[instrument(skip(client), fields(owner = %owner, repo = %repo, head = %head_branch, base = %base_branch))]
465pub async fn create_pull_request(
466    client: &Octocrab,
467    owner: &str,
468    repo: &str,
469    title: &str,
470    head_branch: &str,
471    base_branch: &str,
472    body: Option<&str>,
473) -> anyhow::Result<PrCreateResult> {
474    debug!("Creating pull request");
475
476    let pr = client
477        .pulls(owner, repo)
478        .create(title, head_branch, base_branch)
479        .body(body.unwrap_or_default())
480        .draft(false)
481        .send()
482        .await
483        .with_context(|| {
484            format!("Failed to create PR in {owner}/{repo} ({head_branch} -> {base_branch})")
485        })?;
486
487    let result = PrCreateResult {
488        pr_number: pr.number,
489        url: pr.html_url.map_or_else(String::new, |u| u.to_string()),
490        branch: pr.head.ref_field,
491        base: pr.base.ref_field,
492        title: pr.title.unwrap_or_default(),
493        draft: pr.draft.unwrap_or(false),
494        files_changed: u32::try_from(pr.changed_files.unwrap_or_default()).unwrap_or(u32::MAX),
495        additions: pr.additions.unwrap_or_default(),
496        deletions: pr.deletions.unwrap_or_default(),
497    };
498
499    debug!(
500        pr_number = result.pr_number,
501        "Pull request created successfully"
502    );
503
504    Ok(result)
505}
506
507/// Determines whether a file should be skipped during fetch based on status and patch.
508/// Emits a debug log with the skip reason. Returns true if the file should be skipped
509/// (removed status or no patch), false otherwise.
510fn should_skip_file(filename: &str, status: &str, patch: Option<&String>) -> bool {
511    if status.to_lowercase().contains("removed") {
512        debug!(file = %filename, "Skipping removed file");
513        return true;
514    }
515    if patch.is_none_or(String::is_empty) {
516        debug!(file = %filename, "Skipping file with empty patch");
517        return true;
518    }
519    false
520}
521
#[cfg(test)]
mod tests {
    //! Unit tests covering payload construction for reviews, label mapping,
    //! file-skip rules, and base64 decode/truncate behavior. Network-bound
    //! paths (live Octocrab calls) are intentionally not exercised here.
    use super::*;
    use crate::ai::types::CommentSeverity;

    // Test-local mirror of the decode+truncate logic used when fetching
    // file contents (base64 decode, then char-boundary truncation).
    fn decode_content(encoded: &str, max_chars: usize) -> Option<String> {
        use base64::Engine;
        let engine = base64::engine::general_purpose::STANDARD;
        let decoded_bytes = engine.decode(encoded).ok()?;
        let decoded_str = String::from_utf8(decoded_bytes).ok()?;

        if decoded_str.len() <= max_chars {
            Some(decoded_str)
        } else {
            Some(decoded_str.chars().take(max_chars).collect::<String>())
        }
    }

    #[test]
    fn test_pr_create_result_fields() {
        // Arrange / Act: construct directly (no network call needed)
        let result = PrCreateResult {
            pr_number: 42,
            url: "https://github.com/owner/repo/pull/42".to_string(),
            branch: "feat/my-feature".to_string(),
            base: "main".to_string(),
            title: "feat: add feature".to_string(),
            draft: false,
            files_changed: 3,
            additions: 100,
            deletions: 10,
        };

        // Assert
        assert_eq!(result.pr_number, 42);
        assert_eq!(result.url, "https://github.com/owner/repo/pull/42");
        assert_eq!(result.branch, "feat/my-feature");
        assert_eq!(result.base, "main");
        assert_eq!(result.title, "feat: add feature");
        assert!(!result.draft);
        assert_eq!(result.files_changed, 3);
        assert_eq!(result.additions, 100);
        assert_eq!(result.deletions, 10);
    }

    // ---------------------------------------------------------------------------
    // post_pr_review payload construction
    // ---------------------------------------------------------------------------

    /// Helper: build the inline comments JSON array using the same logic as
    /// `post_pr_review`, without making a live HTTP call.
    fn build_inline_comments(comments: &[PrReviewComment]) -> Vec<serde_json::Value> {
        comments
            .iter()
            .filter_map(|c| {
                c.line.map(|line| {
                    serde_json::json!({
                        "path": c.file,
                        "line": line,
                        "side": "RIGHT",
                        "body": render_pr_review_comment_body(c),
                    })
                })
            })
            .collect()
    }

    #[test]
    fn test_post_pr_review_payload_with_comments() {
        // Arrange
        let comments = vec![PrReviewComment {
            file: "src/main.rs".to_string(),
            line: Some(42),
            comment: "Consider using a match here.".to_string(),
            severity: CommentSeverity::Suggestion,
            suggested_code: None,
        }];

        // Act
        let inline = build_inline_comments(&comments);

        // Assert
        assert_eq!(inline.len(), 1);
        assert_eq!(inline[0]["path"], "src/main.rs");
        assert_eq!(inline[0]["line"], 42);
        assert_eq!(inline[0]["side"], "RIGHT");
        assert_eq!(inline[0]["body"], "Consider using a match here.");
    }

    #[test]
    fn test_post_pr_review_skips_none_line_comments() {
        // Arrange: one comment with a line, one without.
        let comments = vec![
            PrReviewComment {
                file: "src/lib.rs".to_string(),
                line: None,
                comment: "General file comment.".to_string(),
                severity: CommentSeverity::Info,
                suggested_code: None,
            },
            PrReviewComment {
                file: "src/lib.rs".to_string(),
                line: Some(10),
                comment: "Inline comment.".to_string(),
                severity: CommentSeverity::Warning,
                suggested_code: None,
            },
        ];

        // Act
        let inline = build_inline_comments(&comments);

        // Assert: only the comment with a line is included.
        assert_eq!(inline.len(), 1);
        assert_eq!(inline[0]["line"], 10);
    }

    #[test]
    fn test_post_pr_review_empty_comments() {
        // Arrange
        let comments: Vec<PrReviewComment> = vec![];

        // Act
        let inline = build_inline_comments(&comments);

        // Assert: empty slice produces empty array, which serializes as [].
        assert!(inline.is_empty());
        let serialized = serde_json::to_string(&inline).unwrap();
        assert_eq!(serialized, "[]");
    }

    // ---------------------------------------------------------------------------
    // Existing tests
    // ---------------------------------------------------------------------------

    // Smoke test to verify parse_pr_reference delegates correctly.
    // Comprehensive parsing tests are in github/mod.rs.
    #[test]
    fn test_parse_pr_reference_delegates_to_shared() {
        let (owner, repo, number) =
            parse_pr_reference("https://github.com/block/goose/pull/123", None).unwrap();
        assert_eq!(owner, "block");
        assert_eq!(repo, "goose");
        assert_eq!(number, 123);
    }

    #[test]
    fn test_title_prefix_to_label_mapping() {
        let cases = vec![
            (
                "feat: add new feature",
                vec!["enhancement"],
                "feat should map to enhancement",
            ),
            ("fix: resolve bug", vec!["bug"], "fix should map to bug"),
            (
                "docs: update readme",
                vec!["documentation"],
                "docs should map to documentation",
            ),
            (
                "refactor: improve code",
                vec!["refactor"],
                "refactor should map to refactor",
            ),
            (
                "perf: optimize",
                vec!["enhancement"],
                "perf should map to enhancement",
            ),
            (
                "chore: update deps",
                vec![],
                "chore should produce no labels",
            ),
        ];

        for (title, expected_labels, msg) in cases {
            let labels = labels_from_pr_metadata(title, &[]);
            for expected in &expected_labels {
                assert!(
                    labels.contains(&expected.to_string()),
                    "{msg}: expected '{expected}' in {labels:?}",
                );
            }
            if expected_labels.is_empty() {
                assert!(labels.is_empty(), "{msg}: expected empty, got {labels:?}",);
            }
        }
    }

    #[test]
    fn test_file_path_to_scope_mapping() {
        let cases = vec![
            (
                "feat: cli",
                vec!["crates/aptu-cli/src/main.rs"],
                vec!["enhancement", "cli"],
                "cli path should map to cli scope",
            ),
            (
                "feat: ios",
                vec!["crates/aptu-ffi/src/lib.rs"],
                vec!["enhancement", "ios"],
                "ffi path should map to ios scope",
            ),
            (
                "feat: ios",
                vec!["AptuApp/ContentView.swift"],
                vec!["enhancement", "ios"],
                "app path should map to ios scope",
            ),
            (
                "feat: docs",
                vec!["docs/GITHUB_ACTION.md"],
                vec!["enhancement", "documentation"],
                "docs path should map to documentation scope",
            ),
            (
                "feat: workflow",
                vec![".github/workflows/test.yml"],
                vec!["enhancement"],
                "workflow path should be ignored",
            ),
        ];

        for (title, paths, expected_labels, msg) in cases {
            let labels = labels_from_pr_metadata(
                title,
                &paths
                    .iter()
                    .map(std::string::ToString::to_string)
                    .collect::<Vec<_>>(),
            );
            for expected in expected_labels {
                assert!(
                    labels.contains(&expected.to_string()),
                    "{msg}: expected '{expected}' in {labels:?}",
                );
            }
        }
    }

    #[test]
    fn test_combined_title_and_paths() {
        let labels = labels_from_pr_metadata(
            "feat: multi",
            &[
                "crates/aptu-cli/src/main.rs".to_string(),
                "docs/README.md".to_string(),
            ],
        );
        assert!(
            labels.contains(&"enhancement".to_string()),
            "should include enhancement from feat prefix"
        );
        assert!(
            labels.contains(&"cli".to_string()),
            "should include cli from path"
        );
        assert!(
            labels.contains(&"documentation".to_string()),
            "should include documentation from path"
        );
    }

    #[test]
    fn test_no_match_returns_empty() {
        let cases = vec![
            (
                "Random title",
                vec![],
                "unrecognized prefix should return empty",
            ),
            (
                "chore: update",
                vec![],
                "ignored prefix should return empty",
            ),
        ];

        for (title, paths, msg) in cases {
            let labels = labels_from_pr_metadata(title, &paths);
            assert!(labels.is_empty(), "{msg}: got {labels:?}");
        }
    }

    #[test]
    fn test_scoped_prefix_extracts_type() {
        let labels = labels_from_pr_metadata("feat(cli): add new feature", &[]);
        assert!(
            labels.contains(&"enhancement".to_string()),
            "scoped prefix should extract type from feat(cli)"
        );
    }

    #[test]
    fn test_duplicate_labels_deduplicated() {
        let labels = labels_from_pr_metadata("docs: update", &["docs/README.md".to_string()]);
        assert_eq!(
            labels.len(),
            1,
            "should have exactly one label when title and path both map to documentation"
        );
        assert!(
            labels.contains(&"documentation".to_string()),
            "should contain documentation label"
        );
    }

    // Renamed from test_should_skip_file_respects_fetched_count_cap: this
    // test never exercised the fetched_count/max_files cap (that logic lives
    // in fetch_file_contents); it verifies the per-file skip rules only.
    #[test]
    fn test_should_skip_file_skip_rules() {
        // Test that should_skip_file correctly identifies files to skip.
        // Files with removed status or no patch should be skipped.
        let removed_file = PrFile {
            filename: "removed.rs".to_string(),
            status: "removed".to_string(),
            additions: 0,
            deletions: 5,
            patch: None,
            full_content: None,
        };
        let modified_file = PrFile {
            filename: "file_0.rs".to_string(),
            status: "modified".to_string(),
            additions: 1,
            deletions: 0,
            patch: Some("+ new code".to_string()),
            full_content: None,
        };
        let no_patch_file = PrFile {
            filename: "file_1.rs".to_string(),
            status: "modified".to_string(),
            additions: 1,
            deletions: 0,
            patch: None,
            full_content: None,
        };

        // Assert: removed files are skipped
        assert!(
            should_skip_file(
                &removed_file.filename,
                &removed_file.status,
                removed_file.patch.as_ref()
            ),
            "removed files should be skipped"
        );

        // Assert: modified files with patch are not skipped
        assert!(
            !should_skip_file(
                &modified_file.filename,
                &modified_file.status,
                modified_file.patch.as_ref()
            ),
            "modified files with patch should not be skipped"
        );

        // Assert: files without patch are skipped
        assert!(
            should_skip_file(
                &no_patch_file.filename,
                &no_patch_file.status,
                no_patch_file.patch.as_ref()
            ),
            "files without patch should be skipped"
        );
    }

    #[test]
    fn test_decode_content_valid_base64() {
        // Arrange: valid base64-encoded string
        use base64::Engine;
        let engine = base64::engine::general_purpose::STANDARD;
        let original = "Hello, World!";
        let encoded = engine.encode(original);

        // Act: decode with sufficient max_chars
        let result = decode_content(&encoded, 1000);

        // Assert: decoding succeeds and matches original
        assert_eq!(
            result,
            Some(original.to_string()),
            "valid base64 should decode successfully"
        );
    }

    #[test]
    fn test_decode_content_invalid_base64() {
        // Arrange: invalid base64 string
        let invalid_base64 = "!!!invalid!!!";

        // Act: attempt to decode
        let result = decode_content(invalid_base64, 1000);

        // Assert: decoding fails gracefully
        assert_eq!(result, None, "invalid base64 should return None");
    }

    #[test]
    fn test_decode_content_truncates_at_max_chars() {
        // Arrange: multi-byte UTF-8 string (Japanese characters)
        use base64::Engine;
        let engine = base64::engine::general_purpose::STANDARD;
        let original = "こんにちは".repeat(10); // 50 characters total
        let encoded = engine.encode(&original);
        let max_chars = 10;

        // Act: decode with max_chars limit
        let result = decode_content(&encoded, max_chars);

        // Assert: result is truncated to max_chars on character boundary
        assert!(result.is_some(), "decoding should succeed");
        let decoded = result.unwrap();
        assert_eq!(
            decoded.chars().count(),
            max_chars,
            "output should be truncated to max_chars on character boundary"
        );
        assert!(
            decoded.is_char_boundary(decoded.len()),
            "output should be valid UTF-8 (truncated on char boundary)"
        );
    }
}