use crate::agent::shell::{cmd_capture, cmd_run, cmd_stdout, cmd_stdout_or_die, log};
pub use cli_common::{PendingIssue, PrAuthor, PrSummary, TrackerInfo};
use std::collections::{HashMap, HashSet};
/// Canonical GitHub label names used by the agent when creating or querying
/// issues and PRs. Kept in one place so label strings never drift between
/// call sites. `dead_code` is allowed because not every binary uses every label.
#[allow(dead_code)]
pub mod labels {
    // Workflow / process labels.
    pub const TRACKER: &str = "tracker";
    pub const IDEATION: &str = "ideation";
    pub const UXR_SYNTHESIS: &str = "uxr-synthesis";
    pub const STRATEGIC_REVIEW: &str = "strategic-review";
    pub const ROADMAP: &str = "roadmap";
    pub const SPRINT: &str = "sprint";
    pub const CODE_REVIEW: &str = "code-review";
    pub const SECURITY: &str = "security";
    pub const RETROSPECTIVE: &str = "retrospective";
    pub const DEV_UI: &str = "dev-ui";
    // Component/area labels ("area:" prefix).
    pub const AREA_DEV_UI: &str = "area:dev-ui";
    pub const AREA_EDGE_NODE: &str = "area:edge-node";
    pub const AREA_GATEWAY_NODE: &str = "area:gateway-node";
    pub const AREA_NETWORK_NODE: &str = "area:network-node";
    pub const AREA_SERVICE_NODE: &str = "area:service-node";
    pub const AREA_CONSOLE_NODE: &str = "area:console-node";
    pub const AREA_CARETTA_CLI: &str = "area:caretta-cli";
    pub const AREA_DOCS: &str = "area:docs";
    pub const AREA_CI: &str = "area:ci";
    // Change-type labels ("kind:" prefix).
    pub const KIND_BUG: &str = "kind:bug";
    pub const KIND_FEATURE: &str = "kind:feature";
    pub const KIND_REFACTOR: &str = "kind:refactor";
    pub const KIND_PERF: &str = "kind:perf";
    pub const KIND_TEST: &str = "kind:test";
    pub const KIND_DOCS: &str = "kind:docs";
    pub const KIND_CHORE: &str = "kind:chore";
    pub const KIND_SECURITY: &str = "kind:security";
    // Severity labels ("severity:" prefix).
    pub const SEVERITY_CRITICAL: &str = "severity:critical";
    pub const SEVERITY_HIGH: &str = "severity:high";
    pub const SEVERITY_MEDIUM: &str = "severity:medium";
    pub const SEVERITY_LOW: &str = "severity:low";
    pub const SEVERITY_INFO: &str = "severity:info";
    // Priority labels ("priority:" prefix).
    pub const PRIORITY_P0: &str = "priority:p0";
    pub const PRIORITY_P1: &str = "priority:p1";
    pub const PRIORITY_P2: &str = "priority:p2";
    pub const PRIORITY_P3: &str = "priority:p3";
    // Lifecycle/status labels ("status:" prefix).
    pub const STATUS_BLOCKED: &str = "status:blocked";
    pub const STATUS_NEEDS_REVIEW: &str = "status:needs-review";
    pub const STATUS_WONTFIX: &str = "status:wontfix";
}
/// Extracts every `#NNN` issue reference from `s`, in order of appearance.
///
/// Whitespace is tolerated between `#` and the digits (`"# 12"` yields 12).
/// A `#` with no following digits is skipped. Numbers too large for `u32`
/// are silently dropped.
pub fn extract_issue_refs(s: &str) -> Vec<u32> {
    let b = s.as_bytes();
    let mut refs = Vec::new();
    let mut pos = 0usize;
    while pos < b.len() {
        if b[pos] != b'#' {
            pos += 1;
            continue;
        }
        // Consume '#' and any ASCII whitespace after it.
        pos += 1;
        while pos < b.len() && b[pos].is_ascii_whitespace() {
            pos += 1;
        }
        // Consume the run of digits (may be empty).
        let digits_start = pos;
        while pos < b.len() && b[pos].is_ascii_digit() {
            pos += 1;
        }
        if pos > digits_start {
            // Parse only fails on u32 overflow; such refs are ignored.
            if let Ok(n) = s[digits_start..pos].parse::<u32>() {
                refs.push(n);
            }
        }
    }
    refs
}
/// Extracts every maximal run of ASCII digits in `s` as a `u32`, in order.
///
/// Runs that overflow `u32` are silently skipped. Used as a fallback when a
/// "blocked by" tail lists issue numbers without the `#` prefix.
pub fn extract_bare_numbers(s: &str) -> Vec<u32> {
    let b = s.as_bytes();
    let mut found = Vec::new();
    let mut idx = 0usize;
    while idx < b.len() {
        if !b[idx].is_ascii_digit() {
            idx += 1;
            continue;
        }
        let begin = idx;
        while idx < b.len() && b[idx].is_ascii_digit() {
            idx += 1;
        }
        if let Ok(n) = s[begin..idx].parse::<u32>() {
            found.push(n);
        }
    }
    found
}
/// Parses the blocker list from the tail of a "blocked by ..." clause.
///
/// Prefers explicit `#NNN` references; only when none are present does it
/// fall back to interpreting bare numbers as issue ids.
pub fn extract_blockers(tail: &str) -> Vec<u32> {
    let hashed = extract_issue_refs(tail);
    if hashed.is_empty() {
        extract_bare_numbers(tail)
    } else {
        hashed
    }
}
/// Scans a tracker body and returns the set of issue numbers on lines that
/// look completed.
///
/// A line counts as "done" if its lowercased form contains any completion
/// marker. NOTE: matching is plain substring search, so e.g. "complete" also
/// matches "completed" (and "incomplete") — this mirrors the original
/// heuristic intentionally. Table rows (lines containing `|`) contribute only
/// their first `#NNN` reference; other lines contribute every reference.
pub fn parse_completed(body: &str) -> HashSet<u32> {
    const DONE_MARKERS: [&str; 6] = ["[x]", "✅", "✔️", "☑️", "done", "complete"];
    let mut done = HashSet::new();
    for line in body.lines() {
        let lower = line.to_lowercase();
        if !DONE_MARKERS.iter().any(|marker| lower.contains(marker)) {
            continue;
        }
        let refs = extract_issue_refs(line);
        if line.contains('|') {
            // Table row: the first reference is the issue itself; later
            // references are dependency columns, not completions.
            if let Some(&first) = refs.first() {
                done.insert(first);
            }
        } else {
            done.extend(refs);
        }
    }
    done
}
/// Scans a tracker body and returns the pending (not-yet-done) issues, in
/// document order, each with its blocker list and a best-effort title.
///
/// A line is "pending" when its lowercased form contains `[ ]`, 🟡 or 🔴 and
/// its first `#NNN` reference is not already in the completed set. Duplicate
/// references keep only their first occurrence. Blockers come from a
/// "blocked by ..." tail when present, otherwise from the third `|`-separated
/// column of a table row. `pr_number` is always `None` here; it is filled in
/// elsewhere.
///
/// NOTE(review): indices found in `lower` (from `to_lowercase()`) are used to
/// slice the original `line`. Byte offsets only coincide when lowercasing
/// does not change character lengths — true for ASCII and the emoji used
/// here, but not for every Unicode string. Confirm tracker bodies stay in
/// that subset, or this can slice at the wrong offset/panic.
pub fn parse_pending(body: &str) -> Vec<PendingIssue> {
    let completed = parse_completed(body);
    let mut issues = Vec::new();
    // Tracks issue numbers already emitted so duplicates are dropped.
    let mut seen = HashSet::new();
    for line in body.lines() {
        let lower = line.to_lowercase();
        let is_pending = lower.contains("[ ]") || lower.contains("🟡") || lower.contains("🔴");
        if !is_pending {
            continue;
        }
        let refs = extract_issue_refs(line);
        // Lines without an explicit issue reference are ignored.
        let Some(&number) = refs.first() else {
            continue;
        };
        if completed.contains(&number) || !seen.insert(number) {
            continue;
        }
        let blockers = match lower.find("blocked by") {
            Some(idx) => {
                // Everything after "blocked by" is the blocker list.
                let tail = &line[idx + "blocked by".len()..];
                extract_blockers(tail)
            }
            None => {
                if line.contains('|') {
                    // Table row fallback: blockers live in the third column.
                    let parts: Vec<&str> = line.split('|').collect();
                    if parts.len() >= 3 {
                        extract_issue_refs(parts[2])
                    } else {
                        vec![]
                    }
                } else {
                    vec![]
                }
            }
        };
        let title = {
            // Title is whatever follows "#<number>", with markdown
            // punctuation stripped from the front.
            let after_ref = if let Some(pos) = line.find(&format!("#{number}")) {
                let skip = pos + format!("#{number}").len();
                line[skip..].trim_start_matches(|c: char| {
                    c == '*' || c == '_' || c == ' ' || c == ':' || c == ')'
                })
            } else {
                ""
            };
            // Title ends at the next table separator or a "blocked" clause.
            let end = after_ref
                .find('|')
                .or_else(|| after_ref.to_lowercase().find("blocked"))
                .unwrap_or(after_ref.len());
            after_ref[..end]
                .trim()
                .trim_end_matches(['*', '_'])
                .to_string()
        };
        issues.push(PendingIssue {
            number,
            title,
            blockers,
            pr_number: None,
        });
    }
    issues
}
/// Returns `true` when every blocker of `issue` is in the `completed` set
/// (vacuously true for an issue with no blockers).
pub fn is_ready(issue: &PendingIssue, completed: &HashSet<u32>) -> bool {
    !issue.blockers.iter().any(|b| !completed.contains(b))
}
/// Computes an execution order for the tracker's pending issues that
/// respects "blocked by" dependencies.
///
/// Greedy topological pick: one issue per round, choosing among the issues
/// whose blockers are all satisfied the one that appears earliest in the
/// document. A blocker is satisfied when it is completed, not in the pending
/// set at all (unknown/external), or already picked this run. If no issue is
/// ready (dependency cycle or unsatisfiable blockers), the remaining issues
/// are appended in document order so the result always covers every pending
/// issue.
pub fn pending_issues_execution_order(body: &str) -> Vec<u32> {
    let completed = parse_completed(body);
    let pending = parse_pending(body);
    if pending.is_empty() {
        return Vec::new();
    }
    let pending_set: HashSet<u32> = pending.iter().map(|p| p.number).collect();
    // Document position of each issue, used as the tie-break key.
    let doc_rank: HashMap<u32, usize> = pending
        .iter()
        .enumerate()
        .map(|(idx, p)| (p.number, idx))
        .collect();
    // True when every blocker is completed, external, or already picked.
    fn blockers_satisfied_for_pick(
        blockers: &[u32],
        pending_set: &HashSet<u32>,
        completed: &HashSet<u32>,
        picked: &HashSet<u32>,
    ) -> bool {
        blockers.iter().all(|b| {
            if completed.contains(b) {
                return true;
            }
            if !pending_set.contains(b) {
                return true;
            }
            picked.contains(b)
        })
    }
    let mut ordered = Vec::with_capacity(pending.len());
    let mut picked: HashSet<u32> = HashSet::new();
    while picked.len() < pending.len() {
        // Recompute the ready set each round; picking one issue may unblock
        // others, and the doc-order tie-break must consider all of them.
        let mut ready: Vec<u32> = pending
            .iter()
            .filter(|p| !picked.contains(&p.number))
            .filter(|p| blockers_satisfied_for_pick(&p.blockers, &pending_set, &completed, &picked))
            .map(|p| p.number)
            .collect();
        if ready.is_empty() {
            // Cycle / unsatisfiable blockers: flush the rest in doc order.
            let mut rest: Vec<u32> = pending
                .iter()
                .filter(|p| !picked.contains(&p.number))
                .map(|p| p.number)
                .collect();
            rest.sort_by_key(|n| doc_rank[n]);
            for n in rest {
                ordered.push(n);
                picked.insert(n);
            }
            break;
        }
        ready.sort_by_key(|n| doc_rank[n]);
        let next = ready[0];
        ordered.push(next);
        picked.insert(next);
    }
    ordered
}
/// Returns a copy of `body` with the unchecked checkbox on the line whose
/// first `#NNN` reference equals `issue_num` flipped to `- [x] `.
///
/// Only the first `- [ ] ` occurrence on a matching line is replaced. The
/// body's trailing-newline state is preserved. Note that `lines()` drops
/// `\r`, so CRLF bodies are normalized to LF — same as the original behavior.
pub fn mark_completed(body: &str, issue_num: u32) -> String {
    let unchecked = "- [ ] ";
    let mut out = String::with_capacity(body.len());
    for line in body.lines() {
        let is_target =
            line.contains(unchecked) && extract_issue_refs(line).first() == Some(&issue_num);
        if is_target {
            out.push_str(&line.replacen(unchecked, "- [x] ", 1));
        } else {
            out.push_str(line);
        }
        out.push('\n');
    }
    // lines() never yields a final empty segment, so drop the newline we
    // appended when the input did not end with one.
    if !body.ends_with('\n') && out.ends_with('\n') {
        out.pop();
    }
    out
}
/// Lists the numbers of open issues whose title mentions "retro", via the
/// `gh` CLI. Returns an empty vec if the command fails or prints nothing.
pub fn find_retro_issues() -> Vec<u32> {
    let Some(raw) = cmd_stdout(
        "gh",
        &[
            "issue",
            "list",
            "--search",
            "retro in:title",
            "--state",
            "open",
            "--json",
            "number",
            "--jq",
            ".[].number",
        ],
    ) else {
        return Vec::new();
    };
    // One issue number per line; unparsable lines are skipped.
    let mut numbers = Vec::new();
    for line in raw.lines() {
        if let Ok(n) = line.trim().parse::<u32>() {
            numbers.push(n);
        }
    }
    numbers
}
/// Parses the JSON emitted by `gh issue list --json number,title` into
/// `TrackerInfo` rows, sorted and deduplicated by issue number.
///
/// Malformed JSON yields an empty list rather than an error.
pub(crate) fn parse_tracker_list(json: &str) -> Vec<TrackerInfo> {
    #[derive(serde::Deserialize)]
    struct Row {
        number: u32,
        title: String,
    }
    let mut trackers: Vec<TrackerInfo> = serde_json::from_str::<Vec<Row>>(json)
        .unwrap_or_default()
        .into_iter()
        .map(|row| TrackerInfo {
            number: row.number,
            title: row.title,
        })
        .collect();
    // Sort first so dedup_by_key removes all duplicate numbers.
    trackers.sort_by_key(|t| t.number);
    trackers.dedup_by_key(|t| t.number);
    trackers
}
/// Returns all open issues carrying the tracker label, or an empty vec when
/// the `gh` invocation fails.
pub fn find_tracker() -> Vec<TrackerInfo> {
    cmd_stdout(
        "gh",
        &[
            "issue",
            "list",
            "--label",
            labels::TRACKER,
            "--state",
            "open",
            "--json",
            "number,title",
        ],
    )
    .map(|json| parse_tracker_list(&json))
    .unwrap_or_default()
}
pub fn open_pr_map_from(prs: &[PrSummary]) -> std::collections::HashMap<u32, u32> {
let mut map = std::collections::HashMap::new();
for pr in prs {
if let Some(rest) = pr.head_ref_name.strip_prefix("agent/issue-")
&& let Ok(issue_num) = rest.parse::<u32>()
{
map.insert(issue_num, pr.number);
}
}
map
}
/// Fetches the body text of tracker issue `tracker` via `gh issue view`,
/// terminating the process (via `cmd_stdout_or_die`) on failure.
pub fn get_tracker_body(tracker: u32) -> String {
    let id = tracker.to_string();
    cmd_stdout_or_die(
        "gh",
        &["issue", "view", &id, "--json", "body", "--jq", ".body"],
        "failed to read tracker body",
    )
}
/// Marks `issue_num` as done in the tracker's checklist body and pushes the
/// edited body back via `gh issue edit`. Dies if the edit fails; logs on
/// success.
pub fn check_off_issue(tracker: u32, issue_num: u32) {
    let updated = mark_completed(&get_tracker_body(tracker), issue_num);
    let tracker_id = tracker.to_string();
    let edited = cmd_run("gh", &["issue", "edit", &tracker_id, "--body", &updated]);
    if !edited {
        crate::agent::shell::die(&format!("failed to check off #{issue_num} in tracker"));
    }
    log(&format!("Checked off #{issue_num} in tracker"));
}
/// Closes issue `issue_num` via `gh issue close`. Failure is logged as a
/// warning rather than aborting — closing is best-effort.
pub fn close_issue(issue_num: u32) {
    let id = issue_num.to_string();
    if cmd_run("gh", &["issue", "close", &id]) {
        log(&format!("Closed #{issue_num}"));
    } else {
        log(&format!("WARNING: failed to close #{issue_num}"));
    }
}
/// Picks the branch a new agent branch should be based on: the head branch
/// of the first blocker that still has an open `agent/issue-<N>` PR, or the
/// repo's default branch when no blocker has one.
pub fn find_upstream_branch(blockers: &[u32]) -> String {
    for &blocker in blockers {
        let head = format!("agent/issue-{blocker}");
        let found = cmd_stdout(
            "gh",
            &[
                "pr",
                "list",
                "--head",
                &head,
                "--state",
                "open",
                "--json",
                "headRefName",
                "--jq",
                ".[0].headRefName",
            ],
        );
        match found {
            // Empty output means no open PR for that head branch.
            Some(branch) if !branch.is_empty() => return branch,
            _ => {}
        }
    }
    crate::agent::cmd::origin_default_branch()
}
pub fn fetch_issue(issue_num: u32) -> (String, String) {
let num_s = issue_num.to_string();
let title = cmd_stdout_or_die(
"gh",
&["issue", "view", &num_s, "--json", "title", "--jq", ".title"],
&format!("failed to fetch issue #{issue_num}"),
);
let body = cmd_stdout_or_die(
"gh",
&["issue", "view", &num_s, "--json", "body", "--jq", ".body"],
&format!("failed to fetch issue #{issue_num}"),
);
(title, body)
}
/// Assembles the implementation prompt for an issue: optional parent-tracker
/// context, the issue itself, a codebase snapshot, and standing instructions.
///
/// `tracker_body` empty ⇒ both the tracker section and the extra tracker
/// instruction are omitted; `tracker_num` is only rendered in that section.
pub fn build_prompt(
    project_name: &str,
    issue_num: u32,
    title: &str,
    body: &str,
    codebase: &str,
    tracker_num: u32,
    tracker_body: &str,
) -> String {
    // Tracker context is all-or-nothing: section and instruction together.
    let (tracker_section, tracker_instruction) = if tracker_body.is_empty() {
        (String::new(), "")
    } else {
        (
            format!(
                r#"## Parent Tracker #{tracker_num}
This issue is part of a tracker. Read the tracker body below to understand the
broader scope, sibling dependencies, sprint goal, and any constraints the human
captured before starting work. **Treat the tracker as authoritative for scope**:
do not expand beyond what the tracker authorises, and do not narrow below what
sibling issues depend on you delivering.
{tracker_body}
"#
            ),
            "\n- Before diving into implementation, re-read the Parent Tracker section above. If your planned changes conflict with a sibling issue, the dependency hierarchy, or the sprint goal, **stop and surface the conflict as a comment on the tracker** instead of proceeding silently.",
        )
    };
    format!(
        r#"You are working on the {project_name} project.
{tracker_section}Implement the following GitHub issue:
## Issue #{issue_num}: {title}
{body}
## Codebase Snapshot
The following is a cleaned snapshot of the entire project. Use this as your primary
reference — avoid re-reading files that are already included below.
{codebase}
## Instructions
- Read AGENTS.md and the relevant skills/ for project conventions before starting.
- Implement the changes described above.
- Validate your changes using the test/build/format commands documented in AGENTS.md.
- Keep idle memory under 10MB — no unnecessary allocations.
- After implementing, update ISSUES.md: set the status of #{issue_num} to ✅ Done in the Task Dependency Hierarchy table.
- Update STATUS.md if this issue changes the status of any tracked feature (e.g., from 🟡 to ✅).
- CRITICAL: Always keep ISSUES.md and STATUS.md in sync with your changes.
- Do NOT commit changes — the calling script handles commits.{tracker_instruction}"#
    )
}
/// Builds the follow-up prompt used when testing for an issue failed,
/// embedding the captured output verbatim.
#[allow(dead_code)]
pub fn build_fix_prompt(issue_num: u32, output: &str) -> String {
    let mut prompt = format!("Testing failed for issue #{issue_num}.\nHere is the output:\n");
    prompt.push_str(output);
    prompt.push_str(
        "\nFix the issues reported above. Do NOT commit — the calling script handles commits.",
    );
    prompt
}
/// Builds the follow-up prompt for a pre-commit clippy failure, embedding
/// the clippy output and a fixed list of common remedies.
pub fn build_lint_fix_prompt(issue_num: u32, clippy_output: &str) -> String {
    // Static guidance rendered after the captured clippy output.
    let guidance = [
        "Fix ALL clippy warnings above. Common fixes:",
        "- `too_many_arguments`: add `#[allow(clippy::too_many_arguments)]` above the function",
        "- `doc_overindented_list_items`: fix doc comment indentation",
        "- `collapsible_if`: merge nested if-let into one",
        "- Other warnings: follow the clippy suggestion",
        "Do NOT commit — the calling script handles commits.",
    ]
    .join("\n");
    format!(
        "The pre-commit hook for issue #{issue_num} failed due to clippy warnings.\nHere is the clippy output:\n{clippy_output}\n{guidance}"
    )
}
/// Builds the follow-up prompt for failing tests, embedding the test output
/// and fixed triage guidance.
pub fn build_test_fix_prompt(issue_num: u32, test_output: &str) -> String {
    // Static guidance rendered after the captured test output.
    let guidance = [
        "Fix ALL test failures above. Common guidance:",
        "- If a test assertion fails, fix the code under test (not the test) unless the test expectation is clearly wrong.",
        "- If a test times out, look for deadlocks, missing signals, or infinite loops in the code being tested.",
        "- If a compilation error prevents tests from running, fix the compilation error.",
        "Do NOT commit — the calling script handles commits.",
    ]
    .join("\n");
    format!(
        "The configured test command for issue #{issue_num} reported failures.\nHere is the test output:\n{test_output}\n{guidance}"
    )
}
/// Lists up to 50 open PRs (number, title, head branch, author) via the
/// `gh` CLI. Command failure or unparsable JSON yields an empty vec.
pub fn list_open_prs() -> Vec<PrSummary> {
    let raw = cmd_stdout(
        "gh",
        &[
            "pr",
            "list",
            "--state",
            "open",
            "--json",
            "number,title,headRefName,author",
            "--limit",
            "50",
        ],
    );
    match raw {
        Some(json) => serde_json::from_str(&json).unwrap_or_default(),
        None => Vec::new(),
    }
}
/// Returns the number of the open PR whose head branch is `branch`, or
/// `None` when the `gh` call fails, no such PR exists, or the output does
/// not parse as a number.
pub fn open_pr_number_for_head_branch(branch: &str) -> Option<u32> {
    let raw = cmd_stdout(
        "gh",
        &[
            "pr",
            "list",
            "--head",
            branch,
            "--state",
            "open",
            "--json",
            "number",
            "--jq",
            ".[0].number // empty",
        ],
    )?;
    // `// empty` makes jq print nothing when there is no matching PR.
    match raw.trim() {
        "" => None,
        s => s.parse().ok(),
    }
}
/// Fetches the unified diff of PR `pr_num`, dying on failure.
pub fn pr_diff(pr_num: u32) -> String {
    let id = pr_num.to_string();
    cmd_stdout_or_die("gh", &["pr", "diff", &id], "failed to fetch PR diff")
}
/// Returns the PR associated with the current branch, or `None` when there
/// is no PR or its JSON cannot be parsed.
pub fn current_branch_pr() -> Option<PrSummary> {
    cmd_stdout("gh", &["pr", "view", "--json", "number,title,headRefName"])
        .and_then(|json| serde_json::from_str(&json).ok())
}
/// Interprets the jq output for `.autoMergeRequest`: auto-merge is enabled
/// iff the command produced a non-empty value other than the literal "null".
fn parse_auto_merge_response(output: Option<String>) -> bool {
    output.is_some_and(|s| !s.is_empty() && s != "null")
}
/// Returns whether auto-merge is already enabled on PR `pr_num`. A failed
/// `gh` invocation is treated as "not enabled".
pub fn is_auto_merge_enabled(pr_num: u32) -> bool {
    let id = pr_num.to_string();
    parse_auto_merge_response(cmd_stdout(
        "gh",
        &[
            "pr",
            "view",
            &id,
            "--json",
            "autoMergeRequest",
            "--jq",
            ".autoMergeRequest",
        ],
    ))
}
/// Enables squash auto-merge on PR `pr_num`, logging the attempt and its
/// outcome. Returns whether the `gh pr merge --auto` command succeeded.
pub fn enable_auto_merge(pr_num: u32) -> bool {
    let id = pr_num.to_string();
    log(&format!("Enabling auto-merge on PR #{pr_num}..."));
    let (succeeded, output) = cmd_capture("gh", &["pr", "merge", &id, "--auto", "--squash"]);
    if succeeded {
        log(&format!("Auto-merge enabled on PR #{pr_num}"));
    } else {
        // Include the captured output so the failure reason is visible.
        log(&format!(
            "Failed to enable auto-merge on PR #{pr_num}: {output}"
        ));
    }
    succeeded
}
/// Fetches the body text of PR `pr_num`, dying on failure.
pub fn pr_body(pr_num: u32) -> String {
    let id = pr_num.to_string();
    cmd_stdout_or_die(
        "gh",
        &["pr", "view", &id, "--json", "body", "--jq", ".body"],
        "failed to fetch PR body",
    )
}
/// Fetches the head branch name of PR `pr_num`, dying on failure.
pub fn pr_head_branch(pr_num: u32) -> String {
    let id = pr_num.to_string();
    cmd_stdout_or_die(
        "gh",
        &[
            "pr",
            "view",
            &id,
            "--json",
            "headRefName",
            "--jq",
            ".headRefName",
        ],
        "failed to fetch PR head branch",
    )
}
/// Fetches the review decision of PR `pr_num` (e.g. "APPROVED",
/// "CHANGES_REQUESTED"), trimmed; empty string when no decision exists, and
/// `None` when the `gh` call fails.
pub fn pr_review_decision(pr_num: u32) -> Option<String> {
    let id = pr_num.to_string();
    let raw = cmd_stdout(
        "gh",
        &[
            "pr",
            "view",
            &id,
            "--json",
            "reviewDecision",
            "--jq",
            ".reviewDecision // \"\"",
        ],
    )?;
    Some(raw.trim().to_string())
}
/// An unresolved PR review thread, flattened to its first comment.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ReviewThread {
    /// GraphQL node id of the thread (used to resolve it via mutation).
    pub id: String,
    /// File path the first comment is attached to.
    pub path: String,
    /// Line number of the first comment (`line`, falling back to
    /// `originalLine`; 0 when neither is present).
    pub line: u32,
    /// Body text of the thread's first comment.
    pub body: String,
    /// Login of the first comment's author ("" when unavailable).
    pub author: String,
}
/// Login of the automated reviewer whose threads the agent addresses.
pub const DEFAULT_REVIEW_BOT_LOGIN: &str = "caretta-ai";
/// Marker a human can leave in a review comment to opt that thread into
/// automated fixing.
pub const HUMAN_FIX_MARKER: &str = "@caretta fix";
/// Case-insensitive check for [`HUMAN_FIX_MARKER`] anywhere in `body`.
fn has_human_fix_marker(body: &str) -> bool {
    let lowered = body.to_lowercase();
    lowered.contains(HUMAN_FIX_MARKER)
}
/// Fetches the raw GraphQL JSON for the first 100 review threads of PR
/// `pr_num` (first comment of each thread only).
///
/// Resolves `owner/repo` via `gh repo view`, then issues a `gh api graphql`
/// query. Returns `None` (after logging a WARNING) when the repo cannot be
/// resolved, the identifier is malformed, or the API call fails.
fn pull_request_review_threads_json(pr_num: u32) -> Option<String> {
    let owner_repo = match cmd_stdout(
        "gh",
        &[
            "repo",
            "view",
            "--json",
            "nameWithOwner",
            "-q",
            ".nameWithOwner",
        ],
    ) {
        Some(s) if !s.is_empty() => s,
        _ => {
            log("WARNING: could not resolve owner/repo via `gh repo view`");
            return None;
        }
    };
    // nameWithOwner is "owner/repo"; split once to tolerate no extra slashes.
    let (owner, repo) = match owner_repo.split_once('/') {
        Some((o, r)) => (o.to_string(), r.to_string()),
        None => {
            log(&format!(
                "WARNING: unexpected repo identifier '{owner_repo}'"
            ));
            return None;
        }
    };
    // first: 100 caps both threads and PRs the API returns; comments(first: 1)
    // fetches only each thread's opening comment.
    let query = "\nquery($owner: String!, $repo: String!, $number: Int!) {\n repository(owner: $owner, name: $repo) {\n pullRequest(number: $number) {\n reviewThreads(first: 100) {\n nodes {\n id\n isResolved\n comments(first: 1) {\n nodes {\n author { login __typename }\n path\n line\n originalLine\n body\n }\n }\n }\n }\n }\n }\n}";
    let pr_num_s = pr_num.to_string();
    // -F passes typed fields (number stays an Int); -f passes the raw query.
    let owner_arg = format!("owner={owner}");
    let repo_arg = format!("repo={repo}");
    let number_arg = format!("number={pr_num_s}");
    let query_arg = format!("query={query}");
    match cmd_stdout(
        "gh",
        &[
            "api",
            "graphql",
            "-F",
            &owner_arg,
            "-F",
            &repo_arg,
            "-F",
            &number_arg,
            "-f",
            &query_arg,
        ],
    ) {
        Some(s) => Some(s),
        None => {
            log(&format!(
                "WARNING: failed to fetch review threads for PR #{pr_num}"
            ));
            None
        }
    }
}
/// Fetches the unresolved review threads on PR `pr_num` that are actionable
/// by the agent (opened by `bot_login`/a bot, or carrying the human fix
/// marker). Returns an empty vec when the fetch fails.
pub fn fetch_unresolved_review_threads(pr_num: u32, bot_login: &str) -> Vec<ReviewThread> {
    match pull_request_review_threads_json(pr_num) {
        Some(json) => parse_review_threads(&json, bot_login),
        None => Vec::new(),
    }
}
pub fn fetch_all_unresolved_review_threads(pr_num: u32) -> Vec<ReviewThread> {
pull_request_review_threads_json(pr_num)
.map(|out| parse_all_unresolved_review_threads(&out))
.unwrap_or_default()
}
/// GraphQL mutation that marks a single review thread as resolved.
const RESOLVE_REVIEW_THREAD_MUTATION: &str = "\nmutation($threadId: ID!) {\n resolveReviewThread(input: {threadId: $threadId}) {\n thread { id isResolved }\n }\n}";
/// Resolves the review thread with GraphQL node id `thread_id`.
///
/// Returns `true` only when the mutation response confirms `isResolved`;
/// both transport failures and unconfirmed responses are logged as warnings.
pub fn resolve_review_thread(thread_id: &str) -> bool {
    let thread_arg = format!("threadId={thread_id}");
    let query_arg = format!("query={RESOLVE_REVIEW_THREAD_MUTATION}");
    let Some(resp) = cmd_stdout(
        "gh",
        &["api", "graphql", "-F", &thread_arg, "-f", &query_arg],
    ) else {
        log(&format!(
            "WARNING: gh api graphql failed for resolveReviewThread on {thread_id}"
        ));
        return false;
    };
    let confirmed = parse_resolve_review_thread_response(&resp);
    if !confirmed {
        log(&format!(
            "WARNING: resolveReviewThread mutation did not confirm isResolved for {thread_id}: {resp}"
        ));
    }
    confirmed
}
/// Returns `true` iff the mutation response JSON carries
/// `data.resolveReviewThread.thread.isResolved == true`. Malformed JSON or a
/// missing/non-boolean field counts as failure.
fn parse_resolve_review_thread_response(json: &str) -> bool {
    let Ok(v) = serde_json::from_str::<serde_json::Value>(json) else {
        return false;
    };
    v.pointer("/data/resolveReviewThread/thread/isResolved")
        .and_then(serde_json::Value::as_bool)
        == Some(true)
}
fn parse_review_threads(json: &str, bot_login: &str) -> Vec<ReviewThread> {
let v: serde_json::Value = match serde_json::from_str(json) {
Ok(v) => v,
Err(e) => {
log(&format!("WARNING: review-threads JSON parse failed: {e}"));
return Vec::new();
}
};
let nodes = v
.pointer("/data/repository/pullRequest/reviewThreads/nodes")
.and_then(|n| n.as_array())
.cloned()
.unwrap_or_default();
let mut out = Vec::new();
for thread in nodes {
let resolved = thread
.get("isResolved")
.and_then(serde_json::Value::as_bool)
.unwrap_or(false);
if resolved {
continue;
}
let id = thread
.get("id")
.and_then(serde_json::Value::as_str)
.unwrap_or("")
.to_string();
if id.is_empty() {
continue;
}
let comments = thread
.pointer("/comments/nodes")
.and_then(|n| n.as_array())
.cloned()
.unwrap_or_default();
let Some(c) = comments.first() else {
continue;
};
let author = c
.pointer("/author/login")
.and_then(serde_json::Value::as_str)
.unwrap_or("")
.to_string();
let typename = c
.pointer("/author/__typename")
.and_then(serde_json::Value::as_str)
.unwrap_or("");
let body = c
.get("body")
.and_then(serde_json::Value::as_str)
.unwrap_or("")
.to_string();
let is_bot = author == bot_login || author.ends_with("[bot]") || typename == "Bot";
if !is_bot && !has_human_fix_marker(&body) {
continue;
}
let path = c
.get("path")
.and_then(serde_json::Value::as_str)
.unwrap_or("")
.to_string();
if path.is_empty() {
continue;
}
let line = c
.get("line")
.and_then(serde_json::Value::as_u64)
.or_else(|| c.get("originalLine").and_then(serde_json::Value::as_u64))
.unwrap_or(0) as u32;
out.push(ReviewThread {
id,
path,
line,
body,
author,
});
}
out
}
fn parse_all_unresolved_review_threads(json: &str) -> Vec<ReviewThread> {
let v: serde_json::Value = match serde_json::from_str(json) {
Ok(v) => v,
Err(e) => {
log(&format!("WARNING: review-threads JSON parse failed: {e}"));
return Vec::new();
}
};
let nodes = v
.pointer("/data/repository/pullRequest/reviewThreads/nodes")
.and_then(|n| n.as_array())
.cloned()
.unwrap_or_default();
let mut out = Vec::new();
for thread in nodes {
let resolved = thread
.get("isResolved")
.and_then(serde_json::Value::as_bool)
.unwrap_or(false);
if resolved {
continue;
}
let id = thread
.get("id")
.and_then(serde_json::Value::as_str)
.unwrap_or("")
.to_string();
if id.is_empty() {
continue;
}
let comments = thread
.pointer("/comments/nodes")
.and_then(|n| n.as_array())
.cloned()
.unwrap_or_default();
let Some(c) = comments.first() else {
continue;
};
let author = c
.pointer("/author/login")
.and_then(serde_json::Value::as_str)
.unwrap_or("")
.to_string();
let body = c
.get("body")
.and_then(serde_json::Value::as_str)
.unwrap_or("")
.to_string();
let path = c
.get("path")
.and_then(serde_json::Value::as_str)
.unwrap_or("")
.to_string();
if path.is_empty() {
continue;
}
let line = c
.get("line")
.and_then(serde_json::Value::as_u64)
.or_else(|| c.get("originalLine").and_then(serde_json::Value::as_u64))
.unwrap_or(0) as u32;
out.push(ReviewThread {
id,
path,
line,
body,
author,
});
}
out
}
/// Returns, for every open PR (up to 100), how many unresolved review
/// threads are actionable by the agent, keyed by PR number. PRs with zero
/// actionable threads are omitted.
///
/// Resolves `owner/repo` via `gh repo view`, then runs a single GraphQL
/// query over all open PRs. Any failure is logged as a WARNING and returns
/// an empty map.
pub fn fetch_unresolved_thread_counts(bot_login: &str) -> std::collections::HashMap<u32, u32> {
    let owner_repo = match cmd_stdout(
        "gh",
        &[
            "repo",
            "view",
            "--json",
            "nameWithOwner",
            "-q",
            ".nameWithOwner",
        ],
    ) {
        Some(s) if !s.is_empty() => s,
        _ => {
            log("WARNING: could not resolve owner/repo via `gh repo view`");
            return std::collections::HashMap::new();
        }
    };
    // nameWithOwner is "owner/repo".
    let (owner, repo) = match owner_repo.split_once('/') {
        Some((o, r)) => (o.to_string(), r.to_string()),
        None => {
            log(&format!(
                "WARNING: unexpected repo identifier '{owner_repo}'"
            ));
            return std::collections::HashMap::new();
        }
    };
    // One query covers up to 100 open PRs × 100 threads, first comment only.
    let query = "\nquery($owner: String!, $repo: String!) {\n repository(owner: $owner, name: $repo) {\n pullRequests(states: OPEN, first: 100) {\n nodes {\n number\n reviewThreads(first: 100) {\n nodes {\n isResolved\n comments(first: 1) {\n nodes {\n author { login __typename }\n body\n }\n }\n }\n }\n }\n }\n }\n}";
    let owner_arg = format!("owner={owner}");
    let repo_arg = format!("repo={repo}");
    let query_arg = format!("query={query}");
    let out = match cmd_stdout(
        "gh",
        &[
            "api", "graphql", "-F", &owner_arg, "-F", &repo_arg, "-f", &query_arg,
        ],
    ) {
        Some(s) => s,
        None => {
            log("WARNING: failed to fetch open-PR thread counts");
            return std::collections::HashMap::new();
        }
    };
    parse_pr_thread_counts(&out, bot_login)
}
/// Parses the per-PR thread-count GraphQL response into a map of PR number →
/// count of unresolved, agent-actionable threads (opened by `bot_login`, a
/// `…[bot]` account, a GraphQL `Bot` actor, or carrying the human fix
/// marker). PRs with a zero count are omitted; malformed JSON is logged and
/// yields an empty map.
fn parse_pr_thread_counts(json: &str, bot_login: &str) -> std::collections::HashMap<u32, u32> {
    let mut counts = std::collections::HashMap::new();
    let v: serde_json::Value = match serde_json::from_str(json) {
        Ok(v) => v,
        Err(e) => {
            log(&format!("WARNING: pr-thread-counts JSON parse failed: {e}"));
            return counts;
        }
    };
    let prs = v
        .pointer("/data/repository/pullRequests/nodes")
        .and_then(|n| n.as_array())
        .cloned()
        .unwrap_or_default();
    for pr in prs {
        let Some(number) = pr
            .get("number")
            .and_then(serde_json::Value::as_u64)
            .and_then(|n| u32::try_from(n).ok())
        else {
            continue;
        };
        let threads = pr
            .pointer("/reviewThreads/nodes")
            .and_then(|n| n.as_array())
            .cloned()
            .unwrap_or_default();
        // Count the unresolved threads that the agent would act on.
        let actionable = threads
            .iter()
            .filter(|t| {
                let resolved = t
                    .get("isResolved")
                    .and_then(serde_json::Value::as_bool)
                    .unwrap_or(false);
                if resolved {
                    return false;
                }
                let author = t
                    .pointer("/comments/nodes/0/author/login")
                    .and_then(serde_json::Value::as_str)
                    .unwrap_or("");
                let typename = t
                    .pointer("/comments/nodes/0/author/__typename")
                    .and_then(serde_json::Value::as_str)
                    .unwrap_or("");
                let body = t
                    .pointer("/comments/nodes/0/body")
                    .and_then(serde_json::Value::as_str)
                    .unwrap_or("");
                let from_bot = author == bot_login || author.ends_with("[bot]") || typename == "Bot";
                from_bot || has_human_fix_marker(body)
            })
            .count() as u32;
        if actionable > 0 {
            counts.insert(number, actionable);
        }
    }
    counts
}
mod prompts;
pub use prompts::*;
#[cfg(test)]
mod tests;