use std::collections::HashMap;
use std::sync::OnceLock;
use serde::Deserialize;
use super::types::{
CheckRun, CheckState, Inbox, Issue, Label, MergeStateStatus, Mergeable, PullRequest, Review,
ReviewDecision, ReviewState, Role,
};
/// GraphQL fragment selecting every pull-request field the app consumes.
/// Shared by the inbox query and the show-all query; appended verbatim
/// after each query body.
const PR_FRAGMENT: &str = r"
fragment PullRequestFields on PullRequest {
  number
  title
  url
  isDraft
  mergeable
  mergeStateStatus
  reviewDecision
  repository { nameWithOwner }
  author { login }
  updatedAt
  baseRefName
  headRefName
  commits(last: 1) {
    totalCount
    nodes {
      commit {
        statusCheckRollup {
          state
          contexts(first: 20) {
            nodes {
              ... on CheckRun {
                name
                status
                conclusion
                checkSuite { workflowRun { workflow { name } } }
              }
              ... on StatusContext {
                context
                state
              }
            }
          }
        }
      }
    }
  }
  comments { totalCount }
  reviewRequests(first: 10) {
    nodes {
      requestedReviewer {
        ... on User { login }
        ... on Team { name }
      }
    }
  }
  reviewThreads(first: 30) {
    nodes {
      isResolved
      isOutdated
    }
  }
  latestReviews(first: 10) {
    nodes {
      author { login }
      state
    }
  }
}
";
/// GraphQL fragment selecting the issue fields the app consumes.
const ISSUE_FRAGMENT: &str = r"
fragment IssueFields on Issue {
  number
  title
  url
  repository { nameWithOwner }
  author { login }
  updatedAt
  comments { totalCount }
  labels(first: 20) {
    nodes {
      name
      color
    }
  }
}
";
/// Returns the inbox query (authored, review-requested, and assigned
/// buckets), assembled once and cached for the process lifetime.
pub(super) fn inbox_query() -> &'static str {
    static QUERY: OnceLock<String> = OnceLock::new();
    QUERY
        .get_or_init(|| {
            let body = r#"
query InboxQuery {
  authored: viewer {
    login
    pullRequests(first: 50, states: OPEN, orderBy: {field: UPDATED_AT, direction: DESC}) {
      nodes {
        ...PullRequestFields
      }
    }
  }
  reviewRequested: search(query: "is:open is:pr review-requested:@me", type: ISSUE, first: 50) {
    nodes {
      ... on PullRequest {
        ...PullRequestFields
      }
    }
  }
  assignedPrs: search(query: "is:open is:pr assignee:@me", type: ISSUE, first: 50) {
    nodes {
      ... on PullRequest {
        ...PullRequestFields
      }
    }
  }
  assignedIssues: search(query: "is:open is:issue assignee:@me", type: ISSUE, first: 50) {
    nodes {
      ... on Issue {
        ...IssueFields
      }
    }
  }
}
"#;
            // Query body followed by the fragments it references.
            [body, PR_FRAGMENT, ISSUE_FRAGMENT].concat()
        })
        .as_str()
}
/// Builds the "show all" query, scoping both searches to `repos`
/// (entries in `owner/name` form). An empty slice leaves the search
/// unscoped by repository.
pub(super) fn build_show_all_query(repos: &[String]) -> String {
    let repo_qualifiers = repos
        .iter()
        .map(|repo| format!("repo:{repo}"))
        .collect::<Vec<_>>()
        .join(" ");
    let header = format!(
        r#"
query ShowAllQuery {{
  allPrs: search(query: "{repo_qualifiers} is:open is:pr", type: ISSUE, first: 50) {{
    nodes {{
      ... on PullRequest {{
        ...PullRequestFields
      }}
    }}
  }}
  allIssues: search(query: "{repo_qualifiers} is:open is:issue", type: ISSUE, first: 50) {{
    nodes {{
      ... on Issue {{
        ...IssueFields
      }}
    }}
  }}
  viewer {{ login }}
}}
"#
    );
    [header.as_str(), PR_FRAGMENT, ISSUE_FRAGMENT].concat()
}
/// Generic GraphQL response envelope. Per the GraphQL spec a response may
/// carry `data`, `errors`, or both, so both fields are optional.
#[derive(Debug, Deserialize)]
pub(super) struct GqlEnvelope<T> {
pub data: Option<T>,
pub errors: Option<Vec<GraphQlError>>,
}
/// One entry of the GraphQL `errors` array; only the message is retained.
#[derive(Debug, Deserialize)]
pub(super) struct GraphQlError {
pub message: String,
}
/// `data` payload of the inbox query: one field per aliased query block.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct ResponseData {
pub authored: AuthoredViewer,
pub review_requested: SearchResult,
pub assigned_prs: SearchResult,
pub assigned_issues: SearchResult,
}
/// `data` payload of the query produced by `build_show_all_query`.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct ResponseDataAll {
pub all_prs: SearchResult,
pub all_issues: SearchResult,
pub viewer: ViewerLogin,
}
/// Bare `viewer { login }` selection.
#[derive(Debug, Deserialize)]
pub(super) struct ViewerLogin {
pub login: String,
}
/// `viewer` selection that also carries the viewer's open pull requests.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct AuthoredViewer {
pub login: String,
pub pull_requests: NodeList<RawPr>,
}
/// A `search(...)` connection. Nodes are `Option` because the API can
/// return null entries in the list.
#[derive(Debug, Deserialize)]
pub(super) struct SearchResult {
pub nodes: Vec<Option<SearchNode>>,
}
/// Untagged union: serde tries variants top-to-bottom. `Pr` is first and
/// requires PR-only fields (e.g. `isDraft`, `commits`), so issue payloads
/// fail that match and deserialize as `Issue`. Keep this variant order.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub(super) enum SearchNode {
Pr(RawPr),
Issue(RawIssue),
}
/// Generic `{ nodes: [...] }` connection wrapper.
#[derive(Debug, Deserialize)]
pub(super) struct NodeList<T> {
pub nodes: Vec<T>,
}
/// Raw pull-request payload matching the `PullRequestFields` fragment.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct RawPr {
pub number: u32,
pub title: String,
pub url: String,
pub is_draft: bool,
pub mergeable: Mergeable,
pub merge_state_status: MergeStateStatus,
// Null when GitHub has no decision yet (e.g. no reviews requested).
pub review_decision: Option<ReviewDecision>,
pub repository: RawRepo,
// Null when the author account was deleted; see `author_or_deleted`.
pub author: Option<RawActor>,
pub updated_at: chrono::DateTime<chrono::Utc>,
pub base_ref_name: String,
pub head_ref_name: String,
pub commits: RawCommits,
pub comments: RawTotalCount,
pub review_requests: NodeList<RawReviewRequest>,
pub review_threads: NodeList<RawReviewThread>,
pub latest_reviews: NodeList<RawReview>,
}
/// `commits(last: 1)` connection: full count plus at most the head commit.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct RawCommits {
pub total_count: u32,
pub nodes: Vec<RawCommitNode>,
}
/// Wrapper around the `commit` object inside the commits connection.
#[derive(Debug, Deserialize)]
pub(super) struct RawCommitNode {
pub commit: RawCommit,
}
/// A commit's CI summary; null when the commit has no checks or statuses.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct RawCommit {
pub status_check_rollup: Option<RawStatusRollup>,
}
/// Combined state of all checks/statuses plus the first 20 contexts.
#[derive(Debug, Deserialize)]
pub(super) struct RawStatusRollup {
pub state: CheckState,
pub contexts: NodeList<RawCheckContext>,
}
/// Untagged union over rollup contexts. `CheckRun` is tried first and
/// requires `name` + `status`; a plain commit status only carries
/// `context`/`state` and therefore falls through to `StatusContext`.
/// Keep this variant order.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub(super) enum RawCheckContext {
CheckRun(RawCheckRun),
#[allow(dead_code)]
StatusContext(RawStatusContext),
}
/// `... on CheckRun` selection; `status`/`conclusion` are kept as raw
/// strings rather than enums.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct RawCheckRun {
pub name: String,
pub status: String,
// Null while the check run is still in progress.
pub conclusion: Option<String>,
pub check_suite: Option<RawCheckSuite>,
}
/// Nesting for `checkSuite { workflowRun { workflow { name } } }`.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct RawCheckSuite {
pub workflow_run: Option<RawWorkflowRun>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct RawWorkflowRun {
pub workflow: Option<RawWorkflow>,
}
#[derive(Debug, Deserialize)]
pub(super) struct RawWorkflow {
pub name: String,
}
/// `... on StatusContext` selection (legacy commit statuses).
#[derive(Debug, Deserialize)]
pub(super) struct RawStatusContext {
pub context: String,
pub state: String,
}
/// Bare `{ totalCount }` selection (commit and comment counts).
#[derive(Debug, Deserialize)]
pub(super) struct RawTotalCount {
#[serde(rename = "totalCount")]
pub total_count: u32,
}
/// One pending review request; the reviewer can be null.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct RawReviewRequest {
pub requested_reviewer: Option<RawReviewer>,
}
/// Untagged `User`/`Team` union: `User` is tried first and requires
/// `login`; team payloads only have `name` and fall through. Keep this
/// variant order.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub(super) enum RawReviewer {
User { login: String },
Team { name: String },
}
/// Review-thread flags used to count threads still needing attention.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct RawReviewThread {
pub is_resolved: bool,
pub is_outdated: bool,
}
/// A single latest review; author is null for deleted accounts.
#[derive(Debug, Deserialize)]
pub(super) struct RawReview {
pub author: Option<RawActor>,
pub state: ReviewState,
}
/// `repository { nameWithOwner }` selection, e.g. "owner/repo".
#[derive(Debug, Deserialize)]
pub(super) struct RawRepo {
#[serde(rename = "nameWithOwner")]
pub name_with_owner: String,
}
/// `author { login }` / actor selection.
#[derive(Debug, Deserialize)]
pub(super) struct RawActor {
pub login: String,
}
/// Raw issue payload matching the `IssueFields` fragment.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) struct RawIssue {
pub number: u32,
pub title: String,
pub url: String,
pub repository: RawRepo,
// Null when the author account was deleted; see `author_or_deleted`.
pub author: Option<RawActor>,
pub updated_at: chrono::DateTime<chrono::Utc>,
pub comments: RawTotalCount,
pub labels: NodeList<RawLabel>,
}
/// Issue label: display name plus hex color (without `#`, per the API).
#[derive(Debug, Deserialize)]
pub(super) struct RawLabel {
pub name: String,
pub color: String,
}
/// Converts the inbox query response into the domain `Inbox`.
///
/// A PR can appear in several buckets (authored, review-requested,
/// assigned); entries are merged by `(repo, number)` and their roles are
/// unioned. PRs and issues are each sorted newest-activity first.
pub(super) fn to_inbox(viewer_login: String, data: ResponseData) -> Inbox {
    use std::collections::hash_map::Entry;

    let mut merged: HashMap<(String, u32), (PullRequest, Vec<Role>)> = HashMap::new();

    // Tag each raw PR with the role implied by the bucket it came from;
    // bucket order (authored, reviewer, assignee) fixes role order.
    let authored = data
        .authored
        .pull_requests
        .nodes
        .into_iter()
        .map(|raw| (raw, Role::Author));
    let review_requested = data
        .review_requested
        .nodes
        .into_iter()
        .flatten()
        .filter_map(|node| match node {
            SearchNode::Pr(raw) => Some((raw, Role::Reviewer)),
            SearchNode::Issue(_) => None,
        });
    let assigned = data
        .assigned_prs
        .nodes
        .into_iter()
        .flatten()
        .filter_map(|node| match node {
            SearchNode::Pr(raw) => Some((raw, Role::Assignee)),
            SearchNode::Issue(_) => None,
        });

    for (raw, role) in authored.chain(review_requested).chain(assigned) {
        match merged.entry((raw.repository.name_with_owner.clone(), raw.number)) {
            Entry::Occupied(mut slot) => {
                // Duplicate PR: keep the first payload, just record the role.
                let (_, roles) = slot.get_mut();
                if !roles.contains(&role) {
                    roles.push(role);
                }
            }
            Entry::Vacant(slot) => {
                slot.insert((raw_pr_to_domain(raw), vec![role]));
            }
        }
    }

    let mut prs: Vec<PullRequest> = merged
        .into_values()
        .map(|(mut pr, roles)| {
            pr.roles = roles;
            pr
        })
        .collect();
    prs.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));

    let mut issues: Vec<Issue> = data
        .assigned_issues
        .nodes
        .into_iter()
        .flatten()
        .filter_map(|node| match node {
            SearchNode::Issue(raw) => Some(raw_issue_to_domain(raw)),
            SearchNode::Pr(_) => None,
        })
        .collect();
    issues.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));

    Inbox { viewer_login, prs, issues }
}
/// Converts the show-all query response into the domain `Inbox`.
///
/// Unlike `to_inbox`, each PR appears in exactly one search, so no
/// dedup is needed; instead the viewer's roles are reconstructed from
/// the payload (author login and direct review requests).
pub(super) fn to_inbox_all(viewer_login: String, data: ResponseDataAll) -> Inbox {
    let mut prs: Vec<PullRequest> = Vec::new();
    for node in data.all_prs.nodes.into_iter().flatten() {
        let SearchNode::Pr(raw) = node else { continue };

        let mut roles = Vec::new();
        if matches!(&raw.author, Some(actor) if actor.login == viewer_login) {
            roles.push(Role::Author);
        }
        // Only direct user review requests count; team requests are
        // skipped because team membership is not known here.
        let requested_directly = raw.review_requests.nodes.iter().any(|req| {
            matches!(
                &req.requested_reviewer,
                Some(RawReviewer::User { login }) if *login == viewer_login
            )
        });
        if requested_directly {
            roles.push(Role::Reviewer);
        }

        let mut pr = raw_pr_to_domain(raw);
        pr.roles = roles;
        prs.push(pr);
    }
    prs.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));

    let mut issues: Vec<Issue> = data
        .all_issues
        .nodes
        .into_iter()
        .flatten()
        .filter_map(|node| match node {
            SearchNode::Issue(raw) => Some(raw_issue_to_domain(raw)),
            SearchNode::Pr(_) => None,
        })
        .collect();
    issues.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));

    Inbox { viewer_login, prs, issues }
}
/// Flattens a deserialized GraphQL PR payload into the domain model.
/// `roles` is left empty; callers fill it in.
fn raw_pr_to_domain(raw: RawPr) -> PullRequest {
    // `commits(last: 1)` yields at most one node: the head commit.
    let rollup = raw
        .commits
        .nodes
        .into_iter()
        .next()
        .and_then(|node| node.commit.status_check_rollup);
    let check_state = rollup.as_ref().map(|r| r.state);

    // CheckRun conclusions treated as "failing" for display purposes.
    const FAILING_CONCLUSIONS: [&str; 6] = [
        "FAILURE",
        "ERROR",
        "TIMED_OUT",
        "ACTION_REQUIRED",
        "CANCELLED",
        "STARTUP_FAILURE",
    ];

    let mut failing_checks = Vec::new();
    if let Some(rollup) = rollup {
        for ctx in rollup.contexts.nodes {
            match ctx {
                RawCheckContext::CheckRun(run) => {
                    let failed = run
                        .conclusion
                        .as_deref()
                        .is_some_and(|c| FAILING_CONCLUSIONS.contains(&c));
                    if failed {
                        // Workflow name sits three optional levels deep.
                        let workflow_name = run
                            .check_suite
                            .and_then(|suite| suite.workflow_run)
                            .and_then(|wf_run| wf_run.workflow)
                            .map(|workflow| workflow.name);
                        failing_checks.push(CheckRun {
                            name: run.name,
                            workflow_name,
                            conclusion: run.conclusion,
                            status: run.status,
                        });
                    }
                }
                RawCheckContext::StatusContext(sc) => {
                    // Legacy commit statuses have no separate status field;
                    // a terminal state implies completion.
                    if sc.state == "FAILURE" || sc.state == "ERROR" {
                        failing_checks.push(CheckRun {
                            name: sc.context,
                            workflow_name: None,
                            conclusion: Some(sc.state),
                            status: "COMPLETED".to_owned(),
                        });
                    }
                }
            }
        }
    }

    // Resolved or outdated threads no longer need attention.
    #[allow(clippy::cast_possible_truncation)]
    let unresolved_threads = raw
        .review_threads
        .nodes
        .iter()
        .filter(|t| !(t.is_resolved || t.is_outdated))
        .count() as u32;

    // Teams are identified by name, users by login.
    let requested_reviewers = raw
        .review_requests
        .nodes
        .into_iter()
        .filter_map(|req| match req.requested_reviewer {
            Some(RawReviewer::User { login }) => Some(login),
            Some(RawReviewer::Team { name }) => Some(name),
            None => None,
        })
        .collect();

    // Reviews by deleted (null) authors are dropped.
    let reviews = raw
        .latest_reviews
        .nodes
        .into_iter()
        .filter_map(|review| {
            let author = review.author?;
            Some(Review { author: author.login, state: review.state })
        })
        .collect();

    PullRequest {
        number: raw.number,
        title: raw.title,
        url: raw.url,
        repo: raw.repository.name_with_owner,
        author: super::author_or_deleted(raw.author.map(|a| a.login)),
        is_draft: raw.is_draft,
        mergeable: raw.mergeable,
        merge_state: raw.merge_state_status,
        review_decision: raw.review_decision,
        commits_count: raw.commits.total_count,
        comments_count: raw.comments.total_count,
        check_state,
        failing_checks,
        unresolved_threads,
        requested_reviewers,
        reviews,
        updated_at: raw.updated_at,
        roles: vec![],
        base_ref: Some(raw.base_ref_name),
        head_ref: Some(raw.head_ref_name),
    }
}
/// Flattens a deserialized GraphQL issue payload into the domain model.
fn raw_issue_to_domain(raw: RawIssue) -> Issue {
    let labels = raw
        .labels
        .nodes
        .into_iter()
        .map(|label| Label { name: label.name, color: label.color })
        .collect();
    Issue {
        number: raw.number,
        title: raw.title,
        url: raw.url,
        repo: raw.repository.name_with_owner,
        author: super::author_or_deleted(raw.author.map(|a| a.login)),
        comments_count: raw.comments.total_count,
        updated_at: raw.updated_at,
        labels,
    }
}
#[cfg(test)]
#[allow(clippy::expect_used)]
mod tests {
use super::*;
/// Builds a minimal JSON payload matching `RawPr`'s shape, exposing only
/// the knobs the tests below need to vary.
fn make_base_pr_json(
number: u32,
check_state: &str,
conclusion: &str,
review_decision: &str,
is_draft: bool,
) -> serde_json::Value {
serde_json::json!({
"number": number,
"title": "Test PR",
"url": "https://github.com/owner/repo/pull/1",
"isDraft": is_draft,
"mergeable": "MERGEABLE",
"mergeStateStatus": "CLEAN",
"reviewDecision": review_decision,
"repository": { "nameWithOwner": "owner/repo" },
"author": { "login": "author-login" },
"updatedAt": "2024-01-01T00:00:00Z",
"baseRefName": "main",
"headRefName": "feat/test-branch",
"commits": {
"totalCount": 1,
"nodes": [{
"commit": {
"statusCheckRollup": {
"state": check_state,
"contexts": {
"nodes": [{
"name": "CI",
"status": "COMPLETED",
"conclusion": conclusion,
"checkSuite": null
}]
}
}
}
}]
},
"comments": { "totalCount": 0 },
"reviewRequests": { "nodes": [] },
"reviewThreads": { "nodes": [] },
"latestReviews": { "nodes": [] }
})
}
// A "FAILURE" check-run conclusion must surface in `failing_checks`.
#[test]
fn failing_ci_and_changes_requested() {
let json = make_base_pr_json(1, "FAILURE", "FAILURE", "CHANGES_REQUESTED", false);
let raw: RawPr = serde_json::from_value(json).expect("deserialize RawPr");
let pr = raw_pr_to_domain(raw);
assert_eq!(pr.check_state, Some(CheckState::Failure));
assert_eq!(pr.review_decision, Some(ReviewDecision::ChangesRequested));
assert_eq!(pr.failing_checks.len(), 1);
assert_eq!(pr.failing_checks[0].name, "CI");
}
// A non-failing conclusion must not produce any failing checks.
#[test]
fn clean_approved_pr() {
let json = make_base_pr_json(2, "SUCCESS", "success", "APPROVED", false);
let raw: RawPr = serde_json::from_value(json).expect("deserialize RawPr");
let pr = raw_pr_to_domain(raw);
assert_eq!(pr.check_state, Some(CheckState::Success));
assert_eq!(pr.review_decision, Some(ReviewDecision::Approved));
assert!(pr.failing_checks.is_empty(), "clean PR should have no failing checks");
}
// Each repo qualifier must appear in both the PR and the issue search.
#[test]
fn build_show_all_query_includes_repo_qualifiers() {
let repos = vec!["owner/alpha".to_owned(), "owner/beta".to_owned()];
let query = build_show_all_query(&repos);
assert!(query.contains("repo:owner/alpha"), "must contain repo:owner/alpha");
assert!(query.contains("repo:owner/beta"), "must contain repo:owner/beta");
assert!(
query.matches("repo:owner/alpha").count() >= 2,
"repo:owner/alpha must appear in both PR and issue searches"
);
assert!(
query.matches("repo:owner/beta").count() >= 2,
"repo:owner/beta must appear in both PR and issue searches"
);
}
// An empty repo list still produces a structurally complete query.
#[test]
fn build_show_all_query_empty_repos() {
let query = build_show_all_query(&[]);
assert!(query.contains("allPrs"), "must contain allPrs alias");
assert!(query.contains("allIssues"), "must contain allIssues alias");
assert!(query.contains("PullRequestFields"), "must reference PullRequestFields fragment");
assert!(query.contains("IssueFields"), "must reference IssueFields fragment");
}
// The same PR arriving via two buckets must merge into one entry with
// both roles.
#[test]
fn dedup_unions_roles() {
let pr_json = make_base_pr_json(1, "SUCCESS", "success", "APPROVED", false);
let raw1: RawPr = serde_json::from_value(pr_json.clone()).expect("deserialize");
let raw2: RawPr = serde_json::from_value(pr_json).expect("deserialize");
let data = ResponseData {
authored: AuthoredViewer {
login: "viewer".to_owned(),
pull_requests: NodeList { nodes: vec![raw1] },
},
review_requested: SearchResult { nodes: vec![Some(SearchNode::Pr(raw2))] },
assigned_prs: SearchResult { nodes: vec![] },
assigned_issues: SearchResult { nodes: vec![] },
};
let inbox = to_inbox("viewer".to_owned(), data);
assert_eq!(inbox.prs.len(), 1, "duplicate PR must be merged");
let roles = &inbox.prs[0].roles;
assert!(roles.contains(&Role::Author), "Author role missing");
assert!(roles.contains(&Role::Reviewer), "Reviewer role missing");
}
}