use std::fmt::Write;
use super::cli::PrArgs;
use super::types::{Files, Label, Labels, Pr, ReviewRequested, Score, ScoredPr, TestsState};
use anyhow::{anyhow, Result};
use chrono::prelude::{DateTime as DT, Utc};
use graphql_client::{GraphQLQuery, QueryBody, Response};
use itertools::Itertools;
use regex::Regex;
mod blame;
pub mod followup;
pub mod username;
use futures::join;
use futures::stream::{FuturesUnordered, StreamExt};
// GraphQL query definition: the `graphql_client` derive generates the
// `repo_view` module (variables, response types) at build time from the
// schema and query files referenced below.
#[derive(GraphQLQuery)]
#[graphql(
    schema_path = "src/client/schema.graphql",
    query_path = "src/client/pr.graphql",
    response_derives = "Debug"
)]
pub struct RepoView;
// Scalar aliases required by the generated GraphQL code: GitHub's `URI` and
// `DateTime` scalars are carried as plain strings and parsed later if needed.
#[allow(clippy::upper_case_acronyms)]
type URI = String;
type DateTime = String;
// User-Agent header sent with every API call, e.g. "ateam/1.2.3".
const AGENT: &str = concat!("ateam/", env!("CARGO_PKG_VERSION"));
/// Fetches and scores pull requests matching `options`, paging through the
/// GitHub search results.
///
/// The work is pipelined: while the next page is being fetched, the previous
/// page's PRs are ranked concurrently via `join!`. A progress dot is printed
/// to stderr on every iteration.
///
/// # Errors
/// Returns an error when a GraphQL query fails or its response cannot be
/// parsed.
pub async fn fetch_scored_prs(
    github_api_token: &str,
    username: &str,
    options: &PrArgs,
) -> Result<Vec<ScoredPr>> {
    let mut list_prs: Vec<Vec<ScoredPr>> = vec![];
    let mut list_data: Vec<repo_view::ResponseData> = vec![];
    let mut cursor = None;
    let mut first = true;
    loop {
        eprint!(".");
        // Rank the page fetched in the previous iteration, if any.
        let o_get_ranked_prs = if first {
            None
        } else {
            list_data.pop().map(|data| {
                ranked_prs(
                    github_api_token,
                    username,
                    options.required_approvals,
                    options,
                    data,
                )
            })
        };
        // Fetch the next page while there is (or may be) one: always on the
        // first iteration, afterwards only while a cursor remains.
        let o_get_next_response_data_and_cursor = if first || cursor.is_some() {
            Some(query(github_api_token, username, options, cursor.clone()))
        } else {
            None
        };
        // Matching on the pair of optional futures replaces the previous
        // `is_some()` + `unwrap()` chains (which needed clippy allows).
        match (o_get_ranked_prs, o_get_next_response_data_and_cursor) {
            // Both pending: rank the previous page while fetching the next.
            (Some(get_ranked), Some(get_next)) => {
                let (prs, response_and_cursor) = join!(get_ranked, get_next);
                list_prs.push(prs);
                let (new_response_data, new_cursor) = response_and_cursor?;
                cursor = new_cursor;
                list_data.push(new_response_data);
            }
            // Last page already fetched: only the final ranking remains.
            (Some(get_ranked), None) => list_prs.push(get_ranked.await),
            // First iteration: only the initial fetch.
            (None, Some(get_next)) => {
                let (new_response_data, new_cursor) = get_next.await?;
                list_data.push(new_response_data);
                cursor = new_cursor;
            }
            // Nothing left to fetch or rank.
            (None, None) => break,
        }
        first = false;
    }
    eprintln!();
    Ok(list_prs.into_iter().flatten().collect::<Vec<ScoredPr>>())
}
/// Posts a GraphQL query body to the GitHub v4 API, authenticated with the
/// given token.
///
/// # Errors
/// Fails when the HTTP client cannot be built or the request cannot be sent.
pub async fn call<V: serde::Serialize>(
    github_api_token: &str,
    q: &QueryBody<V>,
) -> Result<reqwest::Response> {
    let client = reqwest::Client::builder().user_agent(AGENT).build()?;
    let request = client
        .post("https://api.github.com/graphql")
        .json(&q)
        .bearer_auth(github_api_token);
    Ok(request.send().await?)
}
/// Runs one page of the GitHub search query and returns the response data
/// together with the pagination cursor for the following page (if any).
async fn query(
    github_api_token: &str,
    username: &str,
    options: &PrArgs,
    after: Option<String>,
) -> Result<(repo_view::ResponseData, Option<String>)> {
    let query_argument = github_query(username, options);
    if options.debug {
        println!(">> GitHub query: {query_argument:?}");
    }
    let batch_size = limited_batch_size(options.batch_size);
    // Commit checks are only requested when a tests regex was supplied.
    let num_checks = if options.tests_regex.is_some() { 20 } else { 0 };
    let body = RepoView::build_query(repo_view::Variables {
        query: query_argument,
        first: batch_size,
        after,
        num_checks,
    });
    let response = call(github_api_token, &body).await?;
    let parsed: Response<repo_view::ResponseData> = response.json().await?;
    let data = get_response_data(parsed)?;
    let next_cursor = last_item_cursor(&data, batch_size);
    Ok((data, next_cursor))
}
/// Unwraps a GraphQL response, turning reported `errors` or missing `data`
/// into `anyhow` errors.
fn get_response_data(
    response_body: Response<repo_view::ResponseData>,
) -> Result<repo_view::ResponseData> {
    if let Some(errors) = response_body.errors {
        // Concatenate the Debug form of every error into a single message.
        let error_str = errors.iter().fold(String::new(), |mut acc, error| {
            write!(acc, "{error:?}").unwrap();
            acc
        });
        return Err(anyhow!("Errors executing the query {error_str}"));
    }
    response_body
        .data
        .ok_or_else(|| anyhow!("Missing response data executing the query"))
}
/// Clamps the requested batch size to GitHub's maximum page size of 100.
fn limited_batch_size(batch_size: u8) -> i64 {
    i64::from(batch_size.min(100))
}
/// Returns the cursor of the last search item when another page may exist.
///
/// A page shorter than `batch_size` must be the final page, so `None` is
/// returned to stop paging.
fn last_item_cursor(response_data: &repo_view::ResponseData, batch_size: i64) -> Option<String> {
    let items = response_data.search.edges.as_ref()?;
    if items.len() < usize::try_from(batch_size).unwrap_or(usize::MAX) {
        return None;
    }
    items.last()?.as_ref().map(|item| item.cursor.clone())
}
/// Assembles the GitHub search query string from the CLI options.
/// Each helper returns either an empty string or a clause ending in a space.
fn github_query(username: &str, options: &PrArgs) -> String {
    let mut q = String::from("is:pr is:open ");
    q.push_str(query_drafts(options.include_drafts));
    q.push_str(&query_mine(username, options.only_mine));
    q.push_str(&query_requested(username, options.requested));
    q.push_str(&query_labels(&options.label, &options.exclude_label));
    q.push_str(&query_repos(&options.repo));
    q.push_str(&query_org(options.org.as_ref()));
    q.push_str(&options.query.join(" "));
    q
}
/// Search clause excluding draft PRs unless drafts were requested.
fn query_drafts(include_drafts: bool) -> &'static str {
    match include_drafts {
        true => "",
        false => "draft:false ",
    }
}
/// Search clause restricting results to the user's own PRs.
fn query_mine(username: &str, only_mine: bool) -> String {
    only_mine
        .then(|| format!("author:{username} "))
        .unwrap_or_default()
}
/// Search clause restricting results to PRs where a review was requested
/// from the user.
fn query_requested(username: &str, requested: bool) -> String {
    if !requested {
        return String::new();
    }
    format!("review-requested:{username} ")
}
/// Search clauses for required labels followed by excluded labels.
/// Label names are quoted because they may contain spaces.
fn query_labels(labels: &[String], exclude_label: &[String]) -> String {
    labels
        .iter()
        .map(|label| format!("label:\"{label}\" "))
        .chain(
            exclude_label
                .iter()
                .map(|label| format!("-label:\"{label}\" ")),
        )
        .collect()
}
/// Search clauses restricting results to the given `owner/name` repositories.
fn query_repos(repos: &[String]) -> String {
    repos
        .iter()
        .map(|repo| format!("repo:{repo} "))
        .collect()
}
/// Search clause restricting results to one GitHub organization.
fn query_org(org: Option<&String>) -> String {
    org.map_or_else(String::new, |org| format!("org:{org} "))
}
/// Converts one page of response data into scored PRs.
async fn ranked_prs(
    github_api_token: &str,
    username: &str,
    required_approvals: u8,
    options: &PrArgs,
    response_data: repo_view::ResponseData,
) -> Vec<ScoredPr> {
    let fetched = prs(github_api_token, username, options, response_data).await;
    let mut scored = Vec::with_capacity(fetched.len());
    for pr in fetched {
        scored.push(scored_pr(required_approvals, pr));
    }
    scored
}
/// Sorts scored PRs by descending total score.
///
/// The score is scaled by 1000 and truncated to an `i64` integer sort key.
/// The ascending stable sort followed by `reverse()` is deliberate: besides
/// inverting the order, it also reverses the relative order of equal-score
/// PRs (the unit tests in this file depend on that tie ordering).
pub fn sorted_ranked_prs(mut sprs: Vec<ScoredPr>) -> Vec<ScoredPr> {
    #[allow(clippy::cast_precision_loss, clippy::cast_possible_truncation)]
    sprs.sort_by_key(|scored_pr| (scored_pr.score.total() * 1000.0) as i64);
    sprs.reverse();
    sprs
}
/// Compiles an optional pattern; an absent pattern yields `None`.
/// NOTE(review): an invalid user-supplied pattern is also silently dropped
/// (treated as "no regex") — confirm this best-effort behavior is intended.
fn regex(regex_text: Option<&String>) -> Option<Regex> {
    let text = regex_text?;
    Regex::new(text).ok()
}
/// Pairs a PR with its computed review score.
fn scored_pr(required_approvals: u8, pr: Pr) -> ScoredPr {
    ScoredPr {
        score: Score::from_pr(required_approvals, &pr),
        pr,
    }
}
/// Converts a page of GraphQL response data into `Pr` values.
///
/// Filters the search edges down to pull requests that pass `include_pr`,
/// then fetches per-PR stats concurrently (`FuturesUnordered`), dropping any
/// PR whose stats were filtered out (`pr_stats` returned `None`).
async fn prs(
    github_api_token: &str,
    username: &str,
    options: &PrArgs,
    response_data: repo_view::ResponseData,
) -> Vec<Pr> {
    // Optional title filters, compiled once for the whole page.
    let re = regex(options.regex.as_ref());
    let re_not = regex(options.regex_not.as_ref());
    let prs: FuturesUnordered<_> = response_data
        .search
        .edges
        .into_iter()
        .flatten() // unwrap the Option around the edge list
        .flatten() // skip missing edges
        .map(|i| i.node)
        .filter_map(|n| match n {
            // The search can return other node types; keep only pull requests.
            Some(repo_view::RepoViewSearchEdgesNode::PullRequest(pull_request)) => {
                Some(pull_request)
            }
            _ => None,
        })
        .filter(|i| {
            include_pr(
                i,
                re.as_ref(),
                re_not.as_ref(),
                username,
                options.include_mine,
                options.only_mine,
                options.include_reviewed_by_me,
            )
        })
        .map(|i| async move { pr_stats(github_api_token, username, options, i).await })
        .collect();
    // Await all stat fetches; results arrive in completion order.
    prs.collect::<Vec<Option<Pr>>>()
        .await
        .into_iter()
        .flatten() // drop PRs rejected by pr_stats (tests-state filter)
        .collect()
}
/// Decides whether a PR should be considered at all, based on size,
/// mergeability, title regexes, authorship, and prior reviews by the user.
fn include_pr(
    pr: &repo_view::RepoViewSearchEdgesNodeOnPullRequest,
    regex: Option<&Regex>,
    regex_not: Option<&Regex>,
    username: &str,
    include_mine: bool,
    only_mine: bool,
    include_reviewed_by_me: bool,
) -> bool {
    // Reject empty or conflicting PRs outright.
    if is_empty(pr) || has_conflicts(pr) {
        return false;
    }
    // Title must match the inclusion regex (if any) and not the exclusion one.
    if !regex_match(regex, true, pr) || regex_match(regex_not, false, pr) {
        return false;
    }
    // Skip the user's own PRs unless explicitly included.
    if !(include_mine || only_mine) && author(pr) == username {
        return false;
    }
    // Skip PRs the user already reviewed unless explicitly included.
    include_reviewed_by_me
        || only_mine
        || review_states(pr.reviews.as_ref(), username, true).is_empty()
}
/// Tests the PR title against an optional regex; `or` is the result used
/// when no regex was supplied.
fn regex_match(
    regex: Option<&Regex>,
    or: bool,
    pr: &repo_view::RepoViewSearchEdgesNodeOnPullRequest,
) -> bool {
    regex.map_or(or, |re| re.is_match(&pr.title))
}
/// A PR with zero added and zero deleted lines carries no reviewable change.
fn is_empty(pr: &repo_view::RepoViewSearchEdgesNodeOnPullRequest) -> bool {
    (pr.additions, pr.deletions) == (0, 0)
}
/// True when GitHub reports the PR as unmergeable due to conflicts.
fn has_conflicts(pr: &repo_view::RepoViewSearchEdgesNodeOnPullRequest) -> bool {
    matches!(pr.mergeable, repo_view::MergeableState::CONFLICTING)
}
/// Collects the paths of all files touched by the PR (empty when the files
/// connection is absent from the response).
fn pr_files(pr: &repo_view::RepoViewSearchEdgesNodeOnPullRequest) -> Vec<String> {
    pr.files.as_ref().map_or_else(Vec::new, |files| {
        files
            .nodes
            .iter()
            .flatten()
            .flatten()
            .map(|f| f.path.clone())
            .collect()
    })
}
/// Converts the GraphQL label nodes into the domain `Labels` type (empty when
/// the labels connection is absent from the response).
fn pr_labels(labels: Option<&repo_view::RepoViewSearchEdgesNodeOnPullRequestLabels>) -> Labels {
    let collected = labels.map_or_else(Vec::new, |labels| {
        labels
            .nodes
            .iter()
            .flatten()
            .flatten()
            .map(|l| Label {
                name: l.name.clone(),
                color: l.color.clone(),
            })
            .collect()
    });
    Labels(collected)
}
/// Builds a `Pr` with all review-relevant stats, or `None` when the PR is
/// filtered out by its tests state (see `include_by_tests_state`).
async fn pr_stats(
    github_api_token: &str,
    username: &str,
    options: &PrArgs,
    pr: repo_view::RepoViewSearchEdgesNodeOnPullRequest,
) -> Option<Pr> {
    let (last_commit_pushed_date, tests_result) = last_commit(&pr, options.tests_regex.as_ref());
    // Apply the tests-state filter before any further (costly) processing.
    if !include_by_tests_state(&tests_result, options) {
        return None;
    }
    // Blame data needs an extra API call per PR, so it is opt-in.
    let (files, blame) = if options.blame {
        let files = pr_files(&pr);
        let blame = blame::blame(
            github_api_token,
            &pr.repository.name,
            &pr.repository.owner.login,
            &files,
            username,
        )
        .await;
        (Files(files), blame)
    } else {
        (Files(vec![]), false)
    };
    let author = author(&pr);
    // Latest review state per reviewer, excluding reviews by the PR author.
    let reviews = review_states(pr.reviews.as_ref(), &author, false);
    let review_requested = review_requested(pr.review_requests.as_ref(), username);
    Some(Pr {
        title: pr.title.clone(),
        url: pr.url.clone(),
        last_commit_pushed_date,
        last_commit_age_min: age(last_commit_pushed_date),
        tests_result,
        open_conversations: pr_open_conversations(&pr.review_threads),
        num_approvals: pr_num_approvals(&reviews),
        num_reviewers: pr_num_reviewers(&reviews),
        additions: pr.additions,
        deletions: pr.deletions,
        based_on_main_branch: pr_based_on_main_branch(&pr.base_ref_name),
        files,
        blame,
        labels: pr_labels(pr.labels.as_ref()),
        requested: matches!(review_requested, ReviewRequested::RequestedNotAsCodeOwner),
        codeowner: matches!(review_requested, ReviewRequested::RequestedAsCodeOwner),
    })
}
fn author(pr: &repo_view::RepoViewSearchEdgesNodeOnPullRequest) -> String {
match &pr.author {
Some(repo_view::RepoViewSearchEdgesNodeOnPullRequestAuthor { login, on: _ }) => {
login.clone()
}
_ => String::new(),
}
}
/// Decides whether a PR with the given tests state passes the CLI filters.
/// Success/None are included by default (opt-out), Failure/Pending are
/// excluded by default (opt-in).
fn include_by_tests_state(state: &TestsState, options: &PrArgs) -> bool {
    let excluded = match state {
        TestsState::Success => options.exclude_tests_success,
        TestsState::Failure => !options.include_tests_failure,
        TestsState::Pending => !options.include_tests_pending,
        TestsState::None => options.exclude_tests_none,
    };
    !excluded
}
/// Extracts the push date and aggregated tests state of the PR's most recent
/// commit (the query requests a single commit node).
///
/// Returns `(None, TestsState::None)` when no commit data is available.
fn last_commit(
    pr: &repo_view::RepoViewSearchEdgesNodeOnPullRequest,
    tests_regex: Option<&String>,
) -> (Option<DT<Utc>>, TestsState) {
    let tests_re = regex(tests_regex);
    // Use `first()` instead of the previous `nodes[0]` indexing, which
    // panicked when the API returned a present-but-empty `nodes` list.
    let newest = pr
        .commits
        .nodes
        .as_ref()
        .and_then(|nodes| nodes.first())
        .and_then(Option::as_ref);
    match newest {
        Some(node) => {
            let state = node
                .commit
                .status_check_rollup
                .as_ref()
                .map(|status| commit_status_state(status, tests_re.as_ref()));
            (
                parse_date(node.commit.pushed_date.as_ref()),
                // A missing rollup means no checks were reported.
                state.unwrap_or(TestsState::None),
            )
        }
        None => (None, TestsState::None),
    }
}
/// Aggregated check state for a commit: with a tests regex, only matching
/// status contexts count; otherwise GitHub's overall rollup state is used.
fn commit_status_state(
    status: &repo_view::RepoViewSearchEdgesNodeOnPullRequestCommitsNodesCommitStatusCheckRollup,
    tests_re: Option<&Regex>,
) -> TestsState {
    if let Some(re) = tests_re {
        commit_tests_state_from_contexts(status.contexts.nodes.as_ref(), re)
    } else {
        tests_state(&status.state)
    }
}
/// Aggregates the states of all status contexts whose name matches
/// `tests_re` into a single `TestsState`.
///
/// Priority: any Failure wins, then any Pending; otherwise all collected
/// states must be Success. NOTE(review): when the regex matches no context
/// at all, the `all` arm is vacuously true and the result is `Success`, not
/// `None` — confirm that is the intended behavior.
fn commit_tests_state_from_contexts(
    contexts_nodes: Option<&Vec<Option<repo_view::RepoViewSearchEdgesNodeOnPullRequestCommitsNodesCommitStatusCheckRollupContextsNodes>>>,
    tests_re: &Regex,
) -> TestsState {
    match contexts_nodes {
        Some(nodes) => {
            // Keep only StatusContext nodes whose name matches the regex.
            let states: Vec<TestsState> = nodes.iter().filter_map(|node|
                match node {
                    Some(repo_view::RepoViewSearchEdgesNodeOnPullRequestCommitsNodesCommitStatusCheckRollupContextsNodes::StatusContext(status_context)) => {
                        if tests_re.is_match(&status_context.context) {
                            Some(tests_state(&status_context.state))
                        } else {
                            None
                        }
                    },
                    _ => None
                }).collect();
            match states {
                v if v.iter().any(|state| matches!(state, TestsState::Failure)) => {
                    TestsState::Failure
                }
                v if v.iter().any(|state| matches!(state, TestsState::Pending)) => {
                    TestsState::Pending
                }
                v if v.iter().all(|state| matches!(state, TestsState::Success)) => {
                    TestsState::Success
                }
                _ => TestsState::None,
            }
        }
        None => TestsState::None,
    }
}
fn tests_state(state: &repo_view::StatusState) -> TestsState {
match state {
repo_view::StatusState::SUCCESS => TestsState::Success,
repo_view::StatusState::PENDING | repo_view::StatusState::EXPECTED => TestsState::Pending,
repo_view::StatusState::FAILURE | repo_view::StatusState::ERROR => TestsState::Failure,
repo_view::StatusState::Other(_) => TestsState::None,
}
}
/// Counts review threads that are still open: neither resolved nor outdated.
#[allow(clippy::cast_possible_wrap)]
fn pr_open_conversations(
    review_threads: &repo_view::RepoViewSearchEdgesNodeOnPullRequestReviewThreads,
) -> i64 {
    review_threads
        .nodes
        .iter()
        .flatten() // unwrap the Option around the node list
        .flatten() // skip missing thread nodes
        .filter(|thread| !thread.is_resolved && !thread.is_outdated)
        .count() as i64
}
/// Latest review state per reviewer.
///
/// Reviews are walked in reverse (newest first) and deduplicated by login so
/// each reviewer contributes only their most recent review. With
/// `reviewed_by_author` set, only reviews BY `author` are kept; otherwise
/// reviews by `author` are dropped (e.g. the PR author's own comments).
fn review_states<'a>(
    reviews: Option<&'a repo_view::RepoViewSearchEdgesNodeOnPullRequestReviews>,
    author: &str,
    reviewed_by_author: bool,
) -> Vec<&'a repo_view::PullRequestReviewState> {
    let Some(repo_view::RepoViewSearchEdgesNodeOnPullRequestReviews {
        total_count: _,
        nodes: Some(nodes),
    }) = reviews
    else {
        return vec![];
    };
    nodes
        .iter()
        .filter_map(|review| match review {
            Some(repo_view::RepoViewSearchEdgesNodeOnPullRequestReviewsNodes {
                author:
                    Some(repo_view::RepoViewSearchEdgesNodeOnPullRequestReviewsNodesAuthor {
                        login,
                        on: _,
                    }),
                state,
            }) => Some((login, state)),
            _ => None,
        })
        .rev()
        .filter(|(login, _)| {
            if reviewed_by_author {
                login.as_str() == author
            } else {
                login.as_str() != author
            }
        })
        .unique_by(|(login, _)| *login)
        .map(|(_, state)| state)
        .collect()
}
/// Number of reviewers whose latest review state is APPROVED.
fn pr_num_approvals(review_states: &[&repo_view::PullRequestReviewState]) -> i64 {
    let mut approvals = 0_i64;
    for state in review_states {
        if matches!(state, repo_view::PullRequestReviewState::APPROVED) {
            approvals += 1;
        }
    }
    approvals
}
/// Number of distinct reviewers (each contributes one latest review state).
#[allow(clippy::cast_possible_wrap)]
fn pr_num_reviewers(review_states: &[&repo_view::PullRequestReviewState]) -> i64 {
    let reviewer_count = review_states.len();
    reviewer_count as i64
}
/// Parses an optional RFC 3339 timestamp string; unparsable input yields `None`.
fn parse_date(date: Option<&String>) -> Option<DT<Utc>> {
    match date {
        Some(s) => s.parse::<DT<Utc>>().ok(),
        None => None,
    }
}
/// Minutes elapsed since the given timestamp, relative to now.
fn age(date_time: Option<DT<Utc>>) -> Option<i64> {
    let date_time = date_time?;
    Some((Utc::now() - date_time).num_minutes())
}
/// True when the PR targets the repository's primary branch.
fn pr_based_on_main_branch(base_branch_name: &str) -> bool {
    matches!(base_branch_name, "main" | "master")
}
/// Determines whether `username` has an outstanding review request on the
/// PR — either directly or via team membership — and whether the request
/// originated from CODEOWNERS.
fn review_requested(
    requests: Option<&repo_view::RepoViewSearchEdgesNodeOnPullRequestReviewRequests>,
    username: &str,
) -> ReviewRequested {
    match requests {
        Some(requests) => {
            requests.nodes.iter().flatten().flatten().find(|r|
                match &r.requested_reviewer {
                    // Direct request to this user.
                    Some(repo_view::RepoViewSearchEdgesNodeOnPullRequestReviewRequestsNodesRequestedReviewer::User(reviewer)) =>
                        reviewer.login == username,
                    // Team request: matches when the user belongs to the team.
                    Some(repo_view::RepoViewSearchEdgesNodeOnPullRequestReviewRequestsNodesRequestedReviewer::Team(team)) =>
                        team.members.nodes.iter().flatten().flatten().any(|member| member.login == username),
                    // Mannequins (placeholder accounts) never match.
                    Some(repo_view::RepoViewSearchEdgesNodeOnPullRequestReviewRequestsNodesRequestedReviewer::Mannequin) => false,
                    // A missing reviewer indicates the token could not read
                    // org/team data (missing `read:org` scope).
                    None => panic!("Something is wrong with the GitHub token! Have you added the read:org scope?"),
                })
            .map_or(
                ReviewRequested::NotRequested,
                |r|
                if r.as_code_owner {
                    ReviewRequested::RequestedAsCodeOwner
                } else {
                    ReviewRequested::RequestedNotAsCodeOwner
                }
            )
        }
        None => ReviewRequested::NotRequested,
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Batch size passes through up to GitHub's maximum page size of 100.
    #[test]
    fn test_limited_batch_size() {
        assert_eq!(limited_batch_size(1), 1);
        assert_eq!(limited_batch_size(100), 100);
        assert_eq!(limited_batch_size(101), 100);
    }

    // Only "main" and "master" count as primary branches.
    #[test]
    fn test_pr_based_on_main_branch() {
        assert!(pr_based_on_main_branch("main"));
        assert!(pr_based_on_main_branch("master"));
        assert!(!pr_based_on_main_branch("develop"));
    }

    // Sorting an empty list is a no-op.
    #[test]
    fn test_sorted_ranked_prs_empty() {
        let input: Vec<ScoredPr> = Vec::new();
        let result = sorted_ranked_prs(input);
        assert!(result.is_empty());
    }

    // A single PR is returned unchanged.
    #[test]
    fn test_sorted_ranked_prs_single() {
        let pr = Pr {
            title: "test".to_string(),
            url: "https://example.com/1".to_string(),
            last_commit_pushed_date: None,
            last_commit_age_min: None,
            tests_result: TestsState::Success,
            open_conversations: 0,
            num_approvals: 0,
            num_reviewers: 0,
            additions: 0,
            deletions: 0,
            based_on_main_branch: false,
            files: Files(vec![]),
            blame: false,
            labels: Labels(vec![]),
            requested: false,
            codeowner: false,
        };
        let scored_pr = ScoredPr {
            pr: pr.clone(),
            score: Score::from_pr(1, &pr),
        };
        let input = vec![scored_pr];
        let result = sorted_ranked_prs(input);
        assert_eq!(result.len(), 1);
        assert_eq!(result[0].pr.url, "https://example.com/1");
    }

    // PRs come out ordered by descending score.
    #[test]
    fn test_sorted_ranked_prs_multiple_sorted() {
        let pr_high = Pr {
            title: "High score PR".to_string(),
            url: "https://example.com/2".to_string(),
            last_commit_pushed_date: None,
            last_commit_age_min: None,
            tests_result: TestsState::Success,
            open_conversations: 0,
            num_approvals: 5,
            num_reviewers: 0,
            additions: 0,
            deletions: 0,
            based_on_main_branch: false,
            files: Files(vec![]),
            blame: false,
            labels: Labels(vec![]),
            requested: false,
            codeowner: false,
        };
        let score_high = Score::from_pr(1, &pr_high);
        let scored_pr_high = ScoredPr {
            pr: pr_high,
            score: score_high,
        };
        let pr_low = Pr {
            title: "Low score PR".to_string(),
            url: "https://example.com/1".to_string(),
            last_commit_pushed_date: None,
            last_commit_age_min: None,
            tests_result: TestsState::Success,
            open_conversations: 0,
            num_approvals: 1,
            num_reviewers: 0,
            additions: 0,
            deletions: 0,
            based_on_main_branch: false,
            files: Files(vec![]),
            blame: false,
            labels: Labels(vec![]),
            requested: false,
            codeowner: false,
        };
        let score_low = Score::from_pr(1, &pr_low);
        let scored_pr_low = ScoredPr {
            pr: pr_low,
            score: score_low,
        };
        let input = vec![scored_pr_low, scored_pr_high];
        let result = sorted_ranked_prs(input);
        assert_eq!(result.len(), 2);
        assert_eq!(result[0].pr.url, "https://example.com/1");
        assert_eq!(result[1].pr.url, "https://example.com/2");
    }

    // Equal-score ties come out in reversed input order (stable sort +
    // reverse() in sorted_ranked_prs): the later input ends up first.
    #[test]
    fn test_sorted_ranked_prs_same_score() {
        let pr1 = Pr {
            title: "First PR".to_string(),
            url: "https://example.com/1".to_string(),
            last_commit_pushed_date: None,
            last_commit_age_min: None,
            tests_result: TestsState::Success,
            open_conversations: 0,
            num_approvals: 2,
            num_reviewers: 0,
            additions: 100,
            deletions: 50,
            based_on_main_branch: false,
            files: Files(vec![]),
            blame: false,
            labels: Labels(vec![]),
            requested: false,
            codeowner: false,
        };
        let score1 = Score::from_pr(1, &pr1);
        let scored_pr1 = ScoredPr {
            pr: pr1,
            score: score1,
        };
        let pr2 = Pr {
            title: "Second PR".to_string(),
            url: "https://example.com/2".to_string(),
            last_commit_pushed_date: None,
            last_commit_age_min: None,
            tests_result: TestsState::Success,
            open_conversations: 0,
            num_approvals: 2,
            num_reviewers: 0,
            additions: 100,
            deletions: 50,
            based_on_main_branch: false,
            files: Files(vec![]),
            blame: false,
            labels: Labels(vec![]),
            requested: false,
            codeowner: false,
        };
        let score2 = Score::from_pr(1, &pr2);
        let scored_pr2 = ScoredPr {
            pr: pr2,
            score: score2,
        };
        let input = vec![scored_pr1, scored_pr2];
        let result = sorted_ranked_prs(input);
        assert_eq!(result.len(), 2);
        assert_eq!(result[0].pr.url, "https://example.com/2");
        assert_eq!(result[1].pr.url, "https://example.com/1");
    }

    // PRs differing in several score inputs all get distinct positions.
    #[test]
    fn test_sorted_ranked_prs_various_scores() {
        let mut prs = Vec::new();
        let pr1 = Pr {
            title: "Many additions".to_string(),
            url: "https://example.com/1".to_string(),
            last_commit_pushed_date: None,
            last_commit_age_min: Some(60),
            tests_result: TestsState::Success,
            open_conversations: 0,
            num_approvals: 0,
            num_reviewers: 0,
            additions: 1000,
            deletions: 0,
            based_on_main_branch: false,
            files: Files(vec![]),
            blame: false,
            labels: Labels(vec![]),
            requested: false,
            codeowner: false,
        };
        let score1 = Score::from_pr(1, &pr1);
        prs.push(ScoredPr {
            pr: pr1,
            score: score1,
        });
        let pr2 = Pr {
            title: "Old PR".to_string(),
            url: "https://example.com/2".to_string(),
            last_commit_pushed_date: None,
            last_commit_age_min: Some(1440),
            tests_result: TestsState::Success,
            open_conversations: 0,
            num_approvals: 0,
            num_reviewers: 0,
            additions: 100,
            deletions: 0,
            based_on_main_branch: false,
            files: Files(vec![]),
            blame: false,
            labels: Labels(vec![]),
            requested: false,
            codeowner: false,
        };
        let score2 = Score::from_pr(1, &pr2);
        prs.push(ScoredPr {
            pr: pr2,
            score: score2,
        });
        let pr3 = Pr {
            title: "Main branch PR".to_string(),
            url: "https://example.com/3".to_string(),
            last_commit_pushed_date: None,
            last_commit_age_min: Some(60),
            tests_result: TestsState::Success,
            open_conversations: 0,
            num_approvals: 0,
            num_reviewers: 0,
            additions: 500,
            deletions: 0,
            based_on_main_branch: true,
            files: Files(vec![]),
            blame: false,
            labels: Labels(vec![]),
            requested: false,
            codeowner: false,
        };
        let score3 = Score::from_pr(1, &pr3);
        prs.push(ScoredPr {
            pr: pr3,
            score: score3,
        });
        let result = sorted_ranked_prs(prs);
        assert_eq!(result.len(), 3);
        // Only checks that the three URLs are pairwise distinct; the exact
        // order depends on the Score weighting.
        let first_url = &result[0].pr.url;
        let second_url = &result[1].pr.url;
        let third_url = &result[2].pr.url;
        assert_ne!(first_url, second_url);
        assert_ne!(second_url, third_url);
        assert_ne!(first_url, third_url);
    }

    #[test]
    fn test_pr_based_on_main_branch_feature_branch() {
        assert!(!pr_based_on_main_branch("feature/new-feature"));
    }

    #[test]
    fn test_pr_based_on_main_branch_release_branch() {
        assert!(!pr_based_on_main_branch("release/v1.0"));
    }

    #[test]
    fn test_pr_based_on_main_branch_hotfix_branch() {
        assert!(!pr_based_on_main_branch("hotfix/fix-bug"));
    }

    // Zero is passed through unchanged (no minimum is enforced).
    #[test]
    fn test_limited_batch_size_zero() {
        assert_eq!(limited_batch_size(0), 0);
    }

    #[test]
    fn test_limited_batch_size_boundary() {
        assert_eq!(limited_batch_size(99), 99);
        assert_eq!(limited_batch_size(100), 100);
    }
}