pub mod http;
pub mod models;
pub mod tools;
#[doc(hidden)]
pub mod test_support;
use agentic_tools_utils::pagination::PaginationCache;
use agentic_tools_utils::pagination::paginate_slice;
use anyhow::Context;
use anyhow::Result;
use cynic::MutationBuilder;
use cynic::QueryBuilder;
use http::LinearClient;
use linear_queries::scalars::DateTimeOrDuration;
use linear_queries::*;
use regex::Regex;
use std::sync::Arc;
pub use tools::build_registry;
fn parse_identifier(input: &str) -> Option<(String, i32)> {
let upper = input.to_uppercase();
let re = Regex::new(r"([A-Z]{2,10})-(\d{1,10})").unwrap();
if let Some(caps) = re.captures(&upper) {
let key = caps.get(1)?.as_str().to_string();
let num_str = caps.get(2)?.as_str();
let number: i32 = num_str.parse().ok()?;
return Some((key, number));
}
None
}
// Number of comments served per `get_issue_comments` call (cache page size).
const COMMENTS_PAGE_SIZE: usize = 10;
// Page size used when pulling all of an issue's comments from the Linear API.
const ISSUE_COMMENTS_FETCH_PAGE_SIZE: i32 = 50;
// Hard cap on API pages fetched per issue, guarding against runaway pagination.
const ISSUE_COMMENTS_MAX_PAGES: usize = 100;
/// Facade over the Linear API tools: holds the (optional) API key and a
/// shared cache used to page an issue's comments across successive calls.
#[derive(Clone)]
pub struct LinearTools {
// Read from `LINEAR_API_KEY` at construction; `None` is tolerated until a
// request actually needs authentication.
api_key: Option<String>,
// Pre-fetched comment lists keyed per issue; the `String` meta payload is
// the issue's human-readable identifier.
comments_cache: Arc<PaginationCache<models::CommentSummary, String>>,
}
impl LinearTools {
pub fn new() -> Self {
Self {
api_key: std::env::var("LINEAR_API_KEY").ok(),
comments_cache: Arc::new(PaginationCache::new()),
}
}
fn resolve_issue_id(&self, input: &str) -> IssueIdentifier {
if let Some((key, number)) = parse_identifier(input) {
return IssueIdentifier::Identifier(format!("{}-{}", key, number));
}
IssueIdentifier::Id(input.to_string())
}
async fn resolve_to_issue_id(&self, client: &LinearClient, input: &str) -> Result<String> {
match self.resolve_issue_id(input) {
IssueIdentifier::Id(id) => Ok(id),
IssueIdentifier::Identifier(ident) => {
let (team_key, number) = parse_identifier(&ident)
.ok_or_else(|| anyhow::anyhow!("not found: Issue {} not found", ident))?;
let filter = IssueFilter {
team: Some(TeamFilter {
key: Some(StringComparator {
eq: Some(team_key),
..Default::default()
}),
..Default::default()
}),
number: Some(NumberComparator {
eq: Some(number as f64),
..Default::default()
}),
..Default::default()
};
let op = IssuesQuery::build(IssuesArguments {
first: Some(1),
after: None,
filter: Some(filter),
});
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
let issue = data
.issues
.nodes
.into_iter()
.next()
.ok_or_else(|| anyhow::anyhow!("not found: Issue {} not found", ident))?;
Ok(issue.id.inner().to_string())
}
}
}
}
impl Default for LinearTools {
/// Equivalent to [`LinearTools::new`].
fn default() -> Self {
Self::new()
}
}
/// How user input names an issue.
enum IssueIdentifier {
/// A raw issue id, usable directly in API calls.
Id(String),
/// A human-readable `TEAM-123` identifier that must be resolved via a query.
Identifier(String),
}
impl From<linear_queries::User> for models::UserRef {
    /// Map a GraphQL user node to a lightweight reference, preferring the
    /// display name and falling back to the account name when it is empty.
    fn from(u: linear_queries::User) -> Self {
        let name = match u.display_name.is_empty() {
            true => u.name,
            false => u.display_name,
        };
        models::UserRef {
            id: u.id.inner().to_string(),
            email: u.email,
            name,
        }
    }
}
impl From<linear_queries::Team> for models::TeamRef {
    /// Map a GraphQL team node to its reference model.
    fn from(t: linear_queries::Team) -> Self {
        models::TeamRef {
            key: t.key,
            name: t.name,
            id: t.id.inner().to_string(),
        }
    }
}
impl From<linear_queries::WorkflowState> for models::WorkflowStateRef {
    /// Map a GraphQL workflow-state node to its reference model.
    fn from(s: linear_queries::WorkflowState) -> Self {
        models::WorkflowStateRef {
            name: s.name,
            state_type: s.state_type,
            id: s.id.inner().to_string(),
        }
    }
}
impl From<linear_queries::Project> for models::ProjectRef {
    /// Map a GraphQL project node to its reference model.
    fn from(p: linear_queries::Project) -> Self {
        models::ProjectRef {
            name: p.name,
            id: p.id.inner().to_string(),
        }
    }
}
impl From<linear_queries::ParentIssue> for models::ParentIssueRef {
    /// Map a GraphQL parent-issue node to its reference model.
    fn from(p: linear_queries::ParentIssue) -> Self {
        models::ParentIssueRef {
            identifier: p.identifier,
            id: p.id.inner().to_string(),
        }
    }
}
impl From<linear_queries::Issue> for models::IssueSummary {
    /// Flatten a GraphQL issue node into the summary model, unwrapping the
    /// scalar newtypes and converting nested refs via their `From` impls.
    fn from(i: linear_queries::Issue) -> Self {
        models::IssueSummary {
            id: i.id.inner().to_string(),
            identifier: i.identifier,
            title: i.title,
            url: i.url,
            priority: i.priority as i32,
            priority_label: i.priority_label,
            label_ids: i.label_ids,
            team: models::TeamRef::from(i.team),
            state: i.state.map(models::WorkflowStateRef::from),
            assignee: i.assignee.map(models::UserRef::from),
            creator: i.creator.map(models::UserRef::from),
            project: i.project.map(models::ProjectRef::from),
            due_date: i.due_date.map(|d| d.0),
            created_at: i.created_at.0,
            updated_at: i.updated_at.0,
        }
    }
}
impl From<linear_queries::IssueSearchResult> for models::IssueSummary {
    /// Flatten a full-text search hit into the shared summary model; search
    /// hits always carry a workflow state, so it is wrapped in `Some`.
    fn from(i: linear_queries::IssueSearchResult) -> Self {
        let state = models::WorkflowStateRef::from(i.state);
        models::IssueSummary {
            id: i.id.inner().to_string(),
            identifier: i.identifier,
            title: i.title,
            url: i.url,
            priority: i.priority as i32,
            priority_label: i.priority_label,
            label_ids: i.label_ids,
            team: models::TeamRef::from(i.team),
            state: Some(state),
            assignee: i.assignee.map(models::UserRef::from),
            creator: i.creator.map(models::UserRef::from),
            project: i.project.map(models::ProjectRef::from),
            due_date: i.due_date.map(|d| d.0),
            created_at: i.created_at.0,
            updated_at: i.updated_at.0,
        }
    }
}
impl LinearTools {
/// Search or list issues.
///
/// When `query` is non-blank, Linear's full-text search endpoint is used
/// (matching comment bodies too unless `include_comments` is `Some(false)`);
/// otherwise issues are listed via the plain issues query. All supplied
/// structured filters are combined. `first` is clamped to 1..=100 (default
/// 50) and `after` is the cursor returned by a previous page.
#[allow(clippy::too_many_arguments)]
pub async fn search_issues(
&self,
query: Option<String>,
include_comments: Option<bool>,
priority: Option<i32>,
state_id: Option<String>,
assignee_id: Option<String>,
creator_id: Option<String>,
team_id: Option<String>,
project_id: Option<String>,
created_after: Option<String>,
created_before: Option<String>,
updated_after: Option<String>,
updated_before: Option<String>,
first: Option<i32>,
after: Option<String>,
) -> Result<models::SearchResult> {
let client = LinearClient::new(self.api_key.clone())
.context("internal: failed to create Linear client")?;
// Build the structured filter; `has_filter` tracks whether anything was
// actually set so an all-default filter can be omitted from the request.
let mut filter = IssueFilter::default();
let mut has_filter = false;
if let Some(p) = priority {
filter.priority = Some(NullableNumberComparator {
eq: Some(p as f64),
..Default::default()
});
has_filter = true;
}
if let Some(id) = state_id {
filter.state = Some(WorkflowStateFilter {
id: Some(IdComparator {
eq: Some(cynic::Id::new(id)),
}),
..Default::default()
});
has_filter = true;
}
if let Some(id) = assignee_id {
filter.assignee = Some(NullableUserFilter {
id: Some(IdComparator {
eq: Some(cynic::Id::new(id)),
}),
});
has_filter = true;
}
if let Some(id) = creator_id {
filter.creator = Some(NullableUserFilter {
id: Some(IdComparator {
eq: Some(cynic::Id::new(id)),
}),
});
has_filter = true;
}
if let Some(id) = team_id {
filter.team = Some(TeamFilter {
id: Some(IdComparator {
eq: Some(cynic::Id::new(id)),
}),
..Default::default()
});
has_filter = true;
}
if let Some(id) = project_id {
filter.project = Some(NullableProjectFilter {
id: Some(IdComparator {
eq: Some(cynic::Id::new(id)),
}),
});
has_filter = true;
}
// Date bounds are inclusive (gte/lte); per the scalar type they may be
// absolute timestamps or relative durations.
if created_after.is_some() || created_before.is_some() {
filter.created_at = Some(DateComparator {
gte: created_after.map(DateTimeOrDuration),
lte: created_before.map(DateTimeOrDuration),
..Default::default()
});
has_filter = true;
}
if updated_after.is_some() || updated_before.is_some() {
filter.updated_at = Some(DateComparator {
gte: updated_after.map(DateTimeOrDuration),
lte: updated_before.map(DateTimeOrDuration),
..Default::default()
});
has_filter = true;
}
let filter_opt = if has_filter { Some(filter) } else { None };
let page_size = Some(first.unwrap_or(50).clamp(1, 100));
// Non-blank query -> full-text search endpoint; otherwise plain listing.
let q_trimmed = query.as_ref().map(|s| s.trim()).unwrap_or("");
if !q_trimmed.is_empty() {
let op = SearchIssuesQuery::build(SearchIssuesArguments {
term: q_trimmed.to_string(),
include_comments: Some(include_comments.unwrap_or(true)),
first: page_size,
after,
filter: filter_opt,
});
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
let issues = data
.search_issues
.nodes
.into_iter()
.map(Into::into)
.collect();
Ok(models::SearchResult {
issues,
has_next_page: data.search_issues.page_info.has_next_page,
end_cursor: data.search_issues.page_info.end_cursor,
})
} else {
let op = IssuesQuery::build(IssuesArguments {
first: page_size,
after,
filter: filter_opt,
});
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
let issues = data.issues.nodes.into_iter().map(Into::into).collect();
Ok(models::SearchResult {
issues,
has_next_page: data.issues.page_info.has_next_page,
end_cursor: data.issues.page_info.end_cursor,
})
}
}
/// Fetch full details for one issue, accepting a raw issue id, a `TEAM-123`
/// identifier, or an issue URL.
///
/// # Errors
/// Returns a "not found" error when no issue matches the input.
pub async fn read_issue(&self, issue: String) -> Result<models::IssueDetails> {
    let client = LinearClient::new(self.api_key.clone())
        .context("internal: failed to create Linear client")?;
    let resolved = self.resolve_issue_id(&issue);
    let mut issue_data = match resolved {
        // Raw id: direct lookup.
        IssueIdentifier::Id(id) => {
            let op = IssueByIdQuery::build(IssueByIdArguments { id });
            let resp = client.run(op).await?;
            let data = http::extract_data(resp)?;
            data.issue
                .ok_or_else(|| anyhow::anyhow!("not found: Issue not found"))?
        }
        // Human-readable identifier: look the issue up by team key + number.
        IssueIdentifier::Identifier(ident) => {
            let (team_key, number) = parse_identifier(&ident)
                .ok_or_else(|| anyhow::anyhow!("not found: Issue {} not found", ident))?;
            let filter = IssueFilter {
                team: Some(TeamFilter {
                    key: Some(StringComparator {
                        eq: Some(team_key),
                        ..Default::default()
                    }),
                    ..Default::default()
                }),
                number: Some(NumberComparator {
                    eq: Some(number as f64),
                    ..Default::default()
                }),
                ..Default::default()
            };
            let op = IssuesQuery::build(IssuesArguments {
                first: Some(1),
                after: None,
                filter: Some(filter),
            });
            let resp = client.run(op).await?;
            let data = http::extract_data(resp)?;
            data.issues
                .nodes
                .into_iter()
                .next()
                .ok_or_else(|| anyhow::anyhow!("not found: Issue {} not found", ident))?
        }
    };
    // `IssueSummary::from` does not read the detail-only fields, so move them
    // out with `mem::take` instead of cloning before `.into()` consumes the
    // node. `parent` now reuses the existing `From<ParentIssue>` impl rather
    // than hand-building a `ParentIssueRef` from cloned fields.
    let description = std::mem::take(&mut issue_data.description);
    let estimate = issue_data.estimate;
    let started_at = std::mem::take(&mut issue_data.started_at).map(|d| d.0);
    let completed_at = std::mem::take(&mut issue_data.completed_at).map(|d| d.0);
    let canceled_at = std::mem::take(&mut issue_data.canceled_at).map(|d| d.0);
    let parent = std::mem::take(&mut issue_data.parent).map(models::ParentIssueRef::from);
    let summary: models::IssueSummary = issue_data.into();
    Ok(models::IssueDetails {
        issue: summary,
        description,
        estimate,
        parent,
        started_at,
        completed_at,
        canceled_at,
    })
}
/// Create a new issue in the given team; all optional fields are forwarded
/// to the Linear API untouched.
#[allow(clippy::too_many_arguments)]
pub async fn create_issue(
    &self,
    team_id: String,
    title: String,
    description: Option<String>,
    priority: Option<i32>,
    assignee_id: Option<String>,
    project_id: Option<String>,
    state_id: Option<String>,
    parent_id: Option<String>,
    label_ids: Vec<String>,
) -> Result<models::CreateIssueResult> {
    let client = LinearClient::new(self.api_key.clone())
        .context("internal: failed to create Linear client")?;
    // An empty label list means "omit the labels argument entirely".
    let label_ids = if label_ids.is_empty() {
        None
    } else {
        Some(label_ids)
    };
    let op = IssueCreateMutation::build(IssueCreateArguments {
        input: IssueCreateInput {
            team_id,
            title: Some(title),
            description,
            priority,
            assignee_id,
            project_id,
            state_id,
            parent_id,
            label_ids,
        },
    });
    let data = http::extract_data(client.run(op).await?)?;
    let payload = data.issue_create;
    Ok(models::CreateIssueResult {
        success: payload.success,
        issue: payload.issue.map(models::IssueSummary::from),
    })
}
/// Update fields on an existing issue; `issue` may be a raw id, a `TEAM-123`
/// identifier, or an issue URL. `None` fields are left untouched.
#[allow(clippy::too_many_arguments)]
pub async fn update_issue(
    &self,
    issue: String,
    title: Option<String>,
    description: Option<String>,
    priority: Option<i32>,
    assignee_id: Option<String>,
    state_id: Option<String>,
    project_id: Option<String>,
    parent_id: Option<String>,
    label_ids: Option<Vec<String>>,
    added_label_ids: Option<Vec<String>>,
    removed_label_ids: Option<Vec<String>>,
    due_date: Option<String>,
) -> Result<models::IssueResult> {
    let client = LinearClient::new(self.api_key.clone())
        .context("internal: failed to create Linear client")?;
    let id = self.resolve_to_issue_id(&client, &issue).await?;
    let input = IssueUpdateInput {
        title,
        description,
        priority,
        assignee_id,
        state_id,
        project_id,
        parent_id,
        label_ids,
        added_label_ids,
        removed_label_ids,
        due_date: due_date.map(linear_queries::scalars::TimelessDate),
    };
    let resp = client
        .run(IssueUpdateMutation::build(IssueUpdateArguments { id, input }))
        .await?;
    let payload = http::extract_data(resp)?.issue_update;
    anyhow::ensure!(payload.success, "Update failed: Linear returned success=false");
    let updated = payload
        .issue
        .ok_or_else(|| anyhow::anyhow!("No issue returned from update"))?;
    Ok(models::IssueResult {
        issue: updated.into(),
    })
}
/// Post a comment on an issue, optionally threaded under `parent_id`.
pub async fn add_comment(
    &self,
    issue: String,
    body: String,
    parent_id: Option<String>,
) -> Result<models::CommentResult> {
    let client = LinearClient::new(self.api_key.clone())
        .context("internal: failed to create Linear client")?;
    let issue_id = self.resolve_to_issue_id(&client, &issue).await?;
    let op = CommentCreateMutation::build(CommentCreateArguments {
        input: CommentCreateInput {
            issue_id,
            body: Some(body),
            parent_id,
        },
    });
    let resp = client.run(op).await?;
    let payload = http::extract_data(resp)?.comment_create;
    // The API may report success without echoing the created comment back,
    // so all echo fields stay `None` in that case.
    let mut comment_id = None;
    let mut body = None;
    let mut created_at = None;
    if let Some(c) = payload.comment {
        comment_id = Some(c.id.inner().to_string());
        body = Some(c.body);
        created_at = Some(c.created_at.0);
    }
    Ok(models::CommentResult {
        success: payload.success,
        comment_id,
        body,
        created_at,
    })
}
/// Archive an issue identified by raw id, `TEAM-123` identifier, or URL.
pub async fn archive_issue(&self, issue: String) -> Result<models::ArchiveIssueResult> {
    let client = LinearClient::new(self.api_key.clone())
        .context("internal: failed to create Linear client")?;
    let issue_id = self.resolve_to_issue_id(&client, &issue).await?;
    let resp = client
        .run(IssueArchiveMutation::build(IssueArchiveArguments { id: issue_id }))
        .await?;
    let success = http::extract_data(resp)?.issue_archive.success;
    Ok(models::ArchiveIssueResult { success })
}
/// List workspace metadata of the requested `kind` (users, teams, projects,
/// workflow states, or labels) as uniform [`models::MetadataItem`]s.
///
/// `search` is a case-insensitive substring match against the kind's natural
/// field (user display name, team key, or name). `team_id` narrows workflow
/// states and labels to one team and is ignored for the other kinds.
/// `first` defaults to 50. NOTE(review): unlike `search_issues`, `first` is
/// not clamped here — confirm whether the API caps page size itself.
pub async fn get_metadata(
&self,
kind: models::MetadataKind,
search: Option<String>,
team_id: Option<String>,
first: Option<i32>,
after: Option<String>,
) -> Result<models::GetMetadataResult> {
let client = LinearClient::new(self.api_key.clone())
.context("internal: failed to create Linear client")?;
let first = first.or(Some(50));
match kind {
models::MetadataKind::Users => {
// Users are matched on display name.
let filter = search.map(|s| linear_queries::UserFilter {
display_name: Some(StringComparator {
contains_ignore_case: Some(s),
..Default::default()
}),
});
let op = linear_queries::UsersQuery::build(linear_queries::UsersArguments {
first,
after,
filter,
});
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
let items = data
.users
.nodes
.into_iter()
.map(|u| {
// Same fallback as `From<User> for UserRef`: prefer the display
// name, use the account name when it is empty.
let name = if u.display_name.is_empty() {
u.name
} else {
u.display_name
};
models::MetadataItem {
id: u.id.inner().to_string(),
name,
email: Some(u.email),
key: None,
state_type: None,
team_id: None,
}
})
.collect();
Ok(models::GetMetadataResult {
kind: models::MetadataKind::Users,
items,
has_next_page: data.users.page_info.has_next_page,
end_cursor: data.users.page_info.end_cursor,
})
}
models::MetadataKind::Teams => {
// Teams are matched on their short key, not their name.
let filter = search.map(|s| linear_queries::TeamFilter {
key: Some(StringComparator {
contains_ignore_case: Some(s),
..Default::default()
}),
..Default::default()
});
let op = linear_queries::TeamsQuery::build(linear_queries::TeamsArguments {
first,
after,
filter,
});
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
let items = data
.teams
.nodes
.into_iter()
.map(|t| models::MetadataItem {
id: t.id.inner().to_string(),
name: t.name,
key: Some(t.key),
email: None,
state_type: None,
team_id: None,
})
.collect();
Ok(models::GetMetadataResult {
kind: models::MetadataKind::Teams,
items,
has_next_page: data.teams.page_info.has_next_page,
end_cursor: data.teams.page_info.end_cursor,
})
}
models::MetadataKind::Projects => {
// Projects are matched on name.
let filter = search.map(|s| linear_queries::ProjectFilter {
name: Some(StringComparator {
contains_ignore_case: Some(s),
..Default::default()
}),
});
let op = linear_queries::ProjectsQuery::build(linear_queries::ProjectsArguments {
first,
after,
filter,
});
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
let items = data
.projects
.nodes
.into_iter()
.map(|p| models::MetadataItem {
id: p.id.inner().to_string(),
name: p.name,
key: None,
email: None,
state_type: None,
team_id: None,
})
.collect();
Ok(models::GetMetadataResult {
kind: models::MetadataKind::Projects,
items,
has_next_page: data.projects.page_info.has_next_page,
end_cursor: data.projects.page_info.end_cursor,
})
}
models::MetadataKind::WorkflowStates => {
// States can be filtered by name and/or owning team; an all-default
// filter is omitted from the request.
let mut filter = linear_queries::WorkflowStateFilter::default();
let mut has_filter = false;
if let Some(s) = search {
filter.name = Some(StringComparator {
contains_ignore_case: Some(s),
..Default::default()
});
has_filter = true;
}
if let Some(tid) = team_id {
filter.team = Some(linear_queries::TeamFilter {
id: Some(linear_queries::IdComparator {
eq: Some(cynic::Id::new(tid)),
}),
..Default::default()
});
has_filter = true;
}
let filter_opt = if has_filter { Some(filter) } else { None };
let op = linear_queries::WorkflowStatesQuery::build(
linear_queries::WorkflowStatesArguments {
first,
after,
filter: filter_opt,
},
);
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
let items = data
.workflow_states
.nodes
.into_iter()
.map(|s| models::MetadataItem {
id: s.id.inner().to_string(),
name: s.name,
state_type: Some(s.state_type),
key: None,
email: None,
team_id: None,
})
.collect();
Ok(models::GetMetadataResult {
kind: models::MetadataKind::WorkflowStates,
items,
has_next_page: data.workflow_states.page_info.has_next_page,
end_cursor: data.workflow_states.page_info.end_cursor,
})
}
models::MetadataKind::Labels => {
// Labels can be filtered by name and/or owning team; workspace-level
// labels have no team, hence the nullable team filter.
let mut filter = linear_queries::IssueLabelFilter::default();
let mut has_filter = false;
if let Some(s) = search {
filter.name = Some(StringComparator {
contains_ignore_case: Some(s),
..Default::default()
});
has_filter = true;
}
if let Some(tid) = team_id {
filter.team = Some(linear_queries::NullableTeamFilter {
id: Some(linear_queries::IdComparator {
eq: Some(cynic::Id::new(tid)),
}),
..Default::default()
});
has_filter = true;
}
let filter_opt = if has_filter { Some(filter) } else { None };
let op =
linear_queries::IssueLabelsQuery::build(linear_queries::IssueLabelsArguments {
first,
after,
filter: filter_opt,
});
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
let items = data
.issue_labels
.nodes
.into_iter()
.map(|l| models::MetadataItem {
id: l.id.inner().to_string(),
name: l.name,
team_id: l.team.map(|t| t.id.inner().to_string()),
key: None,
email: None,
state_type: None,
})
.collect();
Ok(models::GetMetadataResult {
kind: models::MetadataKind::Labels,
items,
has_next_page: data.issue_labels.page_info.has_next_page,
end_cursor: data.issue_labels.page_info.end_cursor,
})
}
}
}
/// Create or remove a relation between two issues (each given as id,
/// `TEAM-123` identifier, or URL).
///
/// With `relation_type` (`blocks`, `duplicate`, or `related`) a new relation
/// is created. Without it, the existing relation between the two issues — in
/// either direction — is deleted; if none exists the call is a no-op and
/// reports `action = "no_change"`.
pub async fn set_relation(
&self,
issue: String,
related_issue: String,
relation_type: Option<String>,
) -> Result<models::SetRelationResult> {
let client = LinearClient::new(self.api_key.clone())
.context("internal: failed to create Linear client")?;
let issue_id = self.resolve_to_issue_id(&client, &issue).await?;
let related_issue_id = self.resolve_to_issue_id(&client, &related_issue).await?;
match relation_type {
Some(rel_type) => {
// Create path: map the user-supplied type string (case-insensitive)
// onto the API enum, rejecting anything else.
let relation_type = match rel_type.to_lowercase().as_str() {
"blocks" => IssueRelationType::Blocks,
"duplicate" => IssueRelationType::Duplicate,
"related" => IssueRelationType::Related,
other => anyhow::bail!(
"Invalid relation type: {}. Must be one of: blocks, duplicate, related",
other
),
};
let input = IssueRelationCreateInput {
issue_id,
related_issue_id,
relation_type,
};
let op = IssueRelationCreateMutation::build(IssueRelationCreateArguments { input });
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
Ok(models::SetRelationResult {
success: data.issue_relation_create.success,
action: "created".to_string(),
})
}
None => {
// Remove path: find the relation id by scanning the issue's outgoing
// relations first, then its inverse relations.
// NOTE(review): the inverse-relation arm also compares
// `related_issue.id` — confirm the GraphQL fragment exposes the
// counterpart issue under that field for inverse relations too.
let op = IssueRelationsQuery::build(IssueRelationsArguments { id: issue_id });
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
let issue_with_relations = data
.issue
.ok_or_else(|| anyhow::anyhow!("not found: Issue not found"))?;
let relation_id = issue_with_relations
.relations
.nodes
.iter()
.find(|r| r.related_issue.id.inner() == related_issue_id)
.map(|r| r.id.inner().to_string())
.or_else(|| {
issue_with_relations
.inverse_relations
.nodes
.iter()
.find(|r| r.related_issue.id.inner() == related_issue_id)
.map(|r| r.id.inner().to_string())
});
match relation_id {
Some(id) => {
let op =
IssueRelationDeleteMutation::build(IssueRelationDeleteArguments { id });
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
Ok(models::SetRelationResult {
success: data.issue_relation_delete.success,
action: "removed".to_string(),
})
}
None => {
// Nothing to delete; report success without touching the API.
Ok(models::SetRelationResult {
success: true,
action: "no_change".to_string(),
})
}
}
}
}
}
/// Return the next page (of `COMMENTS_PAGE_SIZE`) of an issue's comments.
///
/// All comments are fetched once and cached; repeated calls for the same
/// issue page through the cached list. The cache entry is dropped as soon as
/// the final page has been served, so the next call re-fetches fresh data.
pub async fn get_issue_comments(&self, issue: String) -> Result<models::CommentsResult> {
let client = LinearClient::new(self.api_key.clone())
.context("internal: failed to create Linear client")?;
let issue_id = self.resolve_to_issue_id(&client, &issue).await?;
let cache_key = format!("{}|{}", issue_id, COMMENTS_PAGE_SIZE);
// Evict expired entries before acquiring/creating this issue's slot.
self.comments_cache.sweep_expired();
let query_lock = self.comments_cache.get_or_create(&cache_key);
// Check state without holding the lock across the network fetch below.
let needs_fetch = {
let state = query_lock.lock_state();
state.is_empty() || state.is_expired()
};
let issue_identifier: String;
if needs_fetch {
let (identifier, all_comments) = self.fetch_all_comments(&client, &issue_id).await?;
issue_identifier = identifier.clone();
// Re-check after the await: another task may have repopulated the
// entry while we were fetching; only reset if it is still stale.
let mut state = query_lock.lock_state();
if state.is_empty() || state.is_expired() {
state.reset(all_comments, identifier, COMMENTS_PAGE_SIZE);
}
} else {
let state = query_lock.lock_state();
issue_identifier = state.meta.clone();
}
// Serve the next page and advance the offset, all under one lock.
let (page_comments, total, shown, has_more) = {
let mut state = query_lock.lock_state();
let (page, has_more) =
paginate_slice(&state.results, state.next_offset, state.page_size);
let total = state.results.len();
state.next_offset += page.len();
let shown = state.next_offset;
(page, total, shown, has_more)
};
// Exhausted: remove the cache entry, but only if it is still the same
// slot we have been paging (another task may have replaced it).
if !has_more {
self.comments_cache.remove_if_same(&cache_key, &query_lock);
}
Ok(models::CommentsResult {
issue_identifier,
comments: page_comments,
shown_comments: shown,
total_comments: total,
has_more,
})
}
/// Pull every comment for `issue_id` from the API, following cursor
/// pagination, and return them sorted oldest-first along with the issue's
/// human-readable identifier.
///
/// # Errors
/// Fails when the issue does not exist, when the API reports another page
/// without supplying an `end_cursor`, or after `ISSUE_COMMENTS_MAX_PAGES`
/// pages have been fetched.
async fn fetch_all_comments(
&self,
client: &LinearClient,
issue_id: &str,
) -> Result<(String, Vec<models::CommentSummary>)> {
let mut cursor: Option<String> = None;
let mut all_comments = Vec::new();
let mut identifier: Option<String> = None;
for page in 0..ISSUE_COMMENTS_MAX_PAGES {
let args = IssueCommentsArguments {
id: issue_id.to_string(),
first: Some(ISSUE_COMMENTS_FETCH_PAGE_SIZE),
after: cursor.clone(),
};
let op = IssueCommentsQuery::build(args);
let resp = client.run(op).await?;
let data = http::extract_data(resp)?;
let issue = data
.issue
.ok_or_else(|| anyhow::anyhow!("Issue not found: {}", issue_id))?;
// The identifier is the same on every page; capture it once.
if identifier.is_none() {
identifier = Some(issue.identifier.clone());
}
all_comments.extend(
issue
.comments
.nodes
.into_iter()
.map(|c| models::CommentSummary {
id: c.id.inner().to_string(),
body: c.body,
url: c.url,
created_at: c.created_at.0,
updated_at: c.updated_at.0,
parent_id: c.parent_id,
author_name: c.user.as_ref().map(|u| u.name.clone()),
author_email: c.user.as_ref().map(|u| u.email.clone()),
}),
);
// Last page reached: sort oldest-first and return.
if !issue.comments.page_info.has_next_page {
all_comments.sort_by(|a, b| a.created_at.cmp(&b.created_at));
return Ok((identifier.unwrap_or_default(), all_comments));
}
cursor = issue.comments.page_info.end_cursor.clone();
// Defensive: has_next_page without a cursor would loop forever.
if cursor.is_none() {
return Err(anyhow::anyhow!(
"Issue comments pagination for {} reported has_next_page=true without end_cursor",
issue_id
));
}
// Bail out on the page cap instead of silently truncating the list.
if page + 1 == ISSUE_COMMENTS_MAX_PAGES {
return Err(anyhow::anyhow!(
"Issue comments pagination for {} exceeded {} pages",
issue_id,
ISSUE_COMMENTS_MAX_PAGES
));
}
}
// Every loop iteration either returns a result or an error above.
unreachable!("issue comments pagination loop must return or error")
}
}
// Unit tests for `parse_identifier`; everything else in this module needs a
// live Linear API and is exercised elsewhere.
#[cfg(test)]
mod tests {
use super::parse_identifier;
// Canonical uppercase `TEAM-123` input parses as-is.
#[test]
fn parse_plain_uppercase() {
assert_eq!(parse_identifier("ENG-245"), Some(("ENG".into(), 245)));
}
// Lowercase input is normalized to an uppercase team key.
#[test]
fn parse_lowercase_normalizes() {
assert_eq!(parse_identifier("eng-245"), Some(("ENG".into(), 245)));
}
// Identifiers embedded in issue URLs are recognized.
#[test]
fn parse_from_url() {
assert_eq!(
parse_identifier("https://linear.app/foo/issue/eng-245/slug"),
Some(("ENG".into(), 245))
);
}
// Inputs without a letters-dash-digits pattern yield `None`.
#[test]
fn parse_invalid_returns_none() {
assert_eq!(parse_identifier("invalid"), None);
assert_eq!(parse_identifier("ENG-"), None);
assert_eq!(parse_identifier("ENG"), None);
assert_eq!(parse_identifier("123-456"), None);
}
}