1pub mod http;
2pub mod models;
3pub mod tools;
4
5#[doc(hidden)]
7pub mod test_support;
8
use agentic_tools_utils::pagination::PaginationCache;
use agentic_tools_utils::pagination::paginate_slice;
use anyhow::Context;
use anyhow::Result;
use cynic::MutationBuilder;
use cynic::QueryBuilder;
use http::LinearClient;
use linear_queries::scalars::DateTimeOrDuration;
use linear_queries::*;
use regex::Regex;
use std::sync::Arc;
use std::sync::OnceLock;
20
21pub use tools::build_registry;
23
24fn parse_identifier(input: &str) -> Option<(String, i32)> {
27 let upper = input.to_uppercase();
28 let re = Regex::new(r"([A-Z]{2,10})-(\d{1,10})").unwrap();
29 if let Some(caps) = re.captures(&upper) {
30 let key = caps.get(1)?.as_str().to_string();
31 let num_str = caps.get(2)?.as_str();
32 let number: i32 = num_str.parse().ok()?;
33 return Some((key, number));
34 }
35 None
36}
37
/// Number of comments returned to the caller per `get_issue_comments` page.
const COMMENTS_PAGE_SIZE: usize = 10;
/// Page size used when fetching comment pages from the Linear API.
const ISSUE_COMMENTS_FETCH_PAGE_SIZE: i32 = 50;
/// Upper bound on API pages fetched before pagination is treated as runaway.
const ISSUE_COMMENTS_MAX_PAGES: usize = 100;
41
/// Tool surface for the Linear GraphQL API.
#[derive(Clone)]
pub struct LinearTools {
    // API key read from `LINEAR_API_KEY` at construction; `None` if unset.
    api_key: Option<String>,
    // Cross-call cache backing `get_issue_comments` pagination. The `String`
    // metadata stored with each entry is the issue's human identifier.
    comments_cache: Arc<PaginationCache<models::CommentSummary, String>>,
}
47
impl LinearTools {
    /// Creates the tool set, reading the API key from the `LINEAR_API_KEY`
    /// environment variable. A missing key is tolerated here; requests made
    /// without one will fail at call time instead.
    pub fn new() -> Self {
        Self {
            api_key: std::env::var("LINEAR_API_KEY").ok(),
            comments_cache: Arc::new(PaginationCache::new()),
        }
    }

    /// Classifies user input as either a human-readable identifier
    /// ("ENG-245", possibly embedded in a URL) or a raw API id.
    fn resolve_issue_id(&self, input: &str) -> IssueIdentifier {
        if let Some((key, number)) = parse_identifier(input) {
            // Normalize to canonical "KEY-123" form regardless of how the
            // identifier appeared in the input.
            return IssueIdentifier::Identifier(format!("{}-{}", key, number));
        }
        // Anything that does not look like TEAM-123 is assumed to be an id.
        IssueIdentifier::Id(input.to_string())
    }

    /// Resolves arbitrary user input to the issue's API id. Raw ids pass
    /// through untouched; human identifiers trigger a lookup filtered by
    /// team key and issue number.
    async fn resolve_to_issue_id(&self, client: &LinearClient, input: &str) -> Result<String> {
        match self.resolve_issue_id(input) {
            IssueIdentifier::Id(id) => Ok(id),
            IssueIdentifier::Identifier(ident) => {
                // `ident` was produced by `parse_identifier`, so this
                // re-parse is expected to succeed; the error is defensive.
                let (team_key, number) = parse_identifier(&ident)
                    .ok_or_else(|| anyhow::anyhow!("not found: Issue {} not found", ident))?;
                let filter = IssueFilter {
                    team: Some(TeamFilter {
                        key: Some(StringComparator {
                            eq: Some(team_key),
                            ..Default::default()
                        }),
                        ..Default::default()
                    }),
                    number: Some(NumberComparator {
                        eq: Some(number as f64),
                        ..Default::default()
                    }),
                    ..Default::default()
                };
                // One result is enough: team key + number identify an issue.
                let op = IssuesQuery::build(IssuesArguments {
                    first: Some(1),
                    after: None,
                    filter: Some(filter),
                });
                let resp = client.run(op).await?;
                let data = http::extract_data(resp)?;
                let issue = data
                    .issues
                    .nodes
                    .into_iter()
                    .next()
                    .ok_or_else(|| anyhow::anyhow!("not found: Issue {} not found", ident))?;
                Ok(issue.id.inner().to_string())
            }
        }
    }
}
105
impl Default for LinearTools {
    /// Equivalent to [`LinearTools::new`].
    fn default() -> Self {
        Self::new()
    }
}
111
/// How user input naming an issue was interpreted.
enum IssueIdentifier {
    // Raw Linear API id, usable directly in queries.
    Id(String),
    // Human-readable "TEAM-123" identifier; requires a lookup to get the id.
    Identifier(String),
}
116
117impl From<linear_queries::User> for models::UserRef {
125 fn from(u: linear_queries::User) -> Self {
126 let name = if u.display_name.is_empty() {
127 u.name
128 } else {
129 u.display_name
130 };
131 Self {
132 id: u.id.inner().to_string(),
133 name,
134 email: u.email,
135 }
136 }
137}
138
139impl From<linear_queries::Team> for models::TeamRef {
140 fn from(t: linear_queries::Team) -> Self {
141 Self {
142 id: t.id.inner().to_string(),
143 key: t.key,
144 name: t.name,
145 }
146 }
147}
148
149impl From<linear_queries::WorkflowState> for models::WorkflowStateRef {
150 fn from(s: linear_queries::WorkflowState) -> Self {
151 Self {
152 id: s.id.inner().to_string(),
153 name: s.name,
154 state_type: s.state_type,
155 }
156 }
157}
158
159impl From<linear_queries::Project> for models::ProjectRef {
160 fn from(p: linear_queries::Project) -> Self {
161 Self {
162 id: p.id.inner().to_string(),
163 name: p.name,
164 }
165 }
166}
167
168impl From<linear_queries::ParentIssue> for models::ParentIssueRef {
169 fn from(p: linear_queries::ParentIssue) -> Self {
170 Self {
171 id: p.id.inner().to_string(),
172 identifier: p.identifier,
173 }
174 }
175}
176
/// Maps the full GraphQL issue node onto the transport-friendly summary
/// model; scalar wrappers (`.0`, `.inner()`) are unwrapped to plain values.
impl From<linear_queries::Issue> for models::IssueSummary {
    fn from(i: linear_queries::Issue) -> Self {
        Self {
            id: i.id.inner().to_string(),
            identifier: i.identifier,
            title: i.title,
            url: i.url,
            team: i.team.into(),
            // These associations are nullable on this node type.
            state: i.state.map(Into::into),
            assignee: i.assignee.map(Into::into),
            creator: i.creator.map(Into::into),
            project: i.project.map(Into::into),
            priority: i.priority as i32,
            priority_label: i.priority_label,
            label_ids: i.label_ids,
            due_date: i.due_date.map(|d| d.0),
            created_at: i.created_at.0,
            updated_at: i.updated_at.0,
        }
    }
}
198
/// Same mapping as for [`linear_queries::Issue`], except that search
/// results carry a non-optional `state`, so it is wrapped in `Some` here.
impl From<linear_queries::IssueSearchResult> for models::IssueSummary {
    fn from(i: linear_queries::IssueSearchResult) -> Self {
        Self {
            id: i.id.inner().to_string(),
            identifier: i.identifier,
            title: i.title,
            url: i.url,
            team: i.team.into(),
            state: Some(i.state.into()),
            assignee: i.assignee.map(Into::into),
            creator: i.creator.map(Into::into),
            project: i.project.map(Into::into),
            priority: i.priority as i32,
            priority_label: i.priority_label,
            label_ids: i.label_ids,
            due_date: i.due_date.map(|d| d.0),
            created_at: i.created_at.0,
            updated_at: i.updated_at.0,
        }
    }
}
220
impl LinearTools {
    /// Searches issues, combining an optional full-text `query` with
    /// structured filters. A non-empty query goes through Linear's search
    /// endpoint (optionally matching comment text); otherwise a plain
    /// filtered listing is returned.
    ///
    /// `first` is clamped to 1..=100 (default 50); `after` is the cursor
    /// from a previous page. Date bounds are inclusive (`gte`/`lte`).
    #[allow(clippy::too_many_arguments)]
    pub async fn search_issues(
        &self,
        query: Option<String>,
        include_comments: Option<bool>,
        priority: Option<i32>,
        state_id: Option<String>,
        assignee_id: Option<String>,
        creator_id: Option<String>,
        team_id: Option<String>,
        project_id: Option<String>,
        created_after: Option<String>,
        created_before: Option<String>,
        updated_after: Option<String>,
        updated_before: Option<String>,
        first: Option<i32>,
        after: Option<String>,
    ) -> Result<models::SearchResult> {
        let client = LinearClient::new(self.api_key.clone())
            .context("internal: failed to create Linear client")?;

        // Build the filter incrementally; `has_filter` tracks whether any
        // criterion was set so an all-default filter is not sent at all.
        let mut filter = IssueFilter::default();
        let mut has_filter = false;

        if let Some(p) = priority {
            filter.priority = Some(NullableNumberComparator {
                eq: Some(p as f64),
                ..Default::default()
            });
            has_filter = true;
        }
        if let Some(id) = state_id {
            filter.state = Some(WorkflowStateFilter {
                id: Some(IdComparator {
                    eq: Some(cynic::Id::new(id)),
                }),
                ..Default::default()
            });
            has_filter = true;
        }
        if let Some(id) = assignee_id {
            filter.assignee = Some(NullableUserFilter {
                id: Some(IdComparator {
                    eq: Some(cynic::Id::new(id)),
                }),
            });
            has_filter = true;
        }
        if let Some(id) = creator_id {
            filter.creator = Some(NullableUserFilter {
                id: Some(IdComparator {
                    eq: Some(cynic::Id::new(id)),
                }),
            });
            has_filter = true;
        }
        if let Some(id) = team_id {
            filter.team = Some(TeamFilter {
                id: Some(IdComparator {
                    eq: Some(cynic::Id::new(id)),
                }),
                ..Default::default()
            });
            has_filter = true;
        }
        if let Some(id) = project_id {
            filter.project = Some(NullableProjectFilter {
                id: Some(IdComparator {
                    eq: Some(cynic::Id::new(id)),
                }),
            });
            has_filter = true;
        }
        if created_after.is_some() || created_before.is_some() {
            filter.created_at = Some(DateComparator {
                gte: created_after.map(DateTimeOrDuration),
                lte: created_before.map(DateTimeOrDuration),
                ..Default::default()
            });
            has_filter = true;
        }
        if updated_after.is_some() || updated_before.is_some() {
            filter.updated_at = Some(DateComparator {
                gte: updated_after.map(DateTimeOrDuration),
                lte: updated_before.map(DateTimeOrDuration),
                ..Default::default()
            });
            has_filter = true;
        }

        let filter_opt = if has_filter { Some(filter) } else { None };
        let page_size = Some(first.unwrap_or(50).clamp(1, 100));
        // A whitespace-only query is treated the same as no query.
        let q_trimmed = query.as_ref().map(|s| s.trim()).unwrap_or("");

        if !q_trimmed.is_empty() {
            // Full-text path: search endpoint with the same filter applied.
            let op = SearchIssuesQuery::build(SearchIssuesArguments {
                term: q_trimmed.to_string(),
                include_comments: Some(include_comments.unwrap_or(true)),
                first: page_size,
                after,
                filter: filter_opt,
            });
            let resp = client.run(op).await?;
            let data = http::extract_data(resp)?;

            let issues = data
                .search_issues
                .nodes
                .into_iter()
                .map(Into::into)
                .collect();

            Ok(models::SearchResult {
                issues,
                has_next_page: data.search_issues.page_info.has_next_page,
                end_cursor: data.search_issues.page_info.end_cursor,
            })
        } else {
            // Listing path: plain filtered issues query.
            let op = IssuesQuery::build(IssuesArguments {
                first: page_size,
                after,
                filter: filter_opt,
            });

            let resp = client.run(op).await?;
            let data = http::extract_data(resp)?;

            let issues = data.issues.nodes.into_iter().map(Into::into).collect();

            Ok(models::SearchResult {
                issues,
                has_next_page: data.issues.page_info.has_next_page,
                end_cursor: data.issues.page_info.end_cursor,
            })
        }
    }
363
    /// Fetches one issue with full details. `issue` may be a raw API id or
    /// a human identifier such as "ENG-245" (also inside an issue URL).
    pub async fn read_issue(&self, issue: String) -> Result<models::IssueDetails> {
        let client = LinearClient::new(self.api_key.clone())
            .context("internal: failed to create Linear client")?;
        let resolved = self.resolve_issue_id(&issue);

        let issue_data = match resolved {
            IssueIdentifier::Id(id) => {
                let op = IssueByIdQuery::build(IssueByIdArguments { id });
                let resp = client.run(op).await?;
                let data = http::extract_data(resp)?;
                data.issue
                    .ok_or_else(|| anyhow::anyhow!("not found: Issue not found"))?
            }
            IssueIdentifier::Identifier(ident) => {
                // NOTE(review): this team-key + number lookup mirrors
                // `resolve_to_issue_id`, but fetches the full node in one
                // round trip instead of resolving an id and re-reading.
                let (team_key, number) = parse_identifier(&ident)
                    .ok_or_else(|| anyhow::anyhow!("not found: Issue {} not found", ident))?;
                let filter = IssueFilter {
                    team: Some(TeamFilter {
                        key: Some(StringComparator {
                            eq: Some(team_key),
                            ..Default::default()
                        }),
                        ..Default::default()
                    }),
                    number: Some(NumberComparator {
                        eq: Some(number as f64),
                        ..Default::default()
                    }),
                    ..Default::default()
                };
                let op = IssuesQuery::build(IssuesArguments {
                    first: Some(1),
                    after: None,
                    filter: Some(filter),
                });
                let resp = client.run(op).await?;
                let data = http::extract_data(resp)?;
                data.issues
                    .nodes
                    .into_iter()
                    .next()
                    .ok_or_else(|| anyhow::anyhow!("not found: Issue {} not found", ident))?
            }
        };

        // Pull out the detail-only fields before `issue_data` is consumed
        // by the `IssueSummary` conversion below.
        let description = issue_data.description.clone();
        let estimate = issue_data.estimate;
        let started_at = issue_data.started_at.as_ref().map(|d| d.0.clone());
        let completed_at = issue_data.completed_at.as_ref().map(|d| d.0.clone());
        let canceled_at = issue_data.canceled_at.as_ref().map(|d| d.0.clone());
        let parent = issue_data.parent.as_ref().map(|p| models::ParentIssueRef {
            id: p.id.inner().to_string(),
            identifier: p.identifier.clone(),
        });

        let summary: models::IssueSummary = issue_data.into();

        Ok(models::IssueDetails {
            issue: summary,
            description,
            estimate,
            parent,
            started_at,
            completed_at,
            canceled_at,
        })
    }
433
    /// Creates an issue in team `team_id` with the given `title`.
    /// Optional fields are passed through unchanged; an empty `label_ids`
    /// list is sent as "no labels specified" rather than an explicit empty
    /// set. Returns Linear's success flag plus the created issue, if any.
    #[allow(clippy::too_many_arguments)]
    pub async fn create_issue(
        &self,
        team_id: String,
        title: String,
        description: Option<String>,
        priority: Option<i32>,
        assignee_id: Option<String>,
        project_id: Option<String>,
        state_id: Option<String>,
        parent_id: Option<String>,
        label_ids: Vec<String>,
    ) -> Result<models::CreateIssueResult> {
        let client = LinearClient::new(self.api_key.clone())
            .context("internal: failed to create Linear client")?;

        // Distinguish "no labels requested" (None) from an explicit list.
        let label_ids_opt = if label_ids.is_empty() {
            None
        } else {
            Some(label_ids)
        };

        let input = IssueCreateInput {
            team_id,
            title: Some(title),
            description,
            priority,
            assignee_id,
            project_id,
            state_id,
            parent_id,
            label_ids: label_ids_opt,
        };

        let op = IssueCreateMutation::build(IssueCreateArguments { input });
        let resp = client.run(op).await?;
        let data = http::extract_data(resp)?;

        let payload = data.issue_create;
        let issue: Option<models::IssueSummary> = payload.issue.map(Into::into);

        Ok(models::CreateIssueResult {
            success: payload.success,
            issue,
        })
    }
482
    /// Updates an issue; fields left as `None` are untouched by Linear.
    /// `label_ids` replaces the whole label set, while `added_label_ids` /
    /// `removed_label_ids` adjust it incrementally.
    ///
    /// Errors if Linear reports `success == false` or omits the updated
    /// issue from the mutation payload.
    #[allow(clippy::too_many_arguments)]
    pub async fn update_issue(
        &self,
        issue: String,
        title: Option<String>,
        description: Option<String>,
        priority: Option<i32>,
        assignee_id: Option<String>,
        state_id: Option<String>,
        project_id: Option<String>,
        parent_id: Option<String>,
        label_ids: Option<Vec<String>>,
        added_label_ids: Option<Vec<String>>,
        removed_label_ids: Option<Vec<String>>,
        due_date: Option<String>,
    ) -> Result<models::IssueResult> {
        let client = LinearClient::new(self.api_key.clone())
            .context("internal: failed to create Linear client")?;
        // Accepts raw ids or "TEAM-123" identifiers.
        let id = self.resolve_to_issue_id(&client, &issue).await?;

        let input = IssueUpdateInput {
            title,
            description,
            priority,
            assignee_id,
            state_id,
            project_id,
            parent_id,
            label_ids,
            added_label_ids,
            removed_label_ids,
            due_date: due_date.map(linear_queries::scalars::TimelessDate),
        };

        let op = IssueUpdateMutation::build(IssueUpdateArguments { id, input });
        let resp = client.run(op).await?;
        let data = http::extract_data(resp)?;

        let payload = data.issue_update;
        if !payload.success {
            anyhow::bail!("Update failed: Linear returned success=false");
        }
        let issue = payload
            .issue
            .ok_or_else(|| anyhow::anyhow!("No issue returned from update"))?;

        Ok(models::IssueResult {
            issue: issue.into(),
        })
    }
534
    /// Adds a comment to an issue, optionally threaded under `parent_id`.
    /// Returns the new comment's id/body/timestamp when Linear echoes the
    /// comment back in the payload; all three are `None` otherwise.
    pub async fn add_comment(
        &self,
        issue: String,
        body: String,
        parent_id: Option<String>,
    ) -> Result<models::CommentResult> {
        let client = LinearClient::new(self.api_key.clone())
            .context("internal: failed to create Linear client")?;
        // Accepts raw ids or "TEAM-123" identifiers.
        let issue_id = self.resolve_to_issue_id(&client, &issue).await?;

        let input = CommentCreateInput {
            issue_id,
            body: Some(body),
            parent_id,
        };

        let op = CommentCreateMutation::build(CommentCreateArguments { input });
        let resp = client.run(op).await?;
        let data = http::extract_data(resp)?;

        let payload = data.comment_create;
        // The comment may be absent even when the mutation reports success.
        let (comment_id, body, created_at) = match payload.comment {
            Some(c) => (
                Some(c.id.inner().to_string()),
                Some(c.body),
                Some(c.created_at.0),
            ),
            None => (None, None, None),
        };

        Ok(models::CommentResult {
            success: payload.success,
            comment_id,
            body,
            created_at,
        })
    }
573
    /// Archives an issue (raw id or "TEAM-123" identifier accepted) and
    /// reports Linear's success flag.
    pub async fn archive_issue(&self, issue: String) -> Result<models::ArchiveIssueResult> {
        let client = LinearClient::new(self.api_key.clone())
            .context("internal: failed to create Linear client")?;
        let id = self.resolve_to_issue_id(&client, &issue).await?;
        let op = IssueArchiveMutation::build(IssueArchiveArguments { id });
        let resp = client.run(op).await?;
        let data = http::extract_data(resp)?;
        Ok(models::ArchiveIssueResult {
            success: data.issue_archive.success,
        })
    }
586
    /// Lists workspace metadata: users, teams, projects, workflow states,
    /// or labels, selected via `kind`. `search` narrows results by a
    /// case-insensitive substring (on display name, team key, or name,
    /// depending on the kind); `team_id` applies only to workflow states
    /// and labels. Results are mapped into the shared `MetadataItem` shape
    /// with the fields irrelevant to the requested kind set to `None`.
    ///
    /// NOTE(review): unlike `search_issues`, `first` defaults to 50 but is
    /// not clamped here — confirm whether the API bounds large values.
    pub async fn get_metadata(
        &self,
        kind: models::MetadataKind,
        search: Option<String>,
        team_id: Option<String>,
        first: Option<i32>,
        after: Option<String>,
    ) -> Result<models::GetMetadataResult> {
        let client = LinearClient::new(self.api_key.clone())
            .context("internal: failed to create Linear client")?;
        let first = first.or(Some(50));

        match kind {
            models::MetadataKind::Users => {
                // Users are searched by display name.
                let filter = search.map(|s| linear_queries::UserFilter {
                    display_name: Some(StringComparator {
                        contains_ignore_case: Some(s),
                        ..Default::default()
                    }),
                });
                let op = linear_queries::UsersQuery::build(linear_queries::UsersArguments {
                    first,
                    after,
                    filter,
                });
                let resp = client.run(op).await?;
                let data = http::extract_data(resp)?;
                let items = data
                    .users
                    .nodes
                    .into_iter()
                    .map(|u| {
                        // Same display-name fallback as `From<User> for UserRef`.
                        let name = if u.display_name.is_empty() {
                            u.name
                        } else {
                            u.display_name
                        };
                        models::MetadataItem {
                            id: u.id.inner().to_string(),
                            name,
                            email: Some(u.email),
                            key: None,
                            state_type: None,
                            team_id: None,
                        }
                    })
                    .collect();
                Ok(models::GetMetadataResult {
                    kind: models::MetadataKind::Users,
                    items,
                    has_next_page: data.users.page_info.has_next_page,
                    end_cursor: data.users.page_info.end_cursor,
                })
            }
            models::MetadataKind::Teams => {
                // Teams are searched by key (e.g. "ENG"), not by name.
                let filter = search.map(|s| linear_queries::TeamFilter {
                    key: Some(StringComparator {
                        contains_ignore_case: Some(s),
                        ..Default::default()
                    }),
                    ..Default::default()
                });
                let op = linear_queries::TeamsQuery::build(linear_queries::TeamsArguments {
                    first,
                    after,
                    filter,
                });
                let resp = client.run(op).await?;
                let data = http::extract_data(resp)?;
                let items = data
                    .teams
                    .nodes
                    .into_iter()
                    .map(|t| models::MetadataItem {
                        id: t.id.inner().to_string(),
                        name: t.name,
                        key: Some(t.key),
                        email: None,
                        state_type: None,
                        team_id: None,
                    })
                    .collect();
                Ok(models::GetMetadataResult {
                    kind: models::MetadataKind::Teams,
                    items,
                    has_next_page: data.teams.page_info.has_next_page,
                    end_cursor: data.teams.page_info.end_cursor,
                })
            }
            models::MetadataKind::Projects => {
                let filter = search.map(|s| linear_queries::ProjectFilter {
                    name: Some(StringComparator {
                        contains_ignore_case: Some(s),
                        ..Default::default()
                    }),
                });
                let op = linear_queries::ProjectsQuery::build(linear_queries::ProjectsArguments {
                    first,
                    after,
                    filter,
                });
                let resp = client.run(op).await?;
                let data = http::extract_data(resp)?;
                let items = data
                    .projects
                    .nodes
                    .into_iter()
                    .map(|p| models::MetadataItem {
                        id: p.id.inner().to_string(),
                        name: p.name,
                        key: None,
                        email: None,
                        state_type: None,
                        team_id: None,
                    })
                    .collect();
                Ok(models::GetMetadataResult {
                    kind: models::MetadataKind::Projects,
                    items,
                    has_next_page: data.projects.page_info.has_next_page,
                    end_cursor: data.projects.page_info.end_cursor,
                })
            }
            models::MetadataKind::WorkflowStates => {
                // Filter is built incrementally; omitted entirely when no
                // criterion was provided.
                let mut filter = linear_queries::WorkflowStateFilter::default();
                let mut has_filter = false;
                if let Some(s) = search {
                    filter.name = Some(StringComparator {
                        contains_ignore_case: Some(s),
                        ..Default::default()
                    });
                    has_filter = true;
                }
                if let Some(tid) = team_id {
                    filter.team = Some(linear_queries::TeamFilter {
                        id: Some(linear_queries::IdComparator {
                            eq: Some(cynic::Id::new(tid)),
                        }),
                        ..Default::default()
                    });
                    has_filter = true;
                }
                let filter_opt = if has_filter { Some(filter) } else { None };
                let op = linear_queries::WorkflowStatesQuery::build(
                    linear_queries::WorkflowStatesArguments {
                        first,
                        after,
                        filter: filter_opt,
                    },
                );
                let resp = client.run(op).await?;
                let data = http::extract_data(resp)?;
                let items = data
                    .workflow_states
                    .nodes
                    .into_iter()
                    .map(|s| models::MetadataItem {
                        id: s.id.inner().to_string(),
                        name: s.name,
                        state_type: Some(s.state_type),
                        key: None,
                        email: None,
                        team_id: None,
                    })
                    .collect();
                Ok(models::GetMetadataResult {
                    kind: models::MetadataKind::WorkflowStates,
                    items,
                    has_next_page: data.workflow_states.page_info.has_next_page,
                    end_cursor: data.workflow_states.page_info.end_cursor,
                })
            }
            models::MetadataKind::Labels => {
                let mut filter = linear_queries::IssueLabelFilter::default();
                let mut has_filter = false;
                if let Some(s) = search {
                    filter.name = Some(StringComparator {
                        contains_ignore_case: Some(s),
                        ..Default::default()
                    });
                    has_filter = true;
                }
                if let Some(tid) = team_id {
                    filter.team = Some(linear_queries::NullableTeamFilter {
                        id: Some(linear_queries::IdComparator {
                            eq: Some(cynic::Id::new(tid)),
                        }),
                        ..Default::default()
                    });
                    has_filter = true;
                }
                let filter_opt = if has_filter { Some(filter) } else { None };
                let op =
                    linear_queries::IssueLabelsQuery::build(linear_queries::IssueLabelsArguments {
                        first,
                        after,
                        filter: filter_opt,
                    });
                let resp = client.run(op).await?;
                let data = http::extract_data(resp)?;
                let items = data
                    .issue_labels
                    .nodes
                    .into_iter()
                    .map(|l| models::MetadataItem {
                        id: l.id.inner().to_string(),
                        name: l.name,
                        // Labels may be workspace-wide, hence optional team.
                        team_id: l.team.map(|t| t.id.inner().to_string()),
                        key: None,
                        email: None,
                        state_type: None,
                    })
                    .collect();
                Ok(models::GetMetadataResult {
                    kind: models::MetadataKind::Labels,
                    items,
                    has_next_page: data.issue_labels.page_info.has_next_page,
                    end_cursor: data.issue_labels.page_info.end_cursor,
                })
            }
        }
    }
810
    /// Creates or removes a relation between two issues.
    ///
    /// With `relation_type` present ("blocks" | "duplicate" | "related",
    /// case-insensitive) a relation is created. With `None`, any existing
    /// relation to `related_issue` — searched in both directions — is
    /// deleted; if none exists the call succeeds with action "no_change".
    pub async fn set_relation(
        &self,
        issue: String,
        related_issue: String,
        relation_type: Option<String>,
    ) -> Result<models::SetRelationResult> {
        let client = LinearClient::new(self.api_key.clone())
            .context("internal: failed to create Linear client")?;
        let issue_id = self.resolve_to_issue_id(&client, &issue).await?;
        let related_issue_id = self.resolve_to_issue_id(&client, &related_issue).await?;

        match relation_type {
            Some(rel_type) => {
                let relation_type = match rel_type.to_lowercase().as_str() {
                    "blocks" => IssueRelationType::Blocks,
                    "duplicate" => IssueRelationType::Duplicate,
                    "related" => IssueRelationType::Related,
                    other => anyhow::bail!(
                        "Invalid relation type: {}. Must be one of: blocks, duplicate, related",
                        other
                    ),
                };

                let input = IssueRelationCreateInput {
                    issue_id,
                    related_issue_id,
                    relation_type,
                };

                let op = IssueRelationCreateMutation::build(IssueRelationCreateArguments { input });
                let resp = client.run(op).await?;
                let data = http::extract_data(resp)?;

                Ok(models::SetRelationResult {
                    success: data.issue_relation_create.success,
                    action: "created".to_string(),
                })
            }
            None => {
                // Removal: list the issue's relations and find the one
                // pointing at `related_issue_id`.
                let op = IssueRelationsQuery::build(IssueRelationsArguments { id: issue_id });
                let resp = client.run(op).await?;
                let data = http::extract_data(resp)?;

                let issue_with_relations = data
                    .issue
                    .ok_or_else(|| anyhow::anyhow!("not found: Issue not found"))?;

                // Check outgoing relations first, then incoming ones.
                let relation_id = issue_with_relations
                    .relations
                    .nodes
                    .iter()
                    .find(|r| r.related_issue.id.inner() == related_issue_id)
                    .map(|r| r.id.inner().to_string())
                    .or_else(|| {
                        issue_with_relations
                            .inverse_relations
                            .nodes
                            .iter()
                            .find(|r| r.related_issue.id.inner() == related_issue_id)
                            .map(|r| r.id.inner().to_string())
                    });

                match relation_id {
                    Some(id) => {
                        let op =
                            IssueRelationDeleteMutation::build(IssueRelationDeleteArguments { id });
                        let resp = client.run(op).await?;
                        let data = http::extract_data(resp)?;

                        Ok(models::SetRelationResult {
                            success: data.issue_relation_delete.success,
                            action: "removed".to_string(),
                        })
                    }
                    None => {
                        // Nothing to delete: removal is idempotent.
                        Ok(models::SetRelationResult {
                            success: true,
                            action: "no_change".to_string(),
                        })
                    }
                }
            }
        }
    }
900
    /// Returns one page (of `COMMENTS_PAGE_SIZE`) of an issue's comments.
    /// The full thread is fetched and cached on the first call; subsequent
    /// calls page through the cached list until it is exhausted, at which
    /// point the cache entry is dropped so the next call refetches.
    pub async fn get_issue_comments(&self, issue: String) -> Result<models::CommentsResult> {
        let client = LinearClient::new(self.api_key.clone())
            .context("internal: failed to create Linear client")?;

        let issue_id = self.resolve_to_issue_id(&client, &issue).await?;

        // Page size is part of the key so a config change cannot reuse
        // pagination state computed with a different size.
        let cache_key = format!("{}|{}", issue_id, COMMENTS_PAGE_SIZE);

        self.comments_cache.sweep_expired();

        let query_lock = self.comments_cache.get_or_create(&cache_key);

        // Check under the lock, fetch without it (network), then re-check.
        let needs_fetch = {
            let state = query_lock.lock_state();
            state.is_empty() || state.is_expired()
        };

        let issue_identifier: String;

        if needs_fetch {
            let (identifier, all_comments) = self.fetch_all_comments(&client, &issue_id).await?;
            issue_identifier = identifier.clone();

            // Re-check so a concurrent fetcher's fresh state isn't clobbered.
            let mut state = query_lock.lock_state();
            if state.is_empty() || state.is_expired() {
                state.reset(all_comments, identifier, COMMENTS_PAGE_SIZE);
            }
        } else {
            let state = query_lock.lock_state();
            issue_identifier = state.meta.clone();
        }

        // Take the next page and advance the offset atomically.
        let (page_comments, total, shown, has_more) = {
            let mut state = query_lock.lock_state();
            let (page, has_more) =
                paginate_slice(&state.results, state.next_offset, state.page_size);
            let total = state.results.len();
            state.next_offset += page.len();
            let shown = state.next_offset;
            (page, total, shown, has_more)
        };

        // Fully consumed: drop the cache entry (only if it is still ours).
        if !has_more {
            self.comments_cache.remove_if_same(&cache_key, &query_lock);
        }

        Ok(models::CommentsResult {
            issue_identifier,
            comments: page_comments,
            shown_comments: shown,
            total_comments: total,
            has_more,
        })
    }
967
    /// Fetches every comment on `issue_id`, following cursors for up to
    /// `ISSUE_COMMENTS_MAX_PAGES` pages. Returns the issue's human
    /// identifier alongside the comments sorted oldest-first.
    async fn fetch_all_comments(
        &self,
        client: &LinearClient,
        issue_id: &str,
    ) -> Result<(String, Vec<models::CommentSummary>)> {
        let mut cursor: Option<String> = None;
        let mut all_comments = Vec::new();
        let mut identifier: Option<String> = None;

        for page in 0..ISSUE_COMMENTS_MAX_PAGES {
            let args = IssueCommentsArguments {
                id: issue_id.to_string(),
                first: Some(ISSUE_COMMENTS_FETCH_PAGE_SIZE),
                after: cursor.clone(),
            };
            let op = IssueCommentsQuery::build(args);
            let resp = client.run(op).await?;
            let data = http::extract_data(resp)?;

            let issue = data
                .issue
                .ok_or_else(|| anyhow::anyhow!("Issue not found: {}", issue_id))?;

            // Every page repeats the identifier; capture it once.
            if identifier.is_none() {
                identifier = Some(issue.identifier.clone());
            }

            all_comments.extend(
                issue
                    .comments
                    .nodes
                    .into_iter()
                    .map(|c| models::CommentSummary {
                        id: c.id.inner().to_string(),
                        body: c.body,
                        url: c.url,
                        created_at: c.created_at.0,
                        updated_at: c.updated_at.0,
                        parent_id: c.parent_id,
                        author_name: c.user.as_ref().map(|u| u.name.clone()),
                        author_email: c.user.as_ref().map(|u| u.email.clone()),
                    }),
            );

            // Done: sort chronologically and return.
            if !issue.comments.page_info.has_next_page {
                all_comments.sort_by(|a, b| a.created_at.cmp(&b.created_at));
                return Ok((identifier.unwrap_or_default(), all_comments));
            }

            cursor = issue.comments.page_info.end_cursor.clone();
            // Defensive: a next page without a cursor would loop forever.
            if cursor.is_none() {
                return Err(anyhow::anyhow!(
                    "Issue comments pagination for {} reported has_next_page=true without end_cursor",
                    issue_id
                ));
            }

            // Bail out rather than silently truncating a runaway thread.
            if page + 1 == ISSUE_COMMENTS_MAX_PAGES {
                return Err(anyhow::anyhow!(
                    "Issue comments pagination for {} exceeded {} pages",
                    issue_id,
                    ISSUE_COMMENTS_MAX_PAGES
                ));
            }
        }

        // The loop always returns or errors before exhausting its range.
        unreachable!("issue comments pagination loop must return or error")
    }
}
1037
#[cfg(test)]
mod tests {
    use super::parse_identifier;

    #[test]
    fn parse_plain_uppercase() {
        assert_eq!(parse_identifier("ENG-245"), Some(("ENG".into(), 245)));
    }

    // Lowercase input is uppercased before matching.
    #[test]
    fn parse_lowercase_normalizes() {
        assert_eq!(parse_identifier("eng-245"), Some(("ENG".into(), 245)));
    }

    // The identifier may be embedded anywhere in the input, e.g. a URL.
    #[test]
    fn parse_from_url() {
        assert_eq!(
            parse_identifier("https://linear.app/foo/issue/eng-245/slug"),
            Some(("ENG".into(), 245))
        );
    }

    // Inputs without a full TEAM-123 pattern (team key of 2+ letters plus
    // at least one digit) must not parse.
    #[test]
    fn parse_invalid_returns_none() {
        assert_eq!(parse_identifier("invalid"), None);
        assert_eq!(parse_identifier("ENG-"), None);
        assert_eq!(parse_identifier("ENG"), None);
        assert_eq!(parse_identifier("123-456"), None);
    }
}
1069}