1use regex::Regex;
2use serde_json::{json, Value};
3use std::collections::{HashMap, HashSet};
4use std::env;
5use std::sync::LazyLock;
6use std::time::Duration;
7
8use crate::compact;
9use crate::git_refs;
10
// Base URL for the GitHub REST (v3) API.
const GITHUB_API: &str = "https://api.github.com";
// NOTE(review): consumed by callers outside this file — presumably the
// estimated-size threshold past which output is considered overflowing,
// and the preview size emitted in that case. Confirm against callers.
pub const OVERFLOW_LIMIT: usize = 100_000;
pub const OVERFLOW_PREVIEW: usize = 40_000;
// Cap on the number of related issue/PR refs attached to a result.
const MAX_RELATED: usize = 10;
// Bookend pagination for issue comments: pages fetched from the front / back.
const COMMENT_HEAD_PAGES: usize = 5;
const COMMENT_TAIL_PAGES: usize = 5;
// Bookend pagination for the issue timeline: pages fetched from the front / back.
const TIMELINE_HEAD_PAGES: usize = 3;
const TIMELINE_TAIL_PAGES: usize = 2;
25
// Captures "owner/repo" from a github.com HTML URL (e.g. an issue's html_url).
static URL_RE: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^https://github\.com/([^/]+/[^/]+)/").unwrap());

// Captures "owner/repo" from an SSH-style git remote: git@github.com:owner/repo(.git)
static GIT_SSH_RE: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^git@github\.com:([^/]+/[^/]+?)(?:\.git)?$").unwrap());

// Captures "owner/repo" from an HTTP(S) git remote: http(s)://github.com/owner/repo(.git)
static GIT_HTTPS_RE: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^https?://github\.com/([^/]+/[^/]+?)(?:\.git)?$").unwrap());
34
// Shared HTTP agent (30s timeout), reused across all requests so connections
// can be pooled.
static AGENT: LazyLock<ureq::Agent> = LazyLock::new(|| {
    ureq::AgentBuilder::new()
        .timeout(Duration::from_secs(30))
        .build()
});
41
42pub fn estimate_json_size(val: &Value) -> usize {
44 match val {
45 Value::Null => 4,
46 Value::Bool(b) => {
47 if *b {
48 4
49 } else {
50 5
51 }
52 }
53 Value::Number(n) => {
54 let s = n.to_string();
56 s.len()
57 }
58 Value::String(s) => s.len() + 2, Value::Array(arr) => 2 + arr.iter().map(|v| estimate_json_size(v) + 1).sum::<usize>(),
60 Value::Object(map) => {
61 2 + map
62 .iter()
63 .map(|(k, v)| k.len() + 3 + estimate_json_size(v) + 1)
64 .sum::<usize>()
65 }
66 }
67}
68
/// Reads a GitHub API token from the environment.
///
/// `GITHUB_TOKEN` takes priority; `GH_TOKEN` is consulted only when
/// `GITHUB_TOKEN` is unset. A set-but-empty variable yields `None`
/// (and, matching the original precedence, an empty `GITHUB_TOKEN`
/// does NOT fall through to `GH_TOKEN`).
fn auth_token() -> Option<String> {
    let token = env::var("GITHUB_TOKEN").or_else(|_| env::var("GH_TOKEN"));
    match token {
        Ok(t) if !t.is_empty() => Some(t),
        _ => None,
    }
}
84
/// Returns true when a non-empty GitHub token is available in the environment.
pub fn has_git_token() -> bool {
    auth_token().is_some()
}
89
90pub fn detect_git_repo(cwd: &str) -> Option<String> {
92 let output = std::process::Command::new("git")
93 .args(["remote", "get-url", "origin"])
94 .current_dir(cwd)
95 .output()
96 .ok()?;
97
98 if !output.status.success() {
99 return None;
100 }
101
102 let url = String::from_utf8_lossy(&output.stdout).trim().to_string();
103
104 if let Some(cap) = GIT_SSH_RE.captures(&url) {
105 return Some(cap[1].to_string());
106 }
107 if let Some(cap) = GIT_HTTPS_RE.captures(&url) {
108 return Some(cap[1].to_string());
109 }
110 None
111}
112
113fn gh_get(endpoint: &str) -> Result<Value, String> {
118 let url = if endpoint.starts_with("http") {
119 endpoint.to_string()
120 } else {
121 format!("{}/{}", GITHUB_API, endpoint)
122 };
123
124 let mut req = AGENT
125 .get(&url)
126 .set("Accept", "application/vnd.github+json")
127 .set("User-Agent", "mcp-methods");
128
129 if let Some(token) = auth_token() {
130 req = req.set("Authorization", &format!("Bearer {}", token));
131 }
132
133 match req.call() {
134 Ok(resp) => resp
135 .into_json::<Value>()
136 .map_err(|e| format!("JSON parse error: {}", e)),
137 Err(ureq::Error::Status(404, _)) => Err(format!("Not found: {}", endpoint)),
138 Err(ureq::Error::Status(403, resp)) => {
139 let body = resp.into_string().unwrap_or_default();
140 if body.to_lowercase().contains("rate limit") {
141 Err(
142 "GitHub API rate limit exceeded. Set GITHUB_TOKEN or GH_TOKEN env var for higher limits."
143 .into(),
144 )
145 } else {
146 Err(format!("GitHub API forbidden: {}", body))
147 }
148 }
149 Err(ureq::Error::Status(code, resp)) => {
150 let body = resp.into_string().unwrap_or_default();
151 Err(format!("GitHub API error ({}): {}", code, body))
152 }
153 Err(e) => Err(format!("GitHub API error: {}", e)),
154 }
155}
156
157fn gh_graphql(query: &str, variables: Value) -> Result<Value, String> {
158 let token = auth_token().ok_or(
159 "GitHub token required for Discussions (GraphQL API). \
160 Set GITHUB_TOKEN or GH_TOKEN.",
161 )?;
162
163 let body = json!({
164 "query": query,
165 "variables": variables,
166 });
167
168 let resp = AGENT
169 .post("https://api.github.com/graphql")
170 .set("Authorization", &format!("Bearer {}", token))
171 .set("User-Agent", "mcp-methods")
172 .send_json(&body)
173 .map_err(|e| match e {
174 ureq::Error::Status(401, _) => {
175 "GitHub token is invalid or expired. Check GITHUB_TOKEN / GH_TOKEN.".to_string()
176 }
177 ureq::Error::Status(code, resp) => {
178 let body = resp.into_string().unwrap_or_default();
179 format!("GitHub GraphQL error ({}): {}", code, body)
180 }
181 other => format!("GitHub GraphQL error: {}", other),
182 })?;
183
184 let result: Value = resp
185 .into_json()
186 .map_err(|e| format!("GraphQL JSON parse error: {}", e))?;
187
188 if let Some(errors) = result.get("errors").and_then(|v| v.as_array()) {
190 if let Some(first) = errors.first() {
191 let msg = first
192 .get("message")
193 .and_then(|m| m.as_str())
194 .unwrap_or("Unknown GraphQL error");
195 return Err(format!("GitHub GraphQL error: {}", msg));
196 }
197 }
198
199 result
200 .get("data")
201 .cloned()
202 .ok_or_else(|| "GitHub GraphQL: no 'data' in response".to_string())
203}
204
/// Extracts the URL tagged `rel="<rel>"` from an HTTP `Link` header.
///
/// Entries are comma-separated and look like `<https://...>; rel="next"`.
/// Only the first entry carrying the requested relation is considered;
/// if that entry is malformed (missing angle brackets), `None` is returned.
fn parse_link_rel(link: &str, rel: &str) -> Option<String> {
    let tag = format!("rel=\"{}\"", rel);
    let part = link.split(',').find(|part| part.contains(&tag))?;
    let start = part.find('<')? + 1;
    let end = part.find('>')?;
    Some(part[start..end].to_string())
}
216
/// Convenience wrapper: the `rel="next"` URL from a `Link` header, if any.
fn parse_link_next(link: &str) -> Option<String> {
    parse_link_rel(link, "next")
}
220
221fn parse_last_page(link: &str) -> Option<usize> {
223 let url = parse_link_rel(link, "last")?;
224 url.split('?').nth(1)?.split('&').find_map(|param| {
226 let (k, v) = param.split_once('=')?;
227 if k == "page" {
228 v.parse().ok()
229 } else {
230 None
231 }
232 })
233}
234
/// Fetches every page of a list endpoint (no head/tail bookend limits).
fn gh_get_paginated(endpoint: &str) -> Result<Vec<Value>, String> {
    gh_get_paginated_bookends(endpoint, 0, 0)
}
238
239fn gh_get_page(url: &str) -> Result<Vec<Value>, String> {
241 let mut req = AGENT
242 .get(url)
243 .set("Accept", "application/vnd.github+json")
244 .set("User-Agent", "mcp-methods");
245 if let Some(token) = auth_token() {
246 req = req.set("Authorization", &format!("Bearer {}", token));
247 }
248 let resp = req.call().map_err(|e| format!("GitHub API error: {}", e))?;
249 let items: Value = resp
250 .into_json()
251 .map_err(|e| format!("JSON parse error: {}", e))?;
252 match items {
253 Value::Array(arr) => Ok(arr),
254 _ => Ok(vec![]),
255 }
256}
257
/// Fetches a paginated list endpoint with "bookend" limits: up to `head`
/// pages from the front plus up to `tail` pages from the back.
///
/// `head == 0 && tail == 0` means unlimited — follow `rel="next"` links to
/// the end. When middle pages are skipped, a `{"_skipped_middle": true}`
/// sentinel object is appended so callers can render an omission marker.
/// Tail-page fetch failures are silently ignored (best-effort).
fn gh_get_paginated_bookends(
    endpoint: &str,
    head: usize,
    tail: usize,
) -> Result<Vec<Value>, String> {
    let mut url = format!("{}/{}", GITHUB_API, endpoint);
    let mut all_items: Vec<Value> = Vec::new();
    let mut pages_fetched: usize = 0;
    let unlimited = head == 0 && tail == 0;
    let max_head = if unlimited { usize::MAX } else { head };
    // Total page count, parsed from the first response's rel="last" link.
    let mut last_page: Option<usize> = None;
    let mut skipped = false;

    loop {
        let mut req = AGENT
            .get(&url)
            .set("Accept", "application/vnd.github+json")
            .set("User-Agent", "mcp-methods");

        if let Some(token) = auth_token() {
            req = req.set("Authorization", &format!("Bearer {}", token));
        }

        let resp = match req.call() {
            Ok(r) => r,
            Err(ureq::Error::Status(403, resp)) => {
                let body = resp.into_string().unwrap_or_default();
                if body.to_lowercase().contains("rate limit") {
                    return Err(
                        "GitHub API rate limit exceeded. Set GITHUB_TOKEN or GH_TOKEN env var for higher limits."
                            .into(),
                    );
                }
                return Err(format!("GitHub API forbidden: {}", body));
            }
            Err(e) => return Err(format!("GitHub API error: {}", e)),
        };

        // Capture the Link header before consuming the response body.
        let link_header: Option<String> = resp.header("link").map(String::from);
        let items: Value = resp
            .into_json()
            .map_err(|e| format!("JSON parse error: {}", e))?;

        if let Value::Array(arr) = items {
            all_items.extend(arr);
        }

        pages_fetched += 1;

        // Only the first response is consulted for the total page count.
        if pages_fetched == 1 && last_page.is_none() {
            last_page = link_header.as_deref().and_then(parse_last_page);
        }

        if pages_fetched >= max_head {
            break;
        }

        match link_header.as_deref().and_then(parse_link_next) {
            Some(u) => url = u,
            None => break,
        }
    }

    // Fetch tail pages directly by page number; this requires having
    // learned the total page count from the first response.
    if !unlimited && tail > 0 {
        if let Some(total) = last_page {
            // Start after the head pages, but no earlier than total - tail + 1.
            let tail_start = (head + 1).max(total.saturating_sub(tail) + 1);
            if tail_start <= total {
                // A gap exists only if the tail does not butt up against the head.
                skipped = tail_start > head + 1;
                let base = format!("{}/{}", GITHUB_API, endpoint);
                let sep = if base.contains('?') { '&' } else { '?' };
                for page_num in tail_start..=total {
                    let page_url = format!("{}{}page={}", base, sep, page_num);
                    if let Ok(items) = gh_get_page(&page_url) {
                        all_items.extend(items);
                    }
                }
            }
        }
    }

    if skipped {
        all_items.push(json!({"_skipped_middle": true}));
    }

    Ok(all_items)
}
351
352fn json_str(val: &Value, key: &str) -> String {
357 val.get(key)
358 .and_then(|v| v.as_str())
359 .unwrap_or("")
360 .to_string()
361}
362
363fn json_author(val: &Value) -> String {
364 val.get("user")
365 .and_then(|u| u.get("login"))
366 .and_then(|v| v.as_str())
367 .unwrap_or("(deleted)")
368 .to_string()
369}
370
371fn json_body(val: &Value) -> Value {
372 match val.get("body").and_then(|v| v.as_str()) {
373 Some(s) => {
374 let trimmed = s.trim();
375 if trimmed.is_empty() {
376 Value::Null
377 } else {
378 Value::String(trimmed.to_string())
379 }
380 }
381 None => Value::Null,
382 }
383}
384
385fn parse_timeline(timeline: &[Value], repo: &str) -> Vec<Value> {
386 let mut referenced_by = Vec::new();
387 for event in timeline {
388 let etype = event.get("event").and_then(|v| v.as_str()).unwrap_or("");
389 match etype {
390 "cross-referenced" => {
391 let source = event
392 .get("source")
393 .and_then(|s| s.get("issue"))
394 .unwrap_or(&Value::Null);
395 if let Some(source_number) = source.get("number").and_then(|v| v.as_u64()) {
396 let src_url = source
397 .get("html_url")
398 .and_then(|v| v.as_str())
399 .unwrap_or("");
400 let src_repo = URL_RE
401 .captures(src_url)
402 .map(|c| c[1].to_string())
403 .unwrap_or_else(|| repo.to_string());
404 let is_pr = source.get("pull_request").is_some();
405 referenced_by.push(json!({
406 "event": "cross-reference",
407 "source_type": if is_pr { "pull_request" } else { "issue" },
408 "source_number": source_number,
409 "source_repo": src_repo,
410 "source_title": json_str(source, "title"),
411 "author": event.get("actor")
412 .and_then(|a| a.get("login"))
413 .and_then(|v| v.as_str())
414 .unwrap_or("(deleted)"),
415 "created_at": json_str(event, "created_at"),
416 }));
417 }
418 }
419 "referenced" => {
420 let sha = json_str(event, "commit_id");
421 referenced_by.push(json!({
422 "event": "commit-reference",
423 "commit_sha": &sha[..sha.len().min(10)],
424 "author": event.get("actor")
425 .and_then(|a| a.get("login"))
426 .and_then(|v| v.as_str())
427 .unwrap_or("(deleted)"),
428 "created_at": json_str(event, "created_at"),
429 }));
430 }
431 _ => {}
432 }
433 }
434 referenced_by
435}
436
437fn build_inline_comment(rc: &Value, reply_map: &HashMap<u64, Vec<&Value>>) -> Value {
438 let rc_id = rc.get("id").and_then(|v| v.as_u64()).unwrap_or(0);
439 let replies: Vec<Value> = reply_map
440 .get(&rc_id)
441 .map(|rps| {
442 rps.iter()
443 .map(|rp| {
444 json!({
445 "author": json_author(rp),
446 "created_at": json_str(rp, "created_at"),
447 "body": json_body(rp),
448 })
449 })
450 .collect()
451 })
452 .unwrap_or_default();
453
454 json!({
455 "author": json_author(rc),
456 "path": json_str(rc, "path"),
457 "line": rc.get("line").or_else(|| rc.get("original_line")).cloned().unwrap_or(Value::Null),
458 "diff_hunk": json_str(rc, "diff_hunk"),
459 "body": json_body(rc),
460 "created_at": json_str(rc, "created_at"),
461 "replies": replies,
462 })
463}
464
465fn build_reviews(reviews_raw: &[Value], review_comments_raw: &[Value]) -> Vec<Value> {
466 let mut by_review: HashMap<Option<u64>, Vec<&Value>> = HashMap::new();
467 let mut reply_map: HashMap<u64, Vec<&Value>> = HashMap::new();
468
469 for rc in review_comments_raw {
470 let rid = rc.get("pull_request_review_id").and_then(|v| v.as_u64());
471 if rc.get("in_reply_to_id").and_then(|v| v.as_u64()).is_some() {
472 let reply_to = rc["in_reply_to_id"].as_u64().unwrap();
473 reply_map.entry(reply_to).or_default().push(rc);
474 } else {
475 by_review.entry(rid).or_default().push(rc);
476 }
477 }
478
479 let mut reviews = Vec::new();
480 let mut known_review_ids = HashSet::new();
481
482 for rev in reviews_raw {
483 let rev_id = rev.get("id").and_then(|v| v.as_u64()).unwrap_or(0);
484 known_review_ids.insert(rev_id);
485
486 let rev_body = json_body(rev);
487 let rev_state = json_str(rev, "state");
488
489 if rev_state == "COMMENTED" && rev_body.is_null() && !by_review.contains_key(&Some(rev_id))
490 {
491 continue;
492 }
493
494 let inlines: Vec<Value> = by_review
495 .get(&Some(rev_id))
496 .map(|rcs| {
497 rcs.iter()
498 .map(|rc| build_inline_comment(rc, &reply_map))
499 .collect()
500 })
501 .unwrap_or_default();
502
503 reviews.push(json!({
504 "author": json_author(rev),
505 "author_association": json_str(rev, "author_association"),
506 "state": rev_state,
507 "submitted_at": json_str(rev, "submitted_at"),
508 "body": rev_body,
509 "inline_comments": inlines,
510 }));
511 }
512
513 for (rid, rcs) in &by_review {
515 if let Some(id) = rid {
516 if known_review_ids.contains(id) {
517 continue;
518 }
519 }
520 for rc in rcs {
521 reviews.push(json!({
522 "author": json_author(rc),
523 "author_association": json_str(rc, "author_association"),
524 "state": "COMMENTED",
525 "submitted_at": json_str(rc, "created_at"),
526 "body": Value::Null,
527 "inline_comments": vec![build_inline_comment(rc, &reply_map)],
528 }));
529 }
530 }
531
532 reviews
533}
534
// GraphQL query for a single Discussion: metadata plus the first 100
// comments and the first 100 replies per comment (no deeper pagination).
const DISCUSSION_QUERY: &str = r#"query($owner: String!, $repo: String!, $number: Int!) {
  repository(owner: $owner, name: $repo) {
    discussion(number: $number) {
      number
      title
      body
      author { login }
      authorAssociation
      createdAt
      updatedAt
      url
      closed
      locked
      answer { id }
      labels(first: 20) { nodes { name } }
      category { name }
      comments(first: 100) {
        totalCount
        nodes {
          author { login }
          authorAssociation
          createdAt
          body
          isAnswer
          replies(first: 100) {
            nodes {
              author { login }
              authorAssociation
              createdAt
              body
            }
          }
        }
      }
    }
  }
}"#;
576
577fn gql_author(val: &Value) -> String {
578 val.get("author")
579 .and_then(|u| u.get("login"))
580 .and_then(|v| v.as_str())
581 .unwrap_or("(deleted)")
582 .to_string()
583}
584
585fn gql_body(val: &Value) -> Value {
586 match val.get("body").and_then(|v| v.as_str()) {
587 Some(s) => {
588 let trimmed = s.trim();
589 if trimmed.is_empty() {
590 Value::Null
591 } else {
592 Value::String(trimmed.to_string())
593 }
594 }
595 None => Value::Null,
596 }
597}
598
/// Fetches a GitHub Discussion via GraphQL and normalizes it into the same
/// JSON shape used for issues/PRs (author, labels, comments with replies).
///
/// Only the first 100 comments (and first 100 replies each) are included —
/// DISCUSSION_QUERY does not paginate further. Errors when `repo` is not an
/// `owner/name` slug or the discussion does not exist.
fn fetch_discussion_graphql(repo: &str, number: u64) -> Result<Value, String> {
    let (owner, name) = repo
        .split_once('/')
        .ok_or_else(|| "Invalid repo format for GraphQL".to_string())?;

    let data = gh_graphql(
        DISCUSSION_QUERY,
        json!({"owner": owner, "repo": name, "number": number as i64}),
    )?;

    // GraphQL returns `"discussion": null` (not an error) for an unknown
    // number, so check both absence and explicit null.
    let disc = data
        .get("repository")
        .and_then(|r| r.get("discussion"))
        .ok_or_else(|| format!("Discussion #{} not found in {}", number, repo))?;

    if disc.is_null() {
        return Err(format!("Discussion #{} not found in {}", number, repo));
    }

    let closed = disc
        .get("closed")
        .and_then(|v| v.as_bool())
        .unwrap_or(false);
    // An answered discussion carries a non-null `answer` node.
    let has_answer = disc.get("answer").map(|v| !v.is_null()).unwrap_or(false);

    let labels: Vec<Value> = disc
        .get("labels")
        .and_then(|l| l.get("nodes"))
        .and_then(|v| v.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|l| {
                    l.get("name")
                        .and_then(|n| n.as_str())
                        .map(|s| Value::String(s.to_string()))
                })
                .collect()
        })
        .unwrap_or_default();

    let category = disc
        .get("category")
        .and_then(|c| c.get("name"))
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();

    let comment_count = disc
        .get("comments")
        .and_then(|c| c.get("totalCount"))
        .and_then(|v| v.as_u64())
        .unwrap_or(0);

    // Flatten comment nodes (and their reply nodes) into plain objects,
    // tagging the accepted answer and attaching replies only when present.
    let comments: Vec<Value> = disc
        .get("comments")
        .and_then(|c| c.get("nodes"))
        .and_then(|v| v.as_array())
        .map(|nodes| {
            nodes
                .iter()
                .map(|c| {
                    let replies: Vec<Value> = c
                        .get("replies")
                        .and_then(|r| r.get("nodes"))
                        .and_then(|v| v.as_array())
                        .map(|rps| {
                            rps.iter()
                                .map(|rp| {
                                    json!({
                                        "author": gql_author(rp),
                                        "author_association": rp.get("authorAssociation")
                                            .and_then(|v| v.as_str()).unwrap_or(""),
                                        "created_at": rp.get("createdAt")
                                            .and_then(|v| v.as_str()).unwrap_or(""),
                                        "body": gql_body(rp),
                                    })
                                })
                                .collect()
                        })
                        .unwrap_or_default();

                    let is_answer = c.get("isAnswer").and_then(|v| v.as_bool()).unwrap_or(false);

                    let mut comment = json!({
                        "author": gql_author(c),
                        "author_association": c.get("authorAssociation")
                            .and_then(|v| v.as_str()).unwrap_or(""),
                        "created_at": c.get("createdAt")
                            .and_then(|v| v.as_str()).unwrap_or(""),
                        "body": gql_body(c),
                    });

                    if is_answer {
                        comment["is_answer"] = Value::Bool(true);
                    }
                    if !replies.is_empty() {
                        comment["replies"] = Value::Array(replies);
                    }

                    comment
                })
                .collect()
        })
        .unwrap_or_default();

    let mut result = json!({
        "type": "discussion",
        "number": number,
        "repo": repo,
        "title": disc.get("title").and_then(|v| v.as_str()).unwrap_or(""),
        "state": if closed { "closed" } else { "open" },
        "author": gql_author(disc),
        "author_association": disc.get("authorAssociation")
            .and_then(|v| v.as_str()).unwrap_or(""),
        "created_at": disc.get("createdAt").and_then(|v| v.as_str()).unwrap_or(""),
        "updated_at": disc.get("updatedAt").and_then(|v| v.as_str()).unwrap_or(""),
        "url": disc.get("url").and_then(|v| v.as_str()).unwrap_or(""),
        "labels": labels,
        "body": gql_body(disc),
        "comment_count": comment_count,
        "comments": comments,
    });

    // Optional fields: attach only when meaningful.
    if !category.is_empty() {
        result["category"] = Value::String(category);
    }
    if has_answer {
        result["answered"] = Value::Bool(true);
    }

    Ok(result)
}
732
733fn fetch_gh_discussion_internal(
736 repo: &str,
737 number: u64,
738) -> Result<(String, Option<String>), String> {
739 let mut parent = fetch_discussion_graphql(repo, number)?;
740
741 let seen: HashSet<(String, u64)> = [(repo.to_string(), number)].into();
743 let all_refs = collect_refs_from_discussion(&parent, repo);
744 let mut refs: Vec<(String, u64)> = all_refs.difference(&seen).cloned().collect();
745 refs.sort();
746 refs.truncate(MAX_RELATED);
747
748 if !refs.is_empty() {
749 let ref_list: Vec<Value> = refs
750 .iter()
751 .map(|(r, n)| json!({"repo": r, "number": n}))
752 .collect();
753 parent["related_refs"] = Value::Array(ref_list);
754 }
755
756 let parent_json = serde_json::to_string(&parent).map_err(|e| format!("JSON error: {}", e))?;
758 let cache_json = serde_json::to_string(&json!({"_n": 0})).unwrap();
759 let (compacted, cache_out) =
760 compact::compact_discussion(&parent_json, Some(&cache_json), None, None)
761 .map_err(|e| format!("Compaction error: {}", e))?;
762
763 Ok((compacted, cache_out))
764}
765
/// Fetches one issue or PR via the REST API, including comments
/// (bookend-paginated) and, when requested/applicable, timeline
/// cross-references, PR metadata, reviews, review comments, and changed
/// files.
///
/// The main issue document is fetched first (it also tells us whether the
/// number is a PR); all secondary requests then run concurrently on scoped
/// threads.
fn fetch_single_discussion(
    repo: &str,
    number: u64,
    include_files: bool,
    include_timeline: bool,
) -> Result<Value, String> {
    // The issues endpoint serves both issues and PRs; PRs carry a
    // `pull_request` stub.
    let issue = gh_get(&format!("repos/{}/issues/{}", repo, number))?;
    let is_pr = issue.get("pull_request").is_some();

    let mut result = json!({
        "type": if is_pr { "pull_request" } else { "issue" },
        "number": number,
        "repo": repo,
        "title": json_str(&issue, "title"),
        "state": json_str(&issue, "state"),
        "author": json_author(&issue),
        "author_association": json_str(&issue, "author_association"),
        "created_at": json_str(&issue, "created_at"),
        "updated_at": json_str(&issue, "updated_at"),
        "url": json_str(&issue, "html_url"),
        "labels": issue.get("labels")
            .and_then(|v| v.as_array())
            .map(|arr| arr.iter()
                .filter_map(|l| l.get("name").and_then(|n| n.as_str()).map(|s| Value::String(s.to_string())))
                .collect::<Vec<_>>())
            .unwrap_or_default(),
        "body": json_body(&issue),
        "comment_count": issue.get("comments").and_then(|v| v.as_u64()).unwrap_or(0),
    });

    // Scoped threads let the closures borrow `repo`/`number` directly.
    std::thread::scope(|s| {
        let comments_h = s.spawn(|| {
            gh_get_paginated_bookends(
                &format!("repos/{}/issues/{}/comments", repo, number),
                COMMENT_HEAD_PAGES,
                COMMENT_TAIL_PAGES,
            )
        });
        let timeline_h = if include_timeline {
            Some(s.spawn(|| {
                gh_get_paginated_bookends(
                    &format!("repos/{}/issues/{}/timeline", repo, number),
                    TIMELINE_HEAD_PAGES,
                    TIMELINE_TAIL_PAGES,
                )
            }))
        } else {
            None
        };
        // PR-only requests: metadata, reviews, review comments, changed files.
        let pr_h = if is_pr {
            Some(s.spawn(|| gh_get(&format!("repos/{}/pulls/{}", repo, number))))
        } else {
            None
        };
        let reviews_h = if is_pr {
            Some(s.spawn(|| gh_get_paginated(&format!("repos/{}/pulls/{}/reviews", repo, number))))
        } else {
            None
        };
        let review_comments_h = if is_pr {
            Some(s.spawn(|| gh_get_paginated(&format!("repos/{}/pulls/{}/comments", repo, number))))
        } else {
            None
        };
        let files_h = if is_pr && include_files {
            Some(s.spawn(|| gh_get_paginated(&format!("repos/{}/pulls/{}/files", repo, number))))
        } else {
            None
        };

        // Comment fetch errors degrade to an empty list rather than failing
        // the whole fetch.
        let comments = comments_h.join().unwrap().unwrap_or_default();
        result["comments"] = Value::Array(
            comments
                .iter()
                .map(|c| {
                    // Sentinel inserted by bookend pagination for skipped pages.
                    if c.get("_skipped_middle").is_some() {
                        return json!({
                            "author": "[system]",
                            "body": "--- older comments omitted (middle pages skipped) ---",
                        });
                    }
                    json!({
                        "author": json_author(c),
                        "author_association": json_str(c, "author_association"),
                        "created_at": json_str(c, "created_at"),
                        "body": json_body(c),
                    })
                })
                .collect(),
        );

        if let Some(handle) = timeline_h {
            if let Ok(timeline) = handle.join().unwrap() {
                let referenced_by = parse_timeline(&timeline, repo);
                if !referenced_by.is_empty() {
                    result["referenced_by"] = Value::Array(referenced_by);
                }
            }
        }

        if is_pr {
            if let Some(handle) = pr_h {
                if let Ok(pr_data) = handle.join().unwrap() {
                    let merged = pr_data
                        .get("merged")
                        .and_then(|v| v.as_bool())
                        .unwrap_or(false);
                    result["merged"] = Value::Bool(merged);
                    if merged {
                        result["merged_by"] = pr_data
                            .get("merged_by")
                            .and_then(|u| u.get("login"))
                            .cloned()
                            .unwrap_or(Value::Null);
                        result["merged_at"] =
                            pr_data.get("merged_at").cloned().unwrap_or(Value::Null);
                    }
                    result["base"] = Value::String(
                        pr_data
                            .get("base")
                            .and_then(|b| b.get("ref"))
                            .and_then(|v| v.as_str())
                            .unwrap_or("")
                            .to_string(),
                    );
                    result["head"] = Value::String(
                        pr_data
                            .get("head")
                            .and_then(|h| h.get("label"))
                            .and_then(|v| v.as_str())
                            .unwrap_or("")
                            .to_string(),
                    );
                    result["additions"] =
                        pr_data.get("additions").cloned().unwrap_or(Value::from(0));
                    result["deletions"] =
                        pr_data.get("deletions").cloned().unwrap_or(Value::from(0));
                    result["changed_files"] = pr_data
                        .get("changed_files")
                        .cloned()
                        .unwrap_or(Value::from(0));
                }
            }

            let reviews = reviews_h
                .and_then(|h| h.join().ok())
                .and_then(|r| r.ok())
                .unwrap_or_default();
            let review_comments = review_comments_h
                .and_then(|h| h.join().ok())
                .and_then(|r| r.ok())
                .unwrap_or_default();
            result["reviews"] = Value::Array(build_reviews(&reviews, &review_comments));

            if let Some(handle) = files_h {
                let files = handle.join().unwrap().unwrap_or_default();
                result["files"] = Value::Array(
                    files
                        .iter()
                        .map(|f| {
                            json!({
                                "filename": json_str(f, "filename"),
                                "status": json_str(f, "status"),
                                "additions": f.get("additions").and_then(|v| v.as_u64()).unwrap_or(0),
                                "deletions": f.get("deletions").and_then(|v| v.as_u64()).unwrap_or(0),
                                "patch": f.get("patch").cloned().unwrap_or(Value::Null),
                            })
                        })
                        .collect(),
                );
            }
        }
    });

    Ok(result)
}
951
952fn iter_discussion_texts(result: &Value) -> Vec<&str> {
957 let mut texts = Vec::new();
958 if let Some(body) = result.get("body").and_then(|v| v.as_str()) {
959 if !body.is_empty() {
960 texts.push(body);
961 }
962 }
963 for field in &["comments", "reviews"] {
964 if let Some(arr) = result.get(*field).and_then(|v| v.as_array()) {
965 for item in arr {
966 if let Some(body) = item.get("body").and_then(|v| v.as_str()) {
967 if !body.is_empty() {
968 texts.push(body);
969 }
970 }
971 if let Some(replies) = item.get("replies").and_then(|v| v.as_array()) {
973 for rp in replies {
974 if let Some(body) = rp.get("body").and_then(|v| v.as_str()) {
975 if !body.is_empty() {
976 texts.push(body);
977 }
978 }
979 }
980 }
981 if let Some(inlines) = item.get("inline_comments").and_then(|v| v.as_array()) {
983 for ic in inlines {
984 if let Some(body) = ic.get("body").and_then(|v| v.as_str()) {
985 if !body.is_empty() {
986 texts.push(body);
987 }
988 }
989 if let Some(replies) = ic.get("replies").and_then(|v| v.as_array()) {
990 for rp in replies {
991 if let Some(body) = rp.get("body").and_then(|v| v.as_str()) {
992 if !body.is_empty() {
993 texts.push(body);
994 }
995 }
996 }
997 }
998 }
999 }
1000 }
1001 }
1002 }
1003 texts
1004}
1005
1006fn collect_refs_from_discussion(result: &Value, default_repo: &str) -> HashSet<(String, u64)> {
1007 let mut refs = HashSet::new();
1008 for text in iter_discussion_texts(result) {
1009 for (repo, num) in git_refs::extract_github_refs(text, default_repo) {
1010 refs.insert((repo, num));
1011 }
1012 }
1013 if let Some(referenced_by) = result.get("referenced_by").and_then(|v| v.as_array()) {
1014 for ref_item in referenced_by {
1015 if ref_item.get("event").and_then(|v| v.as_str()) == Some("cross-reference") {
1016 if let Some(source_number) = ref_item.get("source_number").and_then(|v| v.as_u64())
1017 {
1018 let source_repo = ref_item
1019 .get("source_repo")
1020 .and_then(|v| v.as_str())
1021 .unwrap_or(default_repo)
1022 .to_string();
1023 refs.insert((source_repo, source_number));
1024 }
1025 }
1026 }
1027 }
1028 refs
1029}
1030
1031pub fn fetch_issue_internal(repo: &str, number: u64) -> Result<(String, Option<String>), String> {
1040 if !has_git_token() {
1041 return Err(
1042 "No GitHub token found. A token is required for fetching issues/PRs \
1043 (cross-references, higher rate limits).\n\n\
1044 Set the GITHUB_TOKEN or GH_TOKEN environment variable, or use \
1045 load_env() to load it from a .env file.\n\n\
1046 The token needs no special scopes — a classic PAT with default (no) \
1047 permissions works for public repos."
1048 .into(),
1049 );
1050 }
1051
1052 let mut parent = match fetch_single_discussion(repo, number, true, true) {
1054 Ok(val) => val,
1055 Err(e) if e.starts_with("Not found:") => {
1056 return match fetch_gh_discussion_internal(repo, number) {
1058 Ok(result) => Ok(result),
1059 Err(_) => Err(format!(
1060 "#{} not found in {} (checked Issues, PRs, and Discussions).",
1061 number, repo
1062 )),
1063 };
1064 }
1065 Err(e) => return Err(e),
1066 };
1067
1068 let seen: HashSet<(String, u64)> = [(repo.to_string(), number)].into();
1070 let all_refs = collect_refs_from_discussion(&parent, repo);
1071 let mut refs: Vec<(String, u64)> = all_refs.difference(&seen).cloned().collect();
1072 refs.sort();
1073 refs.truncate(MAX_RELATED);
1074
1075 if !refs.is_empty() {
1076 let ref_list: Vec<Value> = refs
1078 .iter()
1079 .map(|(r, n)| json!({"repo": r, "number": n}))
1080 .collect();
1081 parent["related_refs"] = Value::Array(ref_list);
1082 }
1083
1084 let parent_json = serde_json::to_string(&parent).map_err(|e| format!("JSON error: {}", e))?;
1086 let cache_json = serde_json::to_string(&json!({"_n": 0})).unwrap();
1087 let (compacted, cache_out) =
1088 compact::compact_discussion(&parent_json, Some(&cache_json), None, None)
1089 .map_err(|e| format!("Compaction error: {}", e))?;
1090
1091 Ok((compacted, cache_out))
1092}
1093
1094pub fn git_api_internal(repo: &str, path: &str, truncate_at: usize) -> String {
1099 if let Some(err) = git_refs::validate_repo(repo) {
1100 return err;
1101 }
1102
1103 let top_level = [
1104 "search/",
1105 "users/",
1106 "orgs/",
1107 "gists/",
1108 "rate_limit",
1109 "repos/",
1110 ];
1111 let url = if top_level.iter().any(|p| path.starts_with(p)) {
1112 format!("{}/{}", GITHUB_API, path)
1113 } else {
1114 format!("{}/repos/{}/{}", GITHUB_API, repo, path)
1115 };
1116
1117 match gh_get(&url) {
1118 Ok(data) => {
1119 let text = serde_json::to_string_pretty(&data).unwrap_or_default();
1120 if text.len() > truncate_at {
1121 format!(
1122 "{}\n\n... (truncated, refine your query)",
1123 &text[..compact::safe_byte_index(&text, truncate_at)]
1124 )
1125 } else {
1126 text
1127 }
1128 }
1129 Err(e) => e,
1130 }
1131}
1132
1133#[allow(clippy::too_many_arguments)]
1144pub fn github_issues_rust(
1145 repo: Option<&str>,
1146 number: Option<u64>,
1147 query: Option<&str>,
1148 kind: &str,
1149 state: &str,
1150 sort: Option<&str>,
1151 limit: usize,
1152 labels: Option<&str>,
1153) -> String {
1154 let repo_str = match repo {
1155 Some(r) => r.to_string(),
1156 None => match detect_git_repo(".") {
1157 Some(r) => r,
1158 None => {
1159 return "No repo specified and could not auto-detect from git remote.".to_string()
1160 }
1161 },
1162 };
1163 if let Some(err) = git_refs::validate_repo(&repo_str) {
1164 return err;
1165 }
1166
1167 match (number, query) {
1168 (Some(num), _) => match fetch_issue_internal(&repo_str, num) {
1169 Ok((text, _cache)) => text,
1170 Err(e) => e,
1171 },
1172 (None, Some(q)) => search_issues_dispatch(&repo_str, q, kind, state, sort, limit, labels),
1173 (None, None) => list_issues_internal(
1174 &repo_str,
1175 kind,
1176 state,
1177 sort.unwrap_or("created"),
1178 limit,
1179 labels,
1180 ),
1181 }
1182}
1183
/// Builds GitHub search qualifiers to append to a user query.
///
/// Always emits ` repo:…`; adds ` type:…` for "issue"/"pr", ` state:…` for
/// "open"/"closed", and one ` label:…` per comma-separated label (quoted
/// when the label contains a space). Unrecognized kind/state values add
/// nothing.
fn build_search_qualifiers(repo: &str, kind: &str, state: &str, labels: Option<&str>) -> String {
    let mut q = format!(" repo:{}", repo);

    if kind == "issue" {
        q.push_str(" type:issue");
    } else if kind == "pr" {
        q.push_str(" type:pr");
    }

    if state == "open" {
        q.push_str(" state:open");
    } else if state == "closed" {
        q.push_str(" state:closed");
    }

    for label in labels.unwrap_or("").split(',') {
        let label = label.trim();
        if label.is_empty() {
            continue;
        }
        if label.contains(' ') {
            q.push_str(&format!(" label:\"{}\"", label));
        } else {
            q.push_str(&format!(" label:{}", label));
        }
    }

    q
}
1215
1216fn search_issues_internal(
1218 repo: &str,
1219 user_query: &str,
1220 kind: &str,
1221 state: &str,
1222 sort: Option<&str>,
1223 limit: usize,
1224 labels: Option<&str>,
1225) -> String {
1226 let q = format!(
1227 "{}{}",
1228 user_query,
1229 build_search_qualifiers(repo, kind, state, labels)
1230 );
1231 let per_page = limit.min(100);
1232
1233 let mut req = AGENT
1234 .get(&format!("{}/search/issues", GITHUB_API))
1235 .set("Accept", "application/vnd.github+json")
1236 .set("User-Agent", "mcp-methods")
1237 .query("q", &q)
1238 .query("per_page", &per_page.to_string());
1239
1240 if let Some(s) = sort {
1241 req = req.query("sort", s);
1242 }
1243 if let Some(token) = auth_token() {
1246 req = req.set("Authorization", &format!("Bearer {}", token));
1247 }
1248
1249 match req.call() {
1250 Ok(resp) => {
1251 let data: Value = match resp.into_json() {
1252 Ok(v) => v,
1253 Err(e) => return format!("JSON parse error: {}", e),
1254 };
1255 format_search_results(repo, user_query, &data)
1256 }
1257 Err(ureq::Error::Status(422, resp)) => {
1258 let body = resp.into_string().unwrap_or_default();
1259 format!("GitHub search validation error: {}", body)
1260 }
1261 Err(ureq::Error::Status(403, resp)) => {
1262 let body = resp.into_string().unwrap_or_default();
1263 if body.to_lowercase().contains("rate limit") {
1264 "GitHub API rate limit exceeded. Set GITHUB_TOKEN or GH_TOKEN for higher limits."
1265 .to_string()
1266 } else {
1267 format!("GitHub API forbidden: {}", body)
1268 }
1269 }
1270 Err(e) => format!("GitHub search error: {}", e),
1271 }
1272}
1273
/// Search GitHub Discussions in `repo` using the GraphQL `search` API and
/// return a human-readable result listing (or an error string).
fn search_discussions_graphql(
    repo: &str,
    user_query: &str,
    state: &str,
    sort: Option<&str>,
    limit: usize,
    labels: Option<&str>,
) -> String {
    // Same qualifier syntax as the REST search; "discussion" matches no arm
    // in build_search_qualifiers, so no type qualifier is added — the type
    // is fixed by `type: DISCUSSION` in the GraphQL query below.
    let qualifiers = build_search_qualifiers(repo, "discussion", state, labels);
    let q = format!("{}{}", user_query, qualifiers);
    // GitHub caps page size at 100.
    let per_page = limit.min(100);

    // GraphQL `search` has no sort argument (results are relevance-ordered);
    // the parameter exists only for signature parity with the REST path.
    let _ = sort;

    let query = r#"query($q: String!, $first: Int!) {
  search(type: DISCUSSION, query: $q, first: $first) {
    discussionCount
    nodes {
      ... on Discussion {
        number
        title
        author { login }
        createdAt
        closed
        comments { totalCount }
        category { name }
        labels(first: 5) { nodes { name } }
        answer { id }
      }
    }
  }
}"#;

    let vars = json!({"q": q, "first": per_page as i64});

    // gh_graphql returns Err(String) with a ready-to-display message.
    let data = match gh_graphql(query, vars) {
        Ok(d) => d,
        Err(e) => return e,
    };

    // Total match count across all pages (only the first page is fetched).
    let total = data
        .get("search")
        .and_then(|s| s.get("discussionCount"))
        .and_then(|v| v.as_u64())
        .unwrap_or(0);
    let nodes = match data
        .get("search")
        .and_then(|s| s.get("nodes"))
        .and_then(|v| v.as_array())
    {
        Some(n) if !n.is_empty() => n,
        // NOTE: search_issues_dispatch matches on the "No discussion" prefix
        // of this message — keep it stable.
        _ => return format!("No discussion results for \"{}\" in {}.", user_query, repo),
    };

    let mut out = format!(
        "{} discussion{} (of {}) for \"{}\" in {}:\n",
        nodes.len(),
        if nodes.len() == 1 { "" } else { "s" },
        total,
        user_query,
        repo,
    );

    for d in nodes {
        let number = d.get("number").and_then(|v| v.as_u64()).unwrap_or(0);
        // Nodes that didn't match the `... on Discussion` fragment come back
        // empty; skip them rather than print a bogus "#0" row.
        if number == 0 {
            continue;
        }
        let title = d.get("title").and_then(|v| v.as_str()).unwrap_or("");
        let author = gql_author(d);
        // createdAt is ISO-8601; keep only the YYYY-MM-DD prefix.
        let date = d
            .get("createdAt")
            .and_then(|v| v.as_str())
            .and_then(|s| s.get(..10))
            .unwrap_or("");
        let comment_count = d
            .get("comments")
            .and_then(|c| c.get("totalCount"))
            .and_then(|v| v.as_u64())
            .unwrap_or(0);
        let comments = if comment_count > 0 {
            format!(
                ", {} comment{}",
                comment_count,
                if comment_count == 1 { "" } else { "s" }
            )
        } else {
            String::new()
        };
        let category = d
            .get("category")
            .and_then(|c| c.get("name"))
            .and_then(|v| v.as_str())
            .unwrap_or("");
        let cat_tag = if category.is_empty() {
            String::new()
        } else {
            format!(" [{}]", category)
        };
        // Up to 5 label names (the GraphQL query requests labels(first: 5)).
        let label_str: String = d
            .get("labels")
            .and_then(|l| l.get("nodes"))
            .and_then(|v| v.as_array())
            .map(|arr| {
                arr.iter()
                    .filter_map(|l| l.get("name").and_then(|n| n.as_str()))
                    .collect::<Vec<_>>()
                    .join(", ")
            })
            .filter(|s| !s.is_empty())
            .map(|s| format!(" [{}]", s))
            .unwrap_or_default();
        // A non-null `answer` means the discussion has an accepted answer.
        let answered = if d.get("answer").map(|v| !v.is_null()).unwrap_or(false) {
            " [answered]"
        } else {
            ""
        };

        out.push_str(&format!(
            " #{}{}{}{} {} — {} ({}{})\n",
            number, cat_tag, label_str, answered, title, author, date, comments
        ));
    }

    out.trim_end().to_string()
}
1403
1404pub fn search_issues_dispatch(
1406 repo: &str,
1407 query: &str,
1408 kind: &str,
1409 state: &str,
1410 sort: Option<&str>,
1411 limit: usize,
1412 labels: Option<&str>,
1413) -> String {
1414 match kind {
1415 "discussion" => search_discussions_graphql(repo, query, state, sort, limit, labels),
1416 "issue" | "pr" => search_issues_internal(repo, query, kind, state, sort, limit, labels),
1417 _ => {
1418 let issues = search_issues_internal(repo, query, "issue", state, sort, limit, labels);
1422 let prs = search_issues_internal(repo, query, "pr", state, sort, limit, labels);
1423 let rest = match (
1424 issues.starts_with("No results"),
1425 prs.starts_with("No results"),
1426 ) {
1427 (true, true) => issues, (true, false) => prs,
1429 (false, true) => issues,
1430 (false, false) => format!("{}\n\n{}", issues, prs),
1431 };
1432 let gql = search_discussions_graphql(repo, query, state, sort, limit, labels);
1433 if gql.starts_with("No discussion") {
1434 rest
1435 } else if rest.starts_with("No results") {
1436 gql
1437 } else {
1438 format!("{}\n\n{}", rest, gql)
1439 }
1440 }
1441 }
1442}
1443
1444fn format_search_results(repo: &str, user_query: &str, data: &Value) -> String {
1446 let total = data
1447 .get("total_count")
1448 .and_then(|v| v.as_u64())
1449 .unwrap_or(0);
1450 let items = match data.get("items").and_then(|v| v.as_array()) {
1451 Some(arr) if !arr.is_empty() => arr,
1452 _ => return format!("No results for \"{}\" in {}.", user_query, repo),
1453 };
1454
1455 let mut out = format!(
1456 "{} result{} (of {}) for \"{}\" in {}:\n",
1457 items.len(),
1458 if items.len() == 1 { "" } else { "s" },
1459 total,
1460 user_query,
1461 repo,
1462 );
1463
1464 for item in items {
1465 let is_pr = item.get("pull_request").is_some();
1466 if is_pr {
1467 let number = item.get("number").and_then(|v| v.as_u64()).unwrap_or(0);
1468 let title = json_str(item, "title");
1469 let author = json_author(item);
1470 let labels = format_label_tags(item);
1471 let date = format_date(item, "created_at");
1472 let comments = format_comments(item);
1473 out.push_str(&format!(
1474 " #{}{} [PR] {} — {} ({}{})\n",
1475 number, labels, title, author, date, comments
1476 ));
1477 } else {
1478 out.push_str(&format_issue_line(item));
1479 out.push('\n');
1480 }
1481 }
1482
1483 out.trim_end().to_string()
1484}
1485
/// List discussions in `repo` via GraphQL, newest first.
///
/// `state` filters OPEN/CLOSED (anything else lists all states); `sort`
/// selects UPDATED_AT vs CREATED_AT ordering (always descending).
fn list_discussions_graphql(repo: &str, state: &str, sort: &str, per_page: usize) -> String {
    let (owner, name) = match repo.split_once('/') {
        Some(pair) => pair,
        None => return format!("Invalid repo format: {}", repo),
    };

    // The order field is interpolated into the query text because GraphQL
    // enum values cannot be passed as plain string variables.
    let order_field = match sort {
        "updated" => "UPDATED_AT",
        _ => "CREATED_AT",
    };

    // Null means "no states filter" → the API returns every state.
    let states: Value = match state {
        "open" => json!(["OPEN"]),
        "closed" => json!(["CLOSED"]),
        _ => Value::Null,
    };

    // {{ / }} are format!-escaped literal braces.
    let query = format!(
        r#"query($owner: String!, $repo: String!, $first: Int!, $states: [DiscussionState!]) {{
  repository(owner: $owner, name: $repo) {{
    discussions(first: $first, states: $states, orderBy: {{field: {}, direction: DESC}}) {{
      nodes {{
        number
        title
        author {{ login }}
        createdAt
        closed
        comments {{ totalCount }}
        category {{ name }}
        labels(first: 5) {{ nodes {{ name }} }}
        answer {{ id }}
      }}
    }}
  }}
}}"#,
        order_field
    );

    let vars = json!({
        "owner": owner,
        "repo": name,
        // GitHub caps page size at 100.
        "first": per_page.min(100) as i64,
        "states": states,
    });

    // gh_graphql returns Err(String) with a ready-to-display message.
    let data = match gh_graphql(&query, vars) {
        Ok(d) => d,
        Err(e) => return e,
    };

    let nodes = match data
        .get("repository")
        .and_then(|r| r.get("discussions"))
        .and_then(|d| d.get("nodes"))
        .and_then(|v| v.as_array())
    {
        Some(n) if !n.is_empty() => n,
        _ => return format!("No {} discussions in {}.", state, repo),
    };

    let mut out = format!(
        "{} discussion{} in {} ({}):\n",
        nodes.len(),
        if nodes.len() == 1 { "" } else { "s" },
        repo,
        state
    );

    for d in nodes {
        let number = d.get("number").and_then(|v| v.as_u64()).unwrap_or(0);
        let title = d.get("title").and_then(|v| v.as_str()).unwrap_or("");
        let author = gql_author(d);
        // createdAt is ISO-8601; keep only the YYYY-MM-DD prefix.
        let date = d
            .get("createdAt")
            .and_then(|v| v.as_str())
            .and_then(|s| s.get(..10))
            .unwrap_or("");
        let comment_count = d
            .get("comments")
            .and_then(|c| c.get("totalCount"))
            .and_then(|v| v.as_u64())
            .unwrap_or(0);
        let comments = if comment_count > 0 {
            format!(
                ", {} comment{}",
                comment_count,
                if comment_count == 1 { "" } else { "s" }
            )
        } else {
            String::new()
        };
        let category = d
            .get("category")
            .and_then(|c| c.get("name"))
            .and_then(|v| v.as_str())
            .unwrap_or("");
        let cat_tag = if category.is_empty() {
            String::new()
        } else {
            format!(" [{}]", category)
        };
        // Up to 5 label names (the GraphQL query requests labels(first: 5)).
        let label_str: String = d
            .get("labels")
            .and_then(|l| l.get("nodes"))
            .and_then(|v| v.as_array())
            .map(|arr| {
                arr.iter()
                    .filter_map(|l| l.get("name").and_then(|n| n.as_str()))
                    .collect::<Vec<_>>()
                    .join(", ")
            })
            .filter(|s| !s.is_empty())
            .map(|s| format!(" [{}]", s))
            .unwrap_or_default();
        // A non-null `answer` means the discussion has an accepted answer.
        let answered = if d.get("answer").map(|v| !v.is_null()).unwrap_or(false) {
            " [answered]"
        } else {
            ""
        };
        let is_closed = d.get("closed").and_then(|v| v.as_bool()).unwrap_or(false);
        let state_tag = if is_closed { " [closed]" } else { "" };

        out.push_str(&format!(
            " #{}{}{}{}{} {} — {} ({}{})\n",
            number, cat_tag, label_str, answered, state_tag, title, author, date, comments
        ));
    }

    out.trim_end().to_string()
}
1621
1622pub fn list_issues_internal(
1623 repo: &str,
1624 kind: &str,
1625 state: &str,
1626 sort: &str,
1627 limit: usize,
1628 labels: Option<&str>,
1629) -> String {
1630 let per_page = limit.min(100);
1631 let direction = "desc";
1632
1633 match kind {
1634 "pr" => list_pulls(repo, state, sort, direction, per_page),
1635 "issue" => list_issues_only(repo, state, sort, direction, per_page, labels),
1636 "discussion" => list_discussions_graphql(repo, state, sort, per_page),
1637 _ => list_all(repo, state, sort, direction, per_page, labels),
1638 }
1639}
1640
1641fn list_pulls(repo: &str, state: &str, sort: &str, direction: &str, per_page: usize) -> String {
1642 let path = format!(
1643 "repos/{}/pulls?state={}&sort={}&direction={}&per_page={}",
1644 repo, state, sort, direction, per_page
1645 );
1646 match gh_get(&format!("{}/{}", GITHUB_API, &path)) {
1647 Ok(Value::Array(items)) => format_pull_list(repo, state, &items),
1648 Ok(_) => "Unexpected response format.".to_string(),
1649 Err(e) => e,
1650 }
1651}
1652
1653fn list_issues_only(
1654 repo: &str,
1655 state: &str,
1656 sort: &str,
1657 direction: &str,
1658 per_page: usize,
1659 labels: Option<&str>,
1660) -> String {
1661 let mut path = format!(
1662 "repos/{}/issues?state={}&sort={}&direction={}&per_page={}",
1663 repo, state, sort, direction, per_page
1664 );
1665 if let Some(lbls) = labels {
1666 if !lbls.is_empty() {
1667 path.push_str(&format!("&labels={}", lbls));
1668 }
1669 }
1670 match gh_get(&format!("{}/{}", GITHUB_API, &path)) {
1671 Ok(Value::Array(items)) => {
1672 let issues: Vec<&Value> = items
1674 .iter()
1675 .filter(|item| item.get("pull_request").is_none())
1676 .collect();
1677 format_issue_list(repo, state, &issues)
1678 }
1679 Ok(_) => "Unexpected response format.".to_string(),
1680 Err(e) => e,
1681 }
1682}
1683
1684fn list_all(
1685 repo: &str,
1686 state: &str,
1687 sort: &str,
1688 direction: &str,
1689 per_page: usize,
1690 labels: Option<&str>,
1691) -> String {
1692 let mut path = format!(
1693 "repos/{}/issues?state={}&sort={}&direction={}&per_page={}",
1694 repo, state, sort, direction, per_page
1695 );
1696 if let Some(lbls) = labels {
1697 if !lbls.is_empty() {
1698 path.push_str(&format!("&labels={}", lbls));
1699 }
1700 }
1701 match gh_get(&format!("{}/{}", GITHUB_API, &path)) {
1702 Ok(Value::Array(items)) => {
1703 let refs: Vec<&Value> = items.iter().collect();
1704 format_mixed_list(repo, state, &refs)
1705 }
1706 Ok(_) => "Unexpected response format.".to_string(),
1707 Err(e) => e,
1708 }
1709}
1710
1711fn format_label_tags(item: &Value) -> String {
1716 item.get("labels")
1717 .and_then(|v| v.as_array())
1718 .map(|arr| {
1719 arr.iter()
1720 .filter_map(|l| l.get("name").and_then(|n| n.as_str()))
1721 .collect::<Vec<_>>()
1722 .join(", ")
1723 })
1724 .filter(|s| !s.is_empty())
1725 .map(|s| format!(" [{}]", s))
1726 .unwrap_or_default()
1727}
1728
1729fn format_date(item: &Value, key: &str) -> String {
1730 item.get(key)
1731 .and_then(|v| v.as_str())
1732 .map(|s| s.get(..10).unwrap_or(s).to_string())
1733 .unwrap_or_default()
1734}
1735
1736fn format_comments(item: &Value) -> String {
1737 let count = item.get("comments").and_then(|v| v.as_u64()).unwrap_or(0);
1738 if count > 0 {
1739 format!(", {} comment{}", count, if count == 1 { "" } else { "s" })
1740 } else {
1741 String::new()
1742 }
1743}
1744
1745fn format_issue_line(item: &Value) -> String {
1746 let number = item.get("number").and_then(|v| v.as_u64()).unwrap_or(0);
1747 let title = json_str(item, "title");
1748 let author = json_author(item);
1749 let labels = format_label_tags(item);
1750 let date = format_date(item, "created_at");
1751 let comments = format_comments(item);
1752 format!(
1753 " #{}{} {} — {} ({}{})",
1754 number, labels, title, author, date, comments
1755 )
1756}
1757
1758fn format_pr_line(item: &Value) -> String {
1759 let number = item.get("number").and_then(|v| v.as_u64()).unwrap_or(0);
1760 let title = json_str(item, "title");
1761 let author = json_author(item);
1762 let labels = format_label_tags(item);
1763 let date = format_date(item, "created_at");
1764 let comments = format_comments(item);
1765 let draft = if item.get("draft").and_then(|v| v.as_bool()).unwrap_or(false) {
1766 " [draft]"
1767 } else {
1768 ""
1769 };
1770 let base = item
1771 .get("base")
1772 .and_then(|b| b.get("ref"))
1773 .and_then(|v| v.as_str())
1774 .unwrap_or("");
1775 let head = item
1776 .get("head")
1777 .and_then(|h| h.get("ref"))
1778 .and_then(|v| v.as_str())
1779 .unwrap_or("");
1780 let branch_info = if !base.is_empty() && !head.is_empty() {
1781 format!(" {} -> {}", head, base)
1782 } else {
1783 String::new()
1784 };
1785 format!(
1786 " #{}{}{} {} — {} ({}{}){}",
1787 number, labels, draft, title, author, date, comments, branch_info
1788 )
1789}
1790
1791fn format_issue_list(repo: &str, state: &str, items: &[&Value]) -> String {
1792 if items.is_empty() {
1793 return format!("No {} issues in {}.", state, repo);
1794 }
1795 let mut out = format!(
1796 "{} issue{} in {} ({}):\n",
1797 items.len(),
1798 if items.len() == 1 { "" } else { "s" },
1799 repo,
1800 state
1801 );
1802 for item in items {
1803 out.push_str(&format_issue_line(item));
1804 out.push('\n');
1805 }
1806 out.trim_end().to_string()
1807}
1808
1809fn format_pull_list(repo: &str, state: &str, items: &[Value]) -> String {
1810 if items.is_empty() {
1811 return format!("No {} pull requests in {}.", state, repo);
1812 }
1813 let mut out = format!(
1814 "{} pull request{} in {} ({}):\n",
1815 items.len(),
1816 if items.len() == 1 { "" } else { "s" },
1817 repo,
1818 state
1819 );
1820 for item in items {
1821 out.push_str(&format_pr_line(item));
1822 out.push('\n');
1823 }
1824 out.trim_end().to_string()
1825}
1826
1827fn format_mixed_list(repo: &str, state: &str, items: &[&Value]) -> String {
1828 if items.is_empty() {
1829 return format!("No {} discussions in {}.", state, repo);
1830 }
1831 let mut out = format!(
1832 "{} discussion{} in {} ({}):\n",
1833 items.len(),
1834 if items.len() == 1 { "" } else { "s" },
1835 repo,
1836 state
1837 );
1838 for item in items {
1839 let is_pr = item.get("pull_request").is_some();
1840 if is_pr {
1841 let number = item.get("number").and_then(|v| v.as_u64()).unwrap_or(0);
1843 let title = json_str(item, "title");
1844 let author = json_author(item);
1845 let labels = format_label_tags(item);
1846 let date = format_date(item, "created_at");
1847 let comments = format_comments(item);
1848 out.push_str(&format!(
1849 " #{}{} [PR] {} — {} ({}{})\n",
1850 number, labels, title, author, date, comments
1851 ));
1852 } else {
1853 out.push_str(&format_issue_line(item));
1854 out.push('\n');
1855 }
1856 }
1857 out.trim_end().to_string()
1858}
1859
#[cfg(test)]
mod tests {
    use super::*;

    // Serializes env-var mutation across tests in this module: set_var /
    // remove_var are process-global, so concurrent tests would race.
    // Recovers the guard from a poisoned mutex so one failed test does not
    // cascade into the others.
    fn env_lock() -> std::sync::MutexGuard<'static, ()> {
        use std::sync::{Mutex, OnceLock};
        static LOCK: OnceLock<Mutex<()>> = OnceLock::new();
        LOCK.get_or_init(|| Mutex::new(()))
            .lock()
            .unwrap_or_else(|p| p.into_inner())
    }

    #[test]
    fn empty_string_token_is_treated_as_missing() {
        let _g = env_lock();
        // Save current values so the test can restore them afterwards and
        // leave the process environment untouched.
        let prev_gh_token = std::env::var("GITHUB_TOKEN").ok();
        let prev_alt_token = std::env::var("GH_TOKEN").ok();

        // SAFETY: env mutation is guarded by env_lock(), so no other test
        // in this module touches these variables concurrently.
        unsafe {
            std::env::set_var("GITHUB_TOKEN", "");
            std::env::remove_var("GH_TOKEN");
        }
        // auth_token() filters out empty strings, so this must read as "no token".
        assert!(
            !has_git_token(),
            "empty GITHUB_TOKEN must be treated as missing"
        );

        // SAFETY: still holding env_lock().
        unsafe {
            std::env::set_var("GITHUB_TOKEN", "ghp_real_value");
        }
        assert!(has_git_token(), "non-empty token must be detected");

        // Restore the original environment.
        // SAFETY: still holding env_lock().
        unsafe {
            match prev_gh_token {
                Some(v) => std::env::set_var("GITHUB_TOKEN", v),
                None => std::env::remove_var("GITHUB_TOKEN"),
            }
            match prev_alt_token {
                Some(v) => std::env::set_var("GH_TOKEN", v),
                None => std::env::remove_var("GH_TOKEN"),
            }
        }
    }
}