octorus 0.6.2

A TUI tool for GitHub PR review, designed for Helix editor users
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
use anyhow::{Context, Result};
use chrono::{DateTime, Utc};
use serde::Deserialize;

use super::client::{gh_api, gh_command};

/// Intermediate struct mirroring one element of the GitHub commits API
/// response (the nested JSON is flattened into `PrCommit` later).
#[derive(Debug, Clone, Deserialize)]
struct CommitResponse {
    // Full commit SHA.
    sha: String,
    // Nested "commit" object: message plus git-level author metadata.
    commit: CommitDetail,
    // GitHub account of the author; None when the API returns null
    // (commit not linked to a GitHub user).
    author: Option<GitHubUser>,
}

/// The nested `commit` object of a GitHub commit payload.
#[derive(Debug, Clone, Deserialize)]
struct CommitDetail {
    // Full commit message (may span multiple lines; only the first
    // line is surfaced in `PrCommit`).
    message: String,
    // Git-level author info; None when the field is null or missing.
    author: Option<CommitPersonDetail>,
}

/// Git-level person record (`commit.author` in the API payload).
#[derive(Debug, Clone, Deserialize)]
struct CommitPersonDetail {
    // Author name as recorded in git; None when absent in the payload.
    name: Option<String>,
    // Author date string; None when absent in the payload.
    date: Option<String>,
}

/// Minimal GitHub user record — only the login is needed here.
#[derive(Debug, Clone, Deserialize)]
struct GitHubUser {
    login: String,
}

/// Public type representing one PR (or local) commit.
#[derive(Debug, Clone)]
pub struct PrCommit {
    pub sha: String,
    /// First line of the commit message.
    pub message: String,
    /// Git author name; "unknown" when the API payload had none.
    pub author_name: String,
    /// GitHub login of the author; always None for local git commits.
    pub author_login: Option<String>,
    /// Date-time string in ISO 8601 format (empty when unavailable).
    pub date: String,
}

impl PrCommit {
    /// Abbreviated SHA: the first 7 characters, or the whole SHA if it
    /// is shorter than that.
    pub fn short_sha(&self) -> &str {
        let end = usize::min(7, self.sha.len());
        &self.sha[..end]
    }
}

/// One page of a paginated commit listing.
pub struct CommitListPage {
    /// Commits on this page.
    pub items: Vec<PrCommit>,
    /// True when at least one further page exists.
    pub has_more: bool,
}

/// Fetch the commit list of a PR (paginated).
///
/// `page` is 1-indexed. One extra item (`per_page + 1`) is requested so
/// `has_more` can be derived without a second round-trip. The GitHub API
/// itself caps PR commit listings at 250 commits.
pub async fn fetch_pr_commits(
    repo: &str,
    pr_number: u32,
    page: u32,
    per_page: u32,
) -> Result<CommitListPage> {
    let endpoint = format!(
        "repos/{}/pulls/{}/commits?per_page={}&page={}",
        repo,
        pr_number,
        per_page + 1,
        page
    );
    let json = gh_api(&endpoint)
        .await
        .context("Failed to fetch PR commits")?;

    let parsed: Vec<CommitResponse> =
        serde_json::from_value(json).context("Failed to parse PR commits response")?;

    // Receiving more than `per_page` items means another page exists.
    let has_more = parsed.len() > per_page as usize;

    // The API returns commits oldest-first; flip to newest-first.
    let mut items: Vec<PrCommit> = parsed
        .into_iter()
        .take(per_page as usize)
        .map(commit_response_to_pr_commit)
        .collect();
    items.reverse();

    Ok(CommitListPage { items, has_more })
}

/// Flatten a raw API `CommitResponse` into the public `PrCommit` type.
fn commit_response_to_pr_commit(r: CommitResponse) -> PrCommit {
    // Only the first line of the (possibly multi-line) message is kept.
    let message = match r.commit.message.lines().next() {
        Some(first) => first.to_string(),
        None => String::new(),
    };
    // Destructure the optional commit author once for both name and date.
    let (author_name, date) = match r.commit.author {
        Some(person) => (
            person.name.unwrap_or_else(|| "unknown".to_string()),
            person.date.unwrap_or_default(),
        ),
        None => ("unknown".to_string(), String::new()),
    };
    PrCommit {
        sha: r.sha,
        message,
        author_name,
        author_login: r.author.map(|u| u.login),
        date,
    }
}

/// Fetch the unified diff of a single commit via the GitHub API.
///
/// The diff media type is selected with an `Accept` header.
pub async fn fetch_commit_diff(repo: &str, sha: &str) -> Result<String> {
    let endpoint = format!("repos/{}/commits/{}", repo, sha);
    let args = [
        "api",
        "-H",
        "Accept: application/vnd.github.v3.diff",
        endpoint.as_str(),
    ];
    gh_command(&args)
        .await
        .context("Failed to fetch commit diff")
}

/// ローカル git log からコミット一覧を取得(ページネーション対応)
///
/// デフォルトブランチ(main/master)からの差分コミットを返す。
/// `git rev-list --count` で総数を取得し、`--skip` / `--max-count` で
/// 必要なページ分のみ取得する(全件取得を回避)。
pub async fn fetch_local_commits(
    working_dir: Option<&str>,
    offset: u32,
    limit: u32,
) -> Result<CommitListPage> {
    // デフォルトブランチを検出(upstream tracking branch ではなく main/master)
    let default_branch = detect_default_branch(working_dir).await;

    let range = default_branch
        .map(|b| format!("{}..HEAD", b))
        .unwrap_or_else(|| "HEAD".to_string());

    // Phase 1: 総コミット数を取得(SHA のみカウント、高速)
    let total = match count_commits(working_dir, &range).await {
        Ok(n) => n,
        Err(_) => {
            // range が無効の場合は HEAD のみでフォールバック
            count_commits(working_dir, "HEAD").await.unwrap_or(0)
        }
    };

    if total == 0 || offset as usize >= total {
        return Ok(CommitListPage {
            items: vec![],
            has_more: false,
        });
    }

    let has_more = (offset + limit) < total as u32;

    // Phase 2: 必要なページ分のみ取得(newest-first のまま返す)
    let skip = offset as usize;
    let max_count = (limit as usize).min(total.saturating_sub(skip));

    let mut cmd = tokio::process::Command::new("git");
    cmd.args([
        "log",
        "--format=%H%x00%s%x00%an%x00%aI",
        &format!("--skip={}", skip),
        &format!("--max-count={}", max_count),
        &range,
    ]);
    if let Some(dir) = working_dir {
        cmd.current_dir(dir);
    }
    let output = cmd.output().await.context("Failed to run git log")?;

    if !output.status.success() {
        // range が無効の場合は HEAD のみでフォールバック
        let mut cmd2 = tokio::process::Command::new("git");
        cmd2.args([
            "log",
            "--format=%H%x00%s%x00%an%x00%aI",
            &format!("--skip={}", skip),
            &format!("--max-count={}", max_count),
        ]);
        if let Some(dir) = working_dir {
            cmd2.current_dir(dir);
        }
        let output2 = cmd2
            .output()
            .await
            .context("Failed to run git log fallback")?;
        if !output2.status.success() {
            let stderr = String::from_utf8_lossy(&output2.stderr);
            anyhow::bail!("git log fallback failed: {}", stderr.trim());
        }
        let items = parse_git_log_output(&output2.stdout);
        return Ok(CommitListPage { items, has_more });
    }

    let items = parse_git_log_output(&output.stdout);
    Ok(CommitListPage { items, has_more })
}

/// Count the commits in `range` (SHA count only — fast).
async fn count_commits(working_dir: Option<&str>, range: &str) -> Result<usize> {
    let mut cmd = tokio::process::Command::new("git");
    cmd.args(["rev-list", "--count", range]);
    if let Some(dir) = working_dir {
        cmd.current_dir(dir);
    }
    let out = cmd.output().await.context("Failed to count commits")?;
    if !out.status.success() {
        anyhow::bail!(
            "git rev-list --count failed: {}",
            String::from_utf8_lossy(&out.stderr).trim()
        );
    }
    String::from_utf8_lossy(&out.stdout)
        .trim()
        .parse()
        .context("Failed to parse commit count")
}

/// Detect the default branch (origin/main or origin/master).
///
/// Returns None when neither ref resolves (e.g. no `origin` remote).
async fn detect_default_branch(working_dir: Option<&str>) -> Option<String> {
    for name in ["origin/main", "origin/master"] {
        let mut cmd = tokio::process::Command::new("git");
        cmd.args(["rev-parse", "--verify", name]);
        if let Some(dir) = working_dir {
            cmd.current_dir(dir);
        }
        match cmd.output().await {
            Ok(out) if out.status.success() => return Some(name.to_string()),
            _ => continue,
        }
    }
    None
}

/// Parse NUL-separated `git log` output (one commit per line:
/// `%H NUL %s NUL %an NUL %aI`) into `PrCommit` values.
///
/// Malformed lines (fewer than 4 fields) are silently skipped.
fn parse_git_log_output(stdout: &[u8]) -> Vec<PrCommit> {
    let text = String::from_utf8_lossy(stdout);
    let mut commits = Vec::new();
    for line in text.lines() {
        if line.is_empty() {
            continue;
        }
        let mut fields = line.splitn(4, '\0');
        let (Some(sha), Some(subject), Some(author), Some(date)) =
            (fields.next(), fields.next(), fields.next(), fields.next())
        else {
            continue;
        };
        commits.push(PrCommit {
            sha: sha.to_string(),
            message: subject.to_string(),
            author_name: author.to_string(),
            // Local git has no GitHub login information.
            author_login: None,
            date: date.to_string(),
        });
    }
    commits
}

/// Test helper: parse everything, then cut out the offset/limit page.
#[cfg(test)]
fn parse_git_log_page(stdout: &[u8], offset: u32, limit: u32) -> Result<CommitListPage> {
    let parsed = parse_git_log_output(stdout);
    let end = (offset + limit) as usize;
    let has_more = parsed.len() > end;
    // Clamp both bounds so out-of-range offsets yield an empty page.
    let start = (offset as usize).min(parsed.len());
    let stop = end.min(parsed.len());
    let items = parsed[start..stop].to_vec();
    Ok(CommitListPage { items, has_more })
}

/// Fetch a commit's unified diff via local `git show`.
///
/// `--format=` suppresses the commit header so only the patch remains.
pub async fn fetch_local_commit_diff(working_dir: Option<&str>, sha: &str) -> Result<String> {
    let mut cmd = tokio::process::Command::new("git");
    cmd.args(["show", "--format=", "--patch", sha]);
    if let Some(dir) = working_dir {
        cmd.current_dir(dir);
    }
    let out = cmd.output().await.context("Failed to run git show")?;
    if out.status.success() {
        Ok(String::from_utf8_lossy(&out.stdout).into_owned())
    } else {
        let stderr = String::from_utf8_lossy(&out.stderr);
        anyhow::bail!("git show failed: {}", stderr.trim())
    }
}

/// Convert an ISO 8601 date-time string into a relative-time label.
///
/// Examples: "2h ago", "3d ago", "1mo ago". Unparsable input is
/// returned unchanged; timestamps in the future render as "just now".
pub fn format_relative_time(iso_date: &str) -> String {
    let Ok(dt) = DateTime::parse_from_rfc3339(iso_date) else {
        return iso_date.to_string();
    };

    let duration = Utc::now().signed_duration_since(dt.with_timezone(&Utc));

    let seconds = duration.num_seconds();
    if seconds < 0 {
        return "just now".to_string();
    }

    let minutes = duration.num_minutes();
    let hours = duration.num_hours();
    let days = duration.num_days();

    if seconds < 60 {
        "just now".to_string()
    } else if minutes < 60 {
        format!("{}m ago", minutes)
    } else if hours < 24 {
        format!("{}h ago", hours)
    } else if days < 7 {
        format!("{}d ago", days)
    } else if days < 35 {
        // Same cutoff as the old `weeks < 5` (weeks = days / 7).
        format!("{}w ago", days / 7)
    } else if days < 365 {
        // Bug fix: the old guard was `months < 12` with months = days / 30,
        // so ages of 360-364 days fell into the year branch where
        // days / 365 == 0 rendered "0y ago". Guarding on days < 365
        // keeps those as "12mo ago" instead.
        format!("{}mo ago", days / 30)
    } else {
        format!("{}y ago", days / 365)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Format an instant `d` before now through `format_relative_time`.
    fn relative(d: chrono::Duration) -> String {
        let instant = Utc::now() - d;
        format_relative_time(&instant.to_rfc3339())
    }

    #[test]
    fn test_format_relative_time_minutes() {
        assert_eq!(relative(chrono::Duration::minutes(5)), "5m ago");
    }

    #[test]
    fn test_format_relative_time_hours() {
        assert_eq!(relative(chrono::Duration::hours(2)), "2h ago");
    }

    #[test]
    fn test_format_relative_time_days() {
        assert_eq!(relative(chrono::Duration::days(3)), "3d ago");
    }

    #[test]
    fn test_format_relative_time_invalid() {
        // Unparsable input must be passed through unchanged.
        assert_eq!(format_relative_time("not-a-date"), "not-a-date");
    }

    #[test]
    fn test_short_sha() {
        let commit = PrCommit {
            sha: String::from("abc1234567890"),
            message: String::from("test"),
            author_name: String::from("author"),
            author_login: None,
            date: String::new(),
        };
        assert_eq!(commit.short_sha(), "abc1234");
    }

    /// Generate 50 commits and verify page boundaries never overlap.
    #[test]
    fn test_parse_git_log_page_non_overlapping_pagination() {
        // 50 commits, oldest first (as plain `git log` reversed would emit).
        let stdout = (0..50)
            .map(|i| {
                format!(
                    "sha{:03}\x00commit {}\x00author\x002024-01-01T00:00:00Z",
                    i, i
                )
            })
            .collect::<Vec<String>>()
            .join("\n");
        let bytes = stdout.as_bytes();

        let limit = 20u32;

        // Three consecutive pages: offsets 0, 20, 40.
        let page1 = parse_git_log_page(bytes, 0, limit).unwrap();
        let page2 = parse_git_log_page(bytes, 20, limit).unwrap();
        let page3 = parse_git_log_page(bytes, 40, limit).unwrap();

        assert_eq!(page1.items.len(), 20);
        assert!(page1.has_more);
        assert_eq!(page1.items[0].message, "commit 0");
        assert_eq!(page1.items[19].message, "commit 19");

        assert_eq!(page2.items.len(), 20);
        assert!(page2.has_more);
        assert_eq!(page2.items[0].message, "commit 20");
        assert_eq!(page2.items[19].message, "commit 39");

        // Final page holds only the remaining 10 commits.
        assert_eq!(page3.items.len(), 10);
        assert!(!page3.has_more);
        assert_eq!(page3.items[0].message, "commit 40");
        assert_eq!(page3.items[9].message, "commit 49");

        // Every SHA must appear on exactly one page.
        let mut seen = std::collections::HashSet::new();
        for page in [&page1, &page2, &page3] {
            for commit in &page.items {
                assert!(
                    seen.insert(commit.sha.as_str()),
                    "sha appears on more than one page: {}",
                    commit.sha
                );
            }
        }
    }
}