use anyhow::{Context, Result};
use clap::Parser;
use super::parse_beta_header;
// Arguments for the `check` subcommand: validate commit messages in a revision
// range against project guidelines using an AI backend.
//
// NOTE(review): field comments deliberately use `//` rather than `///` —
// doc comments on clap-derived fields become user-visible CLI help text.
#[derive(Parser)]
pub struct CheckCommand {
    // Git revision range to check (e.g. "main..HEAD"). When omitted, a
    // default range is inferred in `generate_repository_view`.
    #[arg(value_name = "COMMIT_RANGE")]
    pub commit_range: Option<String>,
    // AI model override; validated by `check_ai_command_prerequisites`.
    #[arg(long)]
    pub model: Option<String>,
    // Extra beta header for the AI API, "KEY:VALUE" (see `parse_beta_header`).
    #[arg(long, value_name = "KEY:VALUE")]
    pub beta_header: Option<String>,
    // Directory holding project guidance files (guidelines, scopes).
    #[arg(long)]
    pub context_dir: Option<std::path::PathBuf>,
    // Explicit guidelines file; overrides `commit-guidelines.md` from the
    // context directory.
    #[arg(long)]
    pub guidelines: Option<std::path::PathBuf>,
    // Output format; parsed into `OutputFormat` (Text/Json/Yaml variants).
    // NOTE(review): unknown values currently fall back to text in `execute`.
    #[arg(long, default_value = "text")]
    pub format: String,
    // Influences the exit code computed by `CheckReport::exit_code`.
    #[arg(long)]
    pub strict: bool,
    // Suppress progress output and info-level issue lines.
    #[arg(long)]
    pub quiet: bool,
    // Show extra detail (model info, suggestion explanations).
    #[arg(long)]
    pub verbose: bool,
    // Also list commits that passed all checks.
    #[arg(long)]
    pub show_passing: bool,
    // Maximum concurrent AI requests in the parallel (map/reduce) path.
    #[arg(long, default_value = "4")]
    pub concurrency: usize,
    // Deprecated alias for --concurrency (hidden from help).
    #[arg(long, hide = true)]
    pub batch_size: Option<usize>,
    // Skip the cross-commit coherence refinement pass.
    #[arg(long)]
    pub no_coherence: bool,
    // Do not request suggested replacement messages from the AI.
    #[arg(long)]
    pub no_suggestions: bool,
    // Offer to apply suggested fixes interactively after the report.
    #[arg(long)]
    pub twiddle: bool,
}
impl CheckCommand {
pub async fn execute(mut self) -> Result<()> {
if let Some(bs) = self.batch_size {
eprintln!("warning: --batch-size is deprecated; use --concurrency instead");
self.concurrency = bs;
}
use crate::data::check::OutputFormat;
let output_format: OutputFormat = self.format.parse().unwrap_or(OutputFormat::Text);
let ai_info = crate::utils::check_ai_command_prerequisites(self.model.as_deref())?;
if !self.quiet && output_format == OutputFormat::Text {
println!(
"✓ {} credentials verified (model: {})",
ai_info.provider, ai_info.model
);
}
if !self.quiet && output_format == OutputFormat::Text {
println!("🔍 Checking commit messages against guidelines...");
}
let mut repo_view = self.generate_repository_view().await?;
if repo_view.commits.is_empty() {
eprintln!("error: no commits found in range");
std::process::exit(3);
}
if !self.quiet && output_format == OutputFormat::Text {
println!("📊 Found {} commits to check", repo_view.commits.len());
}
let guidelines = self.load_guidelines().await?;
let valid_scopes = self.load_scopes();
for commit in &mut repo_view.commits {
commit.analysis.refine_scope(&valid_scopes);
}
if !self.quiet && output_format == OutputFormat::Text {
self.show_guidance_files_status(&guidelines, &valid_scopes);
}
let beta = self
.beta_header
.as_deref()
.map(parse_beta_header)
.transpose()?;
let claude_client = crate::claude::create_default_claude_client(self.model.clone(), beta)?;
if self.verbose && output_format == OutputFormat::Text {
self.show_model_info(&claude_client)?;
}
let report = if repo_view.commits.len() > 1 {
if !self.quiet && output_format == OutputFormat::Text {
println!(
"🔄 Processing {} commits in parallel (concurrency: {})...",
repo_view.commits.len(),
self.concurrency
);
}
self.check_with_map_reduce(
&claude_client,
&repo_view,
guidelines.as_deref(),
&valid_scopes,
)
.await?
} else {
if !self.quiet && output_format == OutputFormat::Text {
println!("🤖 Analyzing commits with AI...");
}
claude_client
.check_commits_with_scopes(
&repo_view,
guidelines.as_deref(),
&valid_scopes,
!self.no_suggestions,
)
.await?
};
self.output_report(&report, output_format)?;
if should_offer_twiddle(self.twiddle, report.has_errors(), output_format) {
use std::io::IsTerminal;
let amendments = self.build_amendments_from_suggestions(&report, &repo_view);
if !amendments.is_empty()
&& self
.prompt_and_apply_suggestions(
amendments,
std::io::stdin().is_terminal(),
&mut std::io::BufReader::new(std::io::stdin()),
)
.await?
{
return Ok(());
}
}
let exit_code = report.exit_code(self.strict);
if exit_code != 0 {
std::process::exit(exit_code);
}
Ok(())
}
    /// Build a `RepositoryView` for the commits to be checked.
    ///
    /// Uses the explicit `COMMIT_RANGE` argument when given; otherwise falls
    /// back to `main..HEAD`, then `master..HEAD`, then the last five commits.
    ///
    /// # Errors
    /// Fails when not inside a git repository, or when git range/status
    /// queries fail.
    async fn generate_repository_view(&self) -> Result<crate::data::RepositoryView> {
        use crate::data::{
            AiInfo, BranchInfo, FieldExplanation, FileStatusInfo, RepositoryView, VersionInfo,
            WorkingDirectoryInfo,
        };
        use crate::git::{GitRepository, RemoteInfo};
        use crate::utils::ai_scratch;
        let repo = GitRepository::open()
            .context("Failed to open git repository. Make sure you're in a git repository.")?;
        // Detached HEAD (or any branch-lookup failure) is reported as "HEAD".
        let current_branch = repo
            .get_current_branch()
            .unwrap_or_else(|_| "HEAD".to_string());
        let commit_range = if let Some(range) = &self.commit_range {
            range.clone()
        } else {
            // Prefer a conventional default branch as the base; if neither
            // exists, check the most recent five commits.
            let base = if repo.branch_exists("main")? {
                "main"
            } else if repo.branch_exists("master")? {
                "master"
            } else {
                "HEAD~5"
            };
            format!("{base}..HEAD")
        };
        let wd_status = repo.get_working_directory_status()?;
        let working_directory = WorkingDirectoryInfo {
            clean: wd_status.clean,
            untracked_changes: wd_status
                .untracked_changes
                .into_iter()
                .map(|fs| FileStatusInfo {
                    status: fs.status,
                    file: fs.file,
                })
                .collect(),
        };
        let remotes = RemoteInfo::get_all_remotes(repo.repository())?;
        let commits = repo.get_commits_in_range(&commit_range)?;
        let versions = Some(VersionInfo {
            omni_dev: env!("CARGO_PKG_VERSION").to_string(),
        });
        let ai_scratch_path =
            ai_scratch::get_ai_scratch_dir().context("Failed to determine AI scratch directory")?;
        let ai_info = AiInfo {
            scratch: ai_scratch_path.to_string_lossy().to_string(),
        };
        let mut repo_view = RepositoryView {
            versions,
            explanation: FieldExplanation::default(),
            working_directory,
            remotes,
            ai: ai_info,
            branch_info: Some(BranchInfo {
                branch: current_branch,
            }),
            // PR-related fields are not relevant to commit checking.
            pr_template: None,
            pr_template_location: None,
            branch_prs: None,
            commits,
        };
        repo_view.update_field_presence();
        Ok(repo_view)
    }
async fn load_guidelines(&self) -> Result<Option<String>> {
if let Some(guidelines_path) = &self.guidelines {
let content = std::fs::read_to_string(guidelines_path).with_context(|| {
format!(
"Failed to read guidelines file: {}",
guidelines_path.display()
)
})?;
return Ok(Some(content));
}
let context_dir = crate::claude::context::resolve_context_dir(self.context_dir.as_deref());
crate::claude::context::load_config_content(&context_dir, "commit-guidelines.md")
}
fn load_scopes(&self) -> Vec<crate::data::context::ScopeDefinition> {
let context_dir = crate::claude::context::resolve_context_dir(self.context_dir.as_deref());
crate::claude::context::load_project_scopes(&context_dir, &std::path::PathBuf::from("."))
}
    /// Print where the guidance files (guidelines, scopes) were loaded from.
    /// Purely informational; only called for non-quiet text output.
    fn show_guidance_files_status(
        &self,
        guidelines: &Option<String>,
        valid_scopes: &[crate::data::context::ScopeDefinition],
    ) {
        use crate::claude::context::{
            config_source_label, resolve_context_dir_with_source, ConfigSourceLabel,
        };
        let (context_dir, dir_source) =
            resolve_context_dir_with_source(self.context_dir.as_deref());
        println!("📋 Project guidance files status:");
        println!(" 📂 Config dir: {} ({dir_source})", context_dir.display());
        // Guidelines may have come from --guidelines rather than the context
        // dir, in which case the label lookup legitimately finds nothing.
        let guidelines_source = if guidelines.is_some() {
            match config_source_label(&context_dir, "commit-guidelines.md") {
                ConfigSourceLabel::NotFound => "✅ (source unknown)".to_string(),
                label => format!("✅ {label}"),
            }
        } else {
            "⚪ Using defaults".to_string()
        };
        println!(" 📝 Commit guidelines: {guidelines_source}");
        let scopes_count = valid_scopes.len();
        let scopes_source = if scopes_count > 0 {
            match config_source_label(&context_dir, "scopes.yaml") {
                ConfigSourceLabel::NotFound => {
                    format!("✅ (source unknown) ({scopes_count} scopes)")
                }
                label => format!("✅ {label} ({scopes_count} scopes)"),
            }
        } else {
            "⚪ None found (any scope accepted)".to_string()
        };
        println!(" 🎯 Valid scopes: {scopes_source}");
        println!();
    }
async fn check_with_map_reduce(
&self,
claude_client: &crate::claude::client::ClaudeClient,
full_repo_view: &crate::data::RepositoryView,
guidelines: Option<&str>,
valid_scopes: &[crate::data::context::ScopeDefinition],
) -> Result<crate::data::check::CheckReport> {
use std::io::IsTerminal;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use crate::claude::batch;
use crate::claude::token_budget;
use crate::data::check::{CheckReport, CommitCheckResult};
let total_commits = full_repo_view.commits.len();
let metadata = claude_client.get_ai_client_metadata();
let system_prompt = crate::claude::prompts::generate_check_system_prompt_with_scopes(
guidelines,
valid_scopes,
);
let system_prompt_tokens = token_budget::estimate_tokens(&system_prompt);
let batch_plan =
batch::plan_batches(&full_repo_view.commits, &metadata, system_prompt_tokens);
if !self.quiet && batch_plan.batches.len() < total_commits {
println!(
" 📦 Grouped {} commits into {} batches by token budget",
total_commits,
batch_plan.batches.len()
);
}
let semaphore = Arc::new(tokio::sync::Semaphore::new(self.concurrency));
let completed = Arc::new(AtomicUsize::new(0));
let futs: Vec<_> = batch_plan
.batches
.iter()
.map(|batch| {
let sem = semaphore.clone();
let completed = completed.clone();
let batch_indices = &batch.commit_indices;
async move {
let _permit = sem
.acquire()
.await
.map_err(|e| anyhow::anyhow!("semaphore closed: {e}"))?;
let batch_size = batch_indices.len();
let batch_view = if batch_size == 1 {
full_repo_view.single_commit_view(&full_repo_view.commits[batch_indices[0]])
} else {
let commits: Vec<_> = batch_indices
.iter()
.map(|&i| &full_repo_view.commits[i])
.collect();
full_repo_view.multi_commit_view(&commits)
};
let result = claude_client
.check_commits_with_scopes(
&batch_view,
guidelines,
valid_scopes,
!self.no_suggestions,
)
.await;
match result {
Ok(report) => {
let done =
completed.fetch_add(batch_size, Ordering::Relaxed) + batch_size;
if !self.quiet {
println!(" ✅ {done}/{total_commits} commits checked");
}
let items: Vec<_> = report
.commits
.into_iter()
.map(|r| {
let summary = r.summary.clone().unwrap_or_default();
(r, summary)
})
.collect();
Ok::<_, anyhow::Error>((items, vec![]))
}
Err(e) if batch_size > 1 => {
eprintln!(
"warning: batch of {batch_size} failed, retrying individually: {e}"
);
let mut items = Vec::new();
let mut failed_indices = Vec::new();
for &idx in batch_indices {
let single_view =
full_repo_view.single_commit_view(&full_repo_view.commits[idx]);
let single_result = claude_client
.check_commits_with_scopes(
&single_view,
guidelines,
valid_scopes,
!self.no_suggestions,
)
.await;
match single_result {
Ok(report) => {
if let Some(r) = report.commits.into_iter().next() {
let summary = r.summary.clone().unwrap_or_default();
items.push((r, summary));
}
let done = completed.fetch_add(1, Ordering::Relaxed) + 1;
if !self.quiet {
println!(
" ✅ {done}/{total_commits} commits checked"
);
}
}
Err(e) => {
eprintln!("warning: failed to check commit: {e}");
failed_indices.push(idx);
if !self.quiet {
println!(" ❌ commit check failed");
}
}
}
}
Ok((items, failed_indices))
}
Err(e) => {
let idx = batch_indices[0];
eprintln!("warning: failed to check commit: {e}");
let done = completed.fetch_add(1, Ordering::Relaxed) + 1;
if !self.quiet {
println!(" ❌ {done}/{total_commits} commits checked (failed)");
}
Ok((vec![], vec![idx]))
}
}
}
})
.collect();
let results = futures::future::join_all(futs).await;
let mut successes: Vec<(CommitCheckResult, String)> = Vec::new();
let mut failed_indices: Vec<usize> = Vec::new();
for (result, batch) in results.into_iter().zip(&batch_plan.batches) {
match result {
Ok((items, failed)) => {
successes.extend(items);
failed_indices.extend(failed);
}
Err(e) => {
eprintln!("warning: batch processing error: {e}");
failed_indices.extend(&batch.commit_indices);
}
}
}
if !failed_indices.is_empty() && !self.quiet && std::io::stdin().is_terminal() {
self.run_interactive_retry_check(
&mut failed_indices,
full_repo_view,
claude_client,
guidelines,
valid_scopes,
&mut successes,
&mut std::io::BufReader::new(std::io::stdin()),
)
.await?;
} else if !failed_indices.is_empty() {
eprintln!(
"warning: {} commit(s) failed to check",
failed_indices.len()
);
}
if !failed_indices.is_empty() {
eprintln!(
"warning: {} commit(s) ultimately failed to check",
failed_indices.len()
);
}
if successes.is_empty() {
anyhow::bail!("All commits failed to check");
}
let single_batch = batch_plan.batches.len() <= 1;
if !self.no_coherence && !single_batch && successes.len() >= 2 {
if !self.quiet {
println!("🔗 Running cross-commit coherence pass...");
}
match claude_client
.refine_checks_coherence(&successes, full_repo_view)
.await
{
Ok(refined) => {
if !self.quiet {
println!("✅ All commits checked!");
}
return Ok(refined);
}
Err(e) => {
eprintln!("warning: coherence pass failed, using individual results: {e}");
}
}
}
if !self.quiet {
println!("✅ All commits checked!");
}
let all_results: Vec<CommitCheckResult> = successes.into_iter().map(|(r, _)| r).collect();
Ok(CheckReport::new(all_results))
}
fn output_report(
&self,
report: &crate::data::check::CheckReport,
format: crate::data::check::OutputFormat,
) -> Result<()> {
use crate::data::check::OutputFormat;
match format {
OutputFormat::Text => self.output_text_report(report),
OutputFormat::Json => {
let json = serde_json::to_string_pretty(report)
.context("Failed to serialize report to JSON")?;
println!("{json}");
Ok(())
}
OutputFormat::Yaml => {
let yaml =
crate::data::to_yaml(report).context("Failed to serialize report to YAML")?;
println!("{yaml}");
Ok(())
}
}
}
    /// Print the human-readable report.
    ///
    /// Filtering rules: passing commits are hidden unless `--show-passing`;
    /// with `--quiet`, commits whose issues are all Info-level are skipped,
    /// Info issues are suppressed, and suggestions are not printed.
    fn output_text_report(&self, report: &crate::data::check::CheckReport) -> Result<()> {
        use crate::data::check::IssueSeverity;
        println!();
        for result in &report.commits {
            if !should_display_commit(result.passes, self.show_passing) {
                continue;
            }
            if self.quiet && !has_errors_or_warnings(&result.issues) {
                continue;
            }
            let icon = super::formatting::determine_commit_icon(result.passes, &result.issues);
            let short_hash = super::formatting::truncate_hash(&result.hash);
            println!("{}", format_commit_line(icon, short_hash, &result.message));
            for issue in &result.issues {
                if self.quiet && issue.severity == IssueSeverity::Info {
                    continue;
                }
                let severity_str = super::formatting::format_severity_label(issue.severity);
                println!(
                    " {} [{}] {}",
                    severity_str, issue.section, issue.explanation
                );
            }
            if !self.quiet {
                if let Some(suggestion) = &result.suggestion {
                    println!();
                    // `format_suggestion_text` ends with a newline, hence `print!`.
                    print!("{}", format_suggestion_text(suggestion, self.verbose));
                }
            }
            println!();
        }
        println!("{}", format_summary_text(&report.summary));
        Ok(())
    }
    /// Print the resolved model/provider (verbose text mode only).
    ///
    /// When the configured model name maps to a different API identifier in
    /// the model registry, both are shown as `name → identifier`. The escape
    /// sequences are ANSI yellow around the model name.
    fn show_model_info(&self, client: &crate::claude::client::ClaudeClient) -> Result<()> {
        use crate::claude::model_config::get_model_registry;
        println!("🤖 AI Model Configuration:");
        let metadata = client.get_ai_client_metadata();
        let registry = get_model_registry();
        if let Some(spec) = registry.get_model_spec(&metadata.model) {
            if metadata.model != spec.api_identifier {
                println!(
                    " 📡 Model: {} → \x1b[33m{}\x1b[0m",
                    metadata.model, spec.api_identifier
                );
            } else {
                println!(" 📡 Model: \x1b[33m{}\x1b[0m", metadata.model);
            }
            println!(" 🏷️ Provider: {}", spec.provider);
        } else {
            // Unknown to the registry: fall back to the client's own metadata.
            println!(" 📡 Model: \x1b[33m{}\x1b[0m", metadata.model);
            println!(" 🏷️ Provider: {}", metadata.provider);
        }
        println!();
        Ok(())
    }
fn build_amendments_from_suggestions(
&self,
report: &crate::data::check::CheckReport,
repo_view: &crate::data::RepositoryView,
) -> Vec<crate::data::amendments::Amendment> {
use crate::data::amendments::Amendment;
let candidate_hashes: Vec<String> =
repo_view.commits.iter().map(|c| c.hash.clone()).collect();
report
.commits
.iter()
.filter(|r| !r.passes)
.filter_map(|r| {
let suggestion = r.suggestion.as_ref()?;
let full_hash = super::formatting::resolve_short_hash(&r.hash, &candidate_hashes)?;
Some(Amendment::new(
full_hash.to_string(),
suggestion.message.clone(),
))
})
.collect()
}
    /// Offer to apply suggested commit-message fixes as amendments.
    ///
    /// Returns `Ok(true)` when amendments were applied (the caller then
    /// skips the failure exit code) and `Ok(false)` when declined,
    /// non-interactive, or stdin closed.
    ///
    /// `is_terminal` is passed in (rather than probed here) so tests can
    /// drive the prompt through `reader`.
    async fn prompt_and_apply_suggestions(
        &self,
        amendments: Vec<crate::data::amendments::Amendment>,
        is_terminal: bool,
        reader: &mut (dyn std::io::BufRead + Send),
    ) -> Result<bool> {
        use crate::data::amendments::AmendmentFile;
        use crate::git::AmendmentHandler;
        use std::io::{self, Write};
        println!();
        println!(
            "🔧 {} commit(s) have issues with suggested fixes available.",
            amendments.len()
        );
        if !is_terminal {
            eprintln!("warning: stdin is not interactive, cannot prompt to apply suggested fixes");
            return Ok(false);
        }
        loop {
            print!("❓ [A]pply suggested fixes, or [Q]uit? [A/q] ");
            io::stdout().flush()?;
            let Some(input) = super::read_interactive_line(reader)? else {
                eprintln!("warning: stdin closed, not applying suggested fixes");
                return Ok(false);
            };
            match input.trim().to_lowercase().as_str() {
                // Bare Enter defaults to applying.
                "a" | "apply" | "" => {
                    // The handler's API is file-based, so amendments are
                    // round-tripped through a named temp file.
                    let amendment_file = AmendmentFile { amendments };
                    let temp_file = tempfile::NamedTempFile::new()
                        .context("Failed to create temp file for amendments")?;
                    amendment_file
                        .save_to_file(temp_file.path())
                        .context("Failed to save amendments")?;
                    let handler = AmendmentHandler::new()
                        .context("Failed to initialize amendment handler")?;
                    handler
                        .apply_amendments(&temp_file.path().to_string_lossy())
                        .context("Failed to apply amendments")?;
                    println!("✅ Suggested fixes applied successfully!");
                    return Ok(true);
                }
                "q" | "quit" => return Ok(false),
                _ => {
                    println!("Invalid choice. Please enter 'a' to apply or 'q' to quit.");
                }
            }
        }
    }
}
impl CheckCommand {
    /// Interactively retry commits that failed the map/reduce check.
    ///
    /// Lists the failed commits, then loops on an `[R]etry / [S]kip` prompt:
    /// retry re-checks each failed commit individually (moving successes into
    /// `successes` and shrinking `failed_indices`); skip leaves the remaining
    /// failures in place. Returns once everything succeeded, the user skipped,
    /// or stdin closed.
    ///
    /// # Errors
    /// Propagates stdout flush and prompt-read failures.
    #[allow(clippy::too_many_arguments)]
    async fn run_interactive_retry_check(
        &self,
        failed_indices: &mut Vec<usize>,
        full_repo_view: &crate::data::RepositoryView,
        claude_client: &crate::claude::client::ClaudeClient,
        guidelines: Option<&str>,
        valid_scopes: &[crate::data::context::ScopeDefinition],
        successes: &mut Vec<(crate::data::check::CommitCheckResult, String)>,
        reader: &mut (dyn std::io::BufRead + Send),
    ) -> Result<()> {
        use std::io::Write as _;
        // Shared "hash: subject" listing. `get(..8)` instead of `[..8]` so an
        // unusually short hash cannot cause a slice panic.
        let print_failed = |indices: &[usize]| {
            for &idx in indices {
                let commit = &full_repo_view.commits[idx];
                let subject = commit
                    .original_message
                    .lines()
                    .next()
                    .unwrap_or("(no message)");
                let short_hash = commit.hash.get(..8).unwrap_or(&commit.hash);
                println!(" - {}: {}", short_hash, subject);
            }
        };
        println!("\n⚠️ {} commit(s) failed to check:", failed_indices.len());
        print_failed(failed_indices);
        loop {
            print!("\n❓ [R]etry failed commits, or [S]kip? [R/s] ");
            std::io::stdout().flush()?;
            let Some(input) = super::read_interactive_line(reader)? else {
                eprintln!("warning: stdin closed, skipping failed commit(s)");
                break;
            };
            match input.trim().to_lowercase().as_str() {
                // Bare Enter defaults to retrying.
                "r" | "retry" | "" => {
                    let mut still_failed = Vec::new();
                    for &idx in failed_indices.iter() {
                        let single_view =
                            full_repo_view.single_commit_view(&full_repo_view.commits[idx]);
                        match claude_client
                            .check_commits_with_scopes(
                                &single_view,
                                guidelines,
                                valid_scopes,
                                !self.no_suggestions,
                            )
                            .await
                        {
                            Ok(report) => {
                                if let Some(r) = report.commits.into_iter().next() {
                                    let summary = r.summary.clone().unwrap_or_default();
                                    successes.push((r, summary));
                                }
                            }
                            Err(e) => {
                                eprintln!("warning: still failed: {e}");
                                still_failed.push(idx);
                            }
                        }
                    }
                    *failed_indices = still_failed;
                    if failed_indices.is_empty() {
                        println!("✅ All retried commits succeeded.");
                        break;
                    }
                    println!("\n⚠️ {} commit(s) still failed:", failed_indices.len());
                    print_failed(failed_indices);
                }
                "s" | "skip" => {
                    println!("Skipping {} failed commit(s).", failed_indices.len());
                    break;
                }
                _ => println!("Please enter 'r' to retry or 's' to skip."),
            }
        }
        Ok(())
    }
}
/// Structured result of a programmatic check run (see [`run_check`]),
/// returned instead of printing/exiting like the CLI path does.
#[derive(Debug, Clone)]
pub struct CheckOutcome {
    /// Full `CheckReport` serialized as YAML.
    pub report_yaml: String,
    /// Whether any commit produced an error-severity issue.
    pub has_errors: bool,
    /// Whether any commit produced a warning-severity issue.
    pub has_warnings: bool,
    /// Number of commits covered by the report.
    pub total_commits: usize,
    /// The strict flag the exit code was computed with.
    pub strict: bool,
    /// Exit code from `CheckReport::exit_code(strict)`.
    pub exit_code: i32,
}
/// Programmatic entry point: check the commits in `range`, optionally after
/// switching into `repo_path`, and return a structured [`CheckOutcome`].
///
/// # Errors
/// Fails when the directory change, AI prerequisites, client construction,
/// or the check itself fails.
pub async fn run_check(
    range: &str,
    guidelines_path: Option<&std::path::Path>,
    repo_path: Option<&std::path::Path>,
    strict: bool,
    model: Option<String>,
) -> Result<CheckOutcome> {
    // Held for the whole function so the original cwd is restored on drop.
    let _cwd_guard = if let Some(path) = repo_path {
        Some(super::CwdGuard::enter(path).await?)
    } else {
        None
    };
    crate::utils::check_ai_command_prerequisites(model.as_deref())?;
    let client = crate::claude::create_default_claude_client(model, None)?;
    run_check_with_client(range, guidelines_path, strict, &client).await
}
/// Library-style variant of [`run_check`] taking a pre-built client.
///
/// Builds a repository view for `range`, loads guidelines/scopes, runs the
/// checks in a single AI call (no batching, no coherence pass), and returns
/// a structured outcome.
///
/// NOTE(review): the view construction here largely duplicates
/// `CheckCommand::generate_repository_view` minus the range inference —
/// consider extracting a shared helper.
///
/// # Errors
/// Fails on git errors, an empty range, unreadable guidelines, or AI failure.
pub(crate) async fn run_check_with_client(
    range: &str,
    guidelines_path: Option<&std::path::Path>,
    strict: bool,
    claude_client: &crate::claude::client::ClaudeClient,
) -> Result<CheckOutcome> {
    use crate::data::{
        AiInfo, BranchInfo, FieldExplanation, FileStatusInfo, RepositoryView, VersionInfo,
        WorkingDirectoryInfo,
    };
    use crate::git::{GitRepository, RemoteInfo};
    use crate::utils::ai_scratch;
    let repo = GitRepository::open()
        .context("Failed to open git repository. Make sure you're in a git repository.")?;
    // Detached HEAD (or any branch-lookup failure) is reported as "HEAD".
    let current_branch = repo
        .get_current_branch()
        .unwrap_or_else(|_| "HEAD".to_string());
    let wd_status = repo.get_working_directory_status()?;
    let working_directory = WorkingDirectoryInfo {
        clean: wd_status.clean,
        untracked_changes: wd_status
            .untracked_changes
            .into_iter()
            .map(|fs| FileStatusInfo {
                status: fs.status,
                file: fs.file,
            })
            .collect(),
    };
    let remotes = RemoteInfo::get_all_remotes(repo.repository())?;
    let commits = repo.get_commits_in_range(range)?;
    // Unlike the CLI path (which exits with code 3), this bails with an error.
    if commits.is_empty() {
        anyhow::bail!("no commits found in range: {range}");
    }
    let ai_scratch_path =
        ai_scratch::get_ai_scratch_dir().context("Failed to determine AI scratch directory")?;
    let ai_info = AiInfo {
        scratch: ai_scratch_path.to_string_lossy().to_string(),
    };
    let mut repo_view = RepositoryView {
        versions: Some(VersionInfo {
            omni_dev: env!("CARGO_PKG_VERSION").to_string(),
        }),
        explanation: FieldExplanation::default(),
        working_directory,
        remotes,
        ai: ai_info,
        branch_info: Some(BranchInfo {
            branch: current_branch,
        }),
        // PR-related fields are not relevant to commit checking.
        pr_template: None,
        pr_template_location: None,
        branch_prs: None,
        commits,
    };
    repo_view.update_field_presence();
    let guidelines = if let Some(path) = guidelines_path {
        Some(
            std::fs::read_to_string(path)
                .with_context(|| format!("Failed to read guidelines file: {}", path.display()))?,
        )
    } else {
        let context_dir = crate::claude::context::resolve_context_dir(None);
        crate::claude::context::load_config_content(&context_dir, "commit-guidelines.md")?
    };
    let context_dir = crate::claude::context::resolve_context_dir(None);
    let valid_scopes =
        crate::claude::context::load_project_scopes(&context_dir, &std::path::PathBuf::from("."));
    // Normalize detected scopes against the project's declared scope list.
    for commit in &mut repo_view.commits {
        commit.analysis.refine_scope(&valid_scopes);
    }
    let report = claude_client
        .check_commits_with_scopes(&repo_view, guidelines.as_deref(), &valid_scopes, true)
        .await?;
    let report_yaml = crate::data::to_yaml(&report).context("Failed to serialise CheckReport")?;
    let has_errors = report.has_errors();
    let has_warnings = report.has_warnings();
    let exit_code = report.exit_code(strict);
    let total_commits = report.commits.len();
    Ok(CheckOutcome {
        report_yaml,
        has_errors,
        has_warnings,
        total_commits,
        strict,
        exit_code,
    })
}
// Tests for `run_check` / `run_check_with_client` using throwaway git repos
// and a scripted mock AI client (no network, no real AI calls).
#[cfg(test)]
#[allow(clippy::unwrap_used, clippy::expect_used)]
mod run_check_tests {
    use super::*;
    use crate::claude::client::ClaudeClient;
    use crate::claude::test_utils::ConfigurableMockAiClient;
    use git2::{Repository, Signature};

    #[tokio::test]
    async fn run_check_invalid_repo_path_errors_before_ai() {
        // A bogus repo path must fail on the cwd change, before any AI work.
        let err = run_check(
            "HEAD",
            None,
            Some(std::path::Path::new("/no/such/path/exists")),
            false,
            None,
        )
        .await
        .unwrap_err();
        let msg = format!("{err:#}");
        assert!(
            msg.to_lowercase().contains("set_current_dir")
                || msg.to_lowercase().contains("no such")
                || msg.to_lowercase().contains("directory"),
            "expected cwd-related error, got: {msg}"
        );
    }

    /// Create a throwaway git repository (under the crate's `tmp/` dir) with
    /// a single commit touching `f.txt`.
    fn init_test_repo() -> tempfile::TempDir {
        let tmp_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("tmp");
        std::fs::create_dir_all(&tmp_root).unwrap();
        let temp_dir = tempfile::tempdir_in(&tmp_root).unwrap();
        let repo = Repository::init(temp_dir.path()).unwrap();
        {
            let mut cfg = repo.config().unwrap();
            cfg.set_str("user.name", "Test").unwrap();
            cfg.set_str("user.email", "test@example.com").unwrap();
        }
        let signature = Signature::now("Test", "test@example.com").unwrap();
        std::fs::write(temp_dir.path().join("f.txt"), "c").unwrap();
        let mut idx = repo.index().unwrap();
        idx.add_path(std::path::Path::new("f.txt")).unwrap();
        idx.write().unwrap();
        let tree_id = idx.write_tree().unwrap();
        let tree = repo.find_tree(tree_id).unwrap();
        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            "feat(cli): only",
            &tree,
            &[],
        )
        .unwrap();
        temp_dir
    }

    /// Mock AI response: one passing check for `hash_prefix`.
    fn passing_check_yaml(hash_prefix: &str) -> String {
        format!("checks:\n - commit: {hash_prefix}\n passes: true\n issues: []\n")
    }

    /// Mock AI response: one failing check with a single error-severity issue.
    fn failing_check_yaml(hash_prefix: &str) -> String {
        format!(
            "checks:\n - commit: {hash_prefix}\n passes: false\n issues:\n - severity: error\n section: subject\n rule: format\n explanation: bad\n"
        )
    }

    #[tokio::test]
    async fn run_check_with_client_happy_path_passing() {
        let temp_dir = init_test_repo();
        let _guard = super::super::CwdGuard::enter(temp_dir.path())
            .await
            .unwrap();
        let mock = ConfigurableMockAiClient::new(vec![Ok(passing_check_yaml("00000000"))]);
        let client = ClaudeClient::new(Box::new(mock));
        let outcome = run_check_with_client("HEAD", None, false, &client)
            .await
            .unwrap();
        assert!(!outcome.has_errors);
        assert!(!outcome.has_warnings);
        assert_eq!(outcome.exit_code, 0);
        assert_eq!(outcome.total_commits, 1);
        assert!(outcome.report_yaml.contains("commits:"));
        assert!(!outcome.strict);
    }

    #[tokio::test]
    async fn run_check_with_client_failing_commit_sets_error_exit_code() {
        let temp_dir = init_test_repo();
        let _guard = super::super::CwdGuard::enter(temp_dir.path())
            .await
            .unwrap();
        let mock = ConfigurableMockAiClient::new(vec![Ok(failing_check_yaml("00000000"))]);
        let client = ClaudeClient::new(Box::new(mock));
        let outcome = run_check_with_client("HEAD", None, false, &client)
            .await
            .unwrap();
        assert!(outcome.has_errors);
        assert_eq!(outcome.exit_code, 1);
    }

    #[tokio::test]
    async fn run_check_with_client_strict_does_not_affect_no_issues() {
        let temp_dir = init_test_repo();
        let _guard = super::super::CwdGuard::enter(temp_dir.path())
            .await
            .unwrap();
        let mock = ConfigurableMockAiClient::new(vec![Ok(passing_check_yaml("00000000"))]);
        let client = ClaudeClient::new(Box::new(mock));
        let outcome = run_check_with_client("HEAD", None, true, &client)
            .await
            .unwrap();
        assert_eq!(outcome.exit_code, 0);
        assert!(outcome.strict);
    }

    #[tokio::test]
    async fn run_check_with_client_explicit_guidelines_path() {
        let temp_dir = init_test_repo();
        let guidelines_path = temp_dir.path().join("guidelines.md");
        std::fs::write(&guidelines_path, "guideline body").unwrap();
        let _guard = super::super::CwdGuard::enter(temp_dir.path())
            .await
            .unwrap();
        let mock = ConfigurableMockAiClient::new(vec![Ok(passing_check_yaml("00000000"))]);
        let client = ClaudeClient::new(Box::new(mock));
        let outcome = run_check_with_client("HEAD", Some(&guidelines_path), false, &client)
            .await
            .unwrap();
        assert_eq!(outcome.exit_code, 0);
    }

    #[tokio::test]
    async fn run_check_with_client_guidelines_path_missing_errors() {
        let temp_dir = init_test_repo();
        let missing = temp_dir.path().join("no-such.md");
        let _guard = super::super::CwdGuard::enter(temp_dir.path())
            .await
            .unwrap();
        let mock = ConfigurableMockAiClient::new(vec![Ok(passing_check_yaml("00000000"))]);
        let client = ClaudeClient::new(Box::new(mock));
        let err = run_check_with_client("HEAD", Some(&missing), false, &client)
            .await
            .unwrap_err();
        assert!(
            format!("{err:#}").contains("guidelines"),
            "expected guidelines read error"
        );
    }

    #[tokio::test]
    async fn run_check_with_client_empty_range_bails() {
        let temp_dir = init_test_repo();
        let _guard = super::super::CwdGuard::enter(temp_dir.path())
            .await
            .unwrap();
        let mock = ConfigurableMockAiClient::new(vec![]);
        let client = ClaudeClient::new(Box::new(mock));
        let err = run_check_with_client("HEAD..HEAD", None, false, &client)
            .await
            .unwrap_err();
        assert!(format!("{err:#}").contains("no commits"));
    }

    #[tokio::test]
    async fn run_check_with_client_ai_failure_propagates() {
        let temp_dir = init_test_repo();
        let _guard = super::super::CwdGuard::enter(temp_dir.path())
            .await
            .unwrap();
        // Mock with no scripted responses: the AI call itself must error.
        let mock = ConfigurableMockAiClient::new(vec![]);
        let client = ClaudeClient::new(Box::new(mock));
        let err = run_check_with_client("HEAD", None, false, &client)
            .await
            .unwrap_err();
        let _ = err;
    }

    #[test]
    fn check_outcome_clone_and_debug() {
        let outcome = CheckOutcome {
            report_yaml: "x".to_string(),
            has_errors: false,
            has_warnings: true,
            total_commits: 1,
            strict: true,
            exit_code: 2,
        };
        let cloned = outcome.clone();
        assert_eq!(format!("{outcome:?}"), format!("{cloned:?}"));
    }
}
/// A commit line is shown when it failed, or when `--show-passing` is set.
fn should_display_commit(passes: bool, show_passing: bool) -> bool {
    show_passing || !passes
}
/// True when at least one issue is of Error or Warning severity
/// (Info-only issue lists do not count).
fn has_errors_or_warnings(issues: &[crate::data::check::CommitIssue]) -> bool {
    use crate::data::check::IssueSeverity;
    issues.iter().any(|issue| {
        issue.severity == IssueSeverity::Error || issue.severity == IssueSeverity::Warning
    })
}
/// The interactive fix-applying flow ("twiddle") is offered only when
/// explicitly requested, there is something to fix, and the output is
/// human-readable text.
fn should_offer_twiddle(
    twiddle_flag: bool,
    has_errors: bool,
    format: crate::data::check::OutputFormat,
) -> bool {
    if !twiddle_flag || !has_errors {
        return false;
    }
    matches!(format, crate::data::check::OutputFormat::Text)
}
/// Render a suggested replacement message (indented under a header), plus
/// the AI's rationale when `verbose` is set. The result ends with a newline.
fn format_suggestion_text(
    suggestion: &crate::data::check::CommitSuggestion,
    verbose: bool,
) -> String {
    use std::fmt::Write as _;
    let mut out = String::from(" Suggested message:\n");
    for line in suggestion.message.lines() {
        let _ = writeln!(out, " {line}");
    }
    if verbose {
        out.push('\n');
        out.push_str(" Why this is better:\n");
        for line in suggestion.explanation.lines() {
            let _ = writeln!(out, " {line}");
        }
    }
    out
}
/// Render the end-of-report summary block: a rule line followed by totals.
fn format_summary_text(summary: &crate::data::check::CheckSummary) -> String {
    use std::fmt::Write as _;
    let mut text =
        String::from("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━");
    let _ = write!(
        text,
        "\nSummary: {} commits checked\n  {} errors, {} warnings\n  {} passed, {} with issues",
        summary.total_commits,
        summary.error_count,
        summary.warning_count,
        summary.passing_commits,
        summary.failing_commits,
    );
    text
}
/// One-line commit header: `<icon> <short-hash> - "<message>"`.
fn format_commit_line(icon: &str, short_hash: &str, message: &str) -> String {
    let mut line = String::with_capacity(icon.len() + short_hash.len() + message.len() + 6);
    line.push_str(icon);
    line.push(' ');
    line.push_str(short_hash);
    line.push_str(" - \"");
    line.push_str(message);
    line.push('"');
    line
}
#[cfg(test)]
mod tests {
use super::*;
use crate::data::check::{
CheckSummary, CommitIssue, CommitSuggestion, IssueSeverity, OutputFormat,
};
#[test]
fn display_commit_passing_hidden() {
assert!(!should_display_commit(true, false));
}
#[test]
fn display_commit_passing_shown() {
assert!(should_display_commit(true, true));
}
#[test]
fn display_commit_failing() {
assert!(should_display_commit(false, false));
assert!(should_display_commit(false, true));
}
#[test]
fn errors_or_warnings_with_error() {
let issues = vec![CommitIssue {
severity: IssueSeverity::Error,
section: "subject".to_string(),
rule: "length".to_string(),
explanation: "too long".to_string(),
}];
assert!(has_errors_or_warnings(&issues));
}
#[test]
fn errors_or_warnings_with_warning() {
let issues = vec![CommitIssue {
severity: IssueSeverity::Warning,
section: "body".to_string(),
rule: "style".to_string(),
explanation: "minor issue".to_string(),
}];
assert!(has_errors_or_warnings(&issues));
}
#[test]
fn errors_or_warnings_info_only() {
let issues = vec![CommitIssue {
severity: IssueSeverity::Info,
section: "body".to_string(),
rule: "suggestion".to_string(),
explanation: "consider adding more detail".to_string(),
}];
assert!(!has_errors_or_warnings(&issues));
}
#[test]
fn errors_or_warnings_empty() {
assert!(!has_errors_or_warnings(&[]));
}
#[test]
fn offer_twiddle_all_conditions_met() {
assert!(should_offer_twiddle(true, true, OutputFormat::Text));
}
#[test]
fn offer_twiddle_flag_off() {
assert!(!should_offer_twiddle(false, true, OutputFormat::Text));
}
#[test]
fn offer_twiddle_no_errors() {
assert!(!should_offer_twiddle(true, false, OutputFormat::Text));
}
#[test]
fn offer_twiddle_json_format() {
assert!(!should_offer_twiddle(true, true, OutputFormat::Json));
}
#[test]
fn suggestion_text_basic() {
let suggestion = CommitSuggestion {
message: "feat(cli): add new flag".to_string(),
explanation: "uses conventional format".to_string(),
};
let result = format_suggestion_text(&suggestion, false);
assert!(result.contains("Suggested message:"));
assert!(result.contains("feat(cli): add new flag"));
assert!(!result.contains("Why this is better"));
}
#[test]
fn suggestion_text_verbose() {
let suggestion = CommitSuggestion {
message: "fix: resolve crash".to_string(),
explanation: "clear description of fix".to_string(),
};
let result = format_suggestion_text(&suggestion, true);
assert!(result.contains("Suggested message:"));
assert!(result.contains("fix: resolve crash"));
assert!(result.contains("Why this is better:"));
assert!(result.contains("clear description of fix"));
}
#[test]
fn summary_text_formatting() {
let summary = CheckSummary {
total_commits: 5,
passing_commits: 3,
failing_commits: 2,
error_count: 1,
warning_count: 4,
info_count: 0,
};
let result = format_summary_text(&summary);
assert!(result.contains("5 commits checked"));
assert!(result.contains("1 errors, 4 warnings"));
assert!(result.contains("3 passed, 2 with issues"));
}
#[test]
fn commit_line_formatting() {
    // Format is: status emoji, short hash, dash, quoted subject line.
    let formatted = format_commit_line("✅", "abc1234", "feat: add feature");
    assert_eq!(formatted, "✅ abc1234 - \"feat: add feature\"");
}
/// Builds a `CheckCommand` with fixed test defaults; only `quiet` varies.
/// Coherence checking is disabled so tests exercise the map/reduce path only.
fn make_check_cmd(quiet: bool) -> CheckCommand {
    CheckCommand {
        quiet,
        commit_range: None,
        model: None,
        beta_header: None,
        context_dir: None,
        guidelines: None,
        format: "text".to_string(),
        strict: false,
        verbose: false,
        show_passing: false,
        concurrency: 4,
        batch_size: None,
        no_coherence: true,
        no_suggestions: false,
        twiddle: false,
    }
}
/// Creates a minimal `CommitInfo` for `hash`, backed by an empty temp diff
/// file. The `NamedTempFile` is returned so it outlives the test body.
fn make_check_commit(hash: &str) -> (crate::git::CommitInfo, tempfile::NamedTempFile) {
    use crate::git::commit::FileChanges;
    use crate::git::{CommitAnalysis, CommitInfo};
    let diff_tmp = tempfile::NamedTempFile::new().unwrap();
    let message = format!("feat: commit {hash}");
    let analysis = CommitAnalysis {
        detected_type: "feat".to_string(),
        detected_scope: String::new(),
        proposed_message: message.clone(),
        file_changes: FileChanges {
            total_files: 0,
            files_added: 0,
            files_deleted: 0,
            file_list: vec![],
        },
        diff_summary: String::new(),
        diff_file: diff_tmp.path().to_string_lossy().to_string(),
        file_diffs: Vec::new(),
    };
    let commit = CommitInfo {
        hash: hash.to_string(),
        author: "Test <test@test.com>".to_string(),
        date: chrono::Utc::now().fixed_offset(),
        original_message: message,
        in_main_branches: vec![],
        analysis,
    };
    (commit, diff_tmp)
}
/// Wraps the given commits in an otherwise-empty, clean `RepositoryView`.
fn make_check_repo_view(commits: Vec<crate::git::CommitInfo>) -> crate::data::RepositoryView {
    use crate::data::{AiInfo, FieldExplanation, RepositoryView, WorkingDirectoryInfo};
    let working_directory = WorkingDirectoryInfo {
        clean: true,
        untracked_changes: vec![],
    };
    RepositoryView {
        commits,
        versions: None,
        explanation: FieldExplanation::default(),
        working_directory,
        remotes: vec![],
        ai: AiInfo {
            scratch: String::new(),
        },
        branch_info: None,
        pr_template: None,
        pr_template_location: None,
        branch_prs: None,
    }
}
/// Renders the minimal YAML the mock AI returns for a passing check of
/// `hash`: one `checks:` entry with `passes: true` and no issues.
fn check_yaml(hash: &str) -> String {
    let mut yaml = String::from("checks:\n");
    yaml.push_str(&format!(" - commit: {hash}\n"));
    yaml.push_str(" passes: true\n");
    yaml.push_str(" issues: []\n");
    yaml
}
/// Wraps a scripted mock AI client (one canned response per call) in a
/// `ClaudeClient`.
fn make_client(responses: Vec<anyhow::Result<String>>) -> crate::claude::client::ClaudeClient {
    let mock = crate::claude::test_utils::ConfigurableMockAiClient::new(responses);
    crate::claude::client::ClaudeClient::new(Box::new(mock))
}
/// Produces `n` generic mock failures for scripting the mock client.
fn errs(n: usize) -> Vec<anyhow::Result<String>> {
    std::iter::repeat_with(|| Err(anyhow::anyhow!("mock failure")))
        .take(n)
        .collect()
}
#[tokio::test]
async fn check_with_map_reduce_single_commit_fails_returns_err() {
    // Every mock call fails, so no commit is ever checked successfully
    // and the whole operation must bail with an error.
    let (commit, _tmp) = make_check_commit("abc00000");
    let repo_view = make_check_repo_view(vec![commit]);
    let cmd = make_check_cmd(true);
    let client = make_client(errs(3));
    let outcome = cmd
        .check_with_map_reduce(&client, &repo_view, None, &[])
        .await;
    assert!(outcome.is_err(), "empty successes should bail");
}
#[tokio::test]
async fn check_with_map_reduce_single_commit_succeeds() {
    // A single successful response yields a report with exactly one commit.
    let (commit, _tmp) = make_check_commit("abc00000");
    let repo_view = make_check_repo_view(vec![commit]);
    let cmd = make_check_cmd(true);
    let client = make_client(vec![Ok(check_yaml("abc00000"))]);
    let outcome = cmd
        .check_with_map_reduce(&client, &repo_view, None, &[])
        .await;
    assert!(outcome.is_ok());
    assert_eq!(outcome.unwrap().commits.len(), 1);
}
#[tokio::test]
async fn check_with_map_reduce_batch_fails_split_retry_both_succeed() {
    // The initial batch call fails all 3 attempts; after the split, each
    // commit's individual retry succeeds, so both commits land in the report.
    let (c1, _t1) = make_check_commit("abc00000");
    let (c2, _t2) = make_check_commit("def00000");
    let cmd = make_check_cmd(true);
    let repo_view = make_check_repo_view(vec![c1, c2]);
    let mut responses = errs(3);
    responses.push(Ok(check_yaml("abc00000")));
    responses.push(Ok(check_yaml("def00000")));
    let client = make_client(responses);
    let result = cmd
        .check_with_map_reduce(&client, &repo_view, None, &[])
        .await;
    assert!(result.is_ok());
    assert_eq!(result.unwrap().commits.len(), 2);
}
#[tokio::test]
async fn check_with_map_reduce_batch_fails_split_one_individual_fails_quiet() {
    // Batch fails (3 attempts), then in the split retry the first commit
    // succeeds but the second exhausts its 3 attempts; in quiet mode the
    // partial result (one commit) is still returned as Ok.
    let (c1, _t1) = make_check_commit("abc00000");
    let (c2, _t2) = make_check_commit("def00000");
    let cmd = make_check_cmd(true);
    let repo_view = make_check_repo_view(vec![c1, c2]);
    let mut responses = errs(3);
    responses.push(Ok(check_yaml("abc00000")));
    responses.extend(errs(3));
    let client = make_client(responses);
    let result = cmd
        .check_with_map_reduce(&client, &repo_view, None, &[])
        .await;
    assert!(result.is_ok());
    assert_eq!(result.unwrap().commits.len(), 1);
}
#[tokio::test]
async fn check_with_map_reduce_all_fail_in_split_retry_returns_err() {
    // 3 failures for the batch call, then 3 for each commit's individual
    // retry: 9 failures total, so no commit ever succeeds.
    let (c1, _t1) = make_check_commit("abc00000");
    let (c2, _t2) = make_check_commit("def00000");
    let cmd = make_check_cmd(true);
    let repo_view = make_check_repo_view(vec![c1, c2]);
    let client = make_client(errs(9));
    let result = cmd
        .check_with_map_reduce(&client, &repo_view, None, &[])
        .await;
    assert!(result.is_err(), "no successes should bail");
}
#[tokio::test]
async fn check_with_map_reduce_non_quiet_single_commit_succeeds() {
    // Non-quiet variant of the split-retry success path: batch fails, both
    // individual retries succeed, and progress output does not break the run.
    // NOTE(review): despite the "single_commit" name this exercises two
    // commits — consider renaming for clarity.
    let (c1, _t1) = make_check_commit("abc00000");
    let (c2, _t2) = make_check_commit("def00000");
    let cmd = make_check_cmd(false);
    let repo_view = make_check_repo_view(vec![c1, c2]);
    let mut responses = errs(3);
    responses.push(Ok(check_yaml("abc00000")));
    responses.push(Ok(check_yaml("def00000")));
    let client = make_client(responses);
    let result = cmd
        .check_with_map_reduce(&client, &repo_view, None, &[])
        .await;
    assert!(result.is_ok());
    assert_eq!(result.unwrap().commits.len(), 2);
}
#[tokio::test]
async fn interactive_retry_skip_immediately() {
    // Answering "s" skips the failed commit without calling the client,
    // leaving the failure list untouched.
    let (commit, _tmp) = make_check_commit("abc00000");
    let cmd = make_check_cmd(false);
    let repo_view = make_check_repo_view(vec![commit]);
    let client = make_client(vec![]);
    let mut failed = vec![0usize];
    let mut successes = vec![];
    let mut stdin = std::io::Cursor::new(b"s\n" as &[u8]);
    cmd.run_interactive_retry_check(
        &mut failed,
        &repo_view,
        &client,
        None,
        &[],
        &mut successes,
        &mut stdin,
    )
    .await
    .unwrap();
    assert_eq!(
        failed,
        vec![0],
        "skip should leave failed_indices unchanged"
    );
    assert!(successes.is_empty());
}
#[tokio::test]
async fn interactive_retry_retry_succeeds() {
    // Answering "r" retries the failed commit; the retry succeeds, so the
    // failure list is drained and the success list gains one entry.
    let (commit, _tmp) = make_check_commit("abc00000");
    let cmd = make_check_cmd(false);
    let repo_view = make_check_repo_view(vec![commit]);
    let client = make_client(vec![Ok(check_yaml("abc00000"))]);
    let mut failed = vec![0usize];
    let mut successes = vec![];
    let mut input = std::io::Cursor::new(b"r\n" as &[u8]);
    cmd.run_interactive_retry_check(
        &mut failed,
        &repo_view,
        &client,
        None,
        &[],
        &mut successes,
        &mut input,
    )
    .await
    .unwrap();
    assert!(
        failed.is_empty(),
        "retry succeeded → failed_indices cleared"
    );
    assert_eq!(successes.len(), 1);
}
#[tokio::test]
async fn interactive_retry_default_input_retries() {
    // A bare newline is treated as the default answer (retry), and the
    // retry succeeds here.
    let (commit, _tmp) = make_check_commit("abc00000");
    let cmd = make_check_cmd(false);
    let repo_view = make_check_repo_view(vec![commit]);
    let client = make_client(vec![Ok(check_yaml("abc00000"))]);
    let mut failed = vec![0usize];
    let mut successes = vec![];
    let mut input = std::io::Cursor::new(b"\n" as &[u8]);
    cmd.run_interactive_retry_check(
        &mut failed,
        &repo_view,
        &client,
        None,
        &[],
        &mut successes,
        &mut input,
    )
    .await
    .unwrap();
    assert!(failed.is_empty());
    assert_eq!(successes.len(), 1);
}
#[tokio::test]
async fn interactive_retry_still_fails_then_skip() {
    // First answer retries, but all mock attempts fail again; the second
    // answer skips, so the commit stays in the failure list.
    let (commit, _tmp) = make_check_commit("abc00000");
    let cmd = make_check_cmd(false);
    let repo_view = make_check_repo_view(vec![commit]);
    let client = make_client(errs(3));
    let mut failed = vec![0usize];
    let mut successes = vec![];
    let mut input = std::io::Cursor::new(b"r\ns\n" as &[u8]);
    cmd.run_interactive_retry_check(
        &mut failed,
        &repo_view,
        &client,
        None,
        &[],
        &mut successes,
        &mut input,
    )
    .await
    .unwrap();
    assert_eq!(failed, vec![0], "commit still failed after retry");
    assert!(successes.is_empty());
}
#[tokio::test]
async fn interactive_retry_invalid_input_then_skip() {
    // An unrecognized answer ("x") re-prompts; the follow-up "s" skips,
    // leaving the failure list unchanged.
    let (commit, _tmp) = make_check_commit("abc00000");
    let cmd = make_check_cmd(false);
    let repo_view = make_check_repo_view(vec![commit]);
    let client = make_client(vec![]);
    let mut failed = vec![0usize];
    let mut successes = vec![];
    let mut input = std::io::Cursor::new(b"x\ns\n" as &[u8]);
    cmd.run_interactive_retry_check(
        &mut failed,
        &repo_view,
        &client,
        None,
        &[],
        &mut successes,
        &mut input,
    )
    .await
    .unwrap();
    assert_eq!(failed, vec![0]);
    assert!(successes.is_empty());
}
#[tokio::test]
async fn interactive_retry_eof_breaks_immediately() {
    // EOF on stdin (empty input) ends the interactive loop without
    // retrying or skipping anything.
    let (commit, _tmp) = make_check_commit("abc00000");
    let cmd = make_check_cmd(false);
    let repo_view = make_check_repo_view(vec![commit]);
    let client = make_client(vec![]);
    let mut failed = vec![0usize];
    let mut successes = vec![];
    let mut stdin = std::io::Cursor::new(b"" as &[u8]);
    cmd.run_interactive_retry_check(
        &mut failed,
        &repo_view,
        &client,
        None,
        &[],
        &mut successes,
        &mut stdin,
    )
    .await
    .unwrap();
    assert_eq!(failed, vec![0], "EOF should leave failed_indices unchanged");
    assert!(successes.is_empty());
}
/// Builds a sample `Amendment` pointing at a fixed 40-character commit hash.
fn make_amendment() -> crate::data::amendments::Amendment {
    use crate::data::amendments::Amendment;
    Amendment {
        commit: "abc0000000000000000000000000000000000001".to_string(),
        message: "feat: improved commit message".to_string(),
        summary: None,
    }
}
#[tokio::test]
async fn prompt_and_apply_suggestions_non_terminal_returns_false() {
    // When stdin is not a terminal, prompting is skipped and nothing applies.
    let cmd = make_check_cmd(false);
    let mut input = std::io::Cursor::new(b"" as &[u8]);
    let applied = cmd
        .prompt_and_apply_suggestions(vec![make_amendment()], false, &mut input)
        .await
        .unwrap();
    assert!(!applied, "non-terminal should return false");
}
#[tokio::test]
async fn prompt_and_apply_suggestions_eof_returns_false() {
    // EOF while prompting (terminal mode, empty input) means nothing applies.
    let cmd = make_check_cmd(false);
    let mut input = std::io::Cursor::new(b"" as &[u8]);
    let applied = cmd
        .prompt_and_apply_suggestions(vec![make_amendment()], true, &mut input)
        .await
        .unwrap();
    assert!(!applied, "EOF should return false");
}
#[tokio::test]
async fn prompt_and_apply_suggestions_quit_returns_false() {
    // Answering "q" quits without applying any suggestion.
    let cmd = make_check_cmd(false);
    let mut input = std::io::Cursor::new(b"q\n" as &[u8]);
    let applied = cmd
        .prompt_and_apply_suggestions(vec![make_amendment()], true, &mut input)
        .await
        .unwrap();
    assert!(!applied, "quit should return false");
}
#[tokio::test]
async fn prompt_and_apply_suggestions_invalid_then_quit_returns_false() {
    // An unrecognized answer re-prompts; the follow-up "q" quits cleanly.
    let cmd = make_check_cmd(false);
    let mut input = std::io::Cursor::new(b"x\nq\n" as &[u8]);
    let applied = cmd
        .prompt_and_apply_suggestions(vec![make_amendment()], true, &mut input)
        .await
        .unwrap();
    assert!(!applied, "invalid then quit should return false");
}
}