llm_git/testing/
runner.rs

1//! Test runner for fixture-based testing
2
3use super::{
4   compare::{CompareResult, compare_analysis},
5   fixture::{Fixture, discover_fixtures},
6};
7use crate::{
8   api::{AnalysisContext, generate_analysis_with_map_reduce},
9   config::CommitConfig,
10   error::Result,
11   normalization::format_commit_message,
12   tokens::create_token_counter,
13   types::{CommitType, ConventionalAnalysis, ConventionalCommit},
14};
15
16/// Result of running a single fixture
17#[derive(Debug)]
18pub struct RunResult {
19   /// Fixture name
20   pub name:          String,
21   /// Comparison result (None if no golden exists)
22   pub comparison:    Option<CompareResult>,
23   /// The actual analysis produced
24   pub analysis:      crate::types::ConventionalAnalysis,
25   /// The actual commit message produced
26   pub final_message: String,
27   /// Error if any
28   pub error:         Option<String>,
29}
30
/// Test runner configuration
pub struct TestRunner {
   /// Directory containing the fixtures to discover and run
   pub fixtures_dir: std::path::PathBuf,
   /// Config to use for analysis
   pub config:       CommitConfig,
   /// Optional substring filter for fixture names; `None` runs everything
   pub filter:       Option<String>,
}
40
41impl TestRunner {
42   /// Create a new test runner
43   pub fn new(fixtures_dir: impl Into<std::path::PathBuf>, config: CommitConfig) -> Self {
44      Self { fixtures_dir: fixtures_dir.into(), config, filter: None }
45   }
46
47   /// Set filter pattern
48   pub fn with_filter(mut self, filter: Option<String>) -> Self {
49      self.filter = filter;
50      self
51   }
52
53   /// Run all fixtures and return results
54   pub fn run_all(&self) -> Result<Vec<RunResult>> {
55      let fixture_names = discover_fixtures(&self.fixtures_dir)?;
56      let mut results = Vec::new();
57
58      for name in fixture_names {
59         // Apply filter if set
60         if let Some(pattern) = &self.filter
61            && !name.contains(pattern)
62         {
63            continue;
64         }
65
66         let result = self.run_fixture(&name);
67         results.push(result);
68      }
69
70      Ok(results)
71   }
72
73   /// Run a single fixture
74   pub fn run_fixture(&self, name: &str) -> RunResult {
75      match self.run_fixture_inner(name) {
76         Ok(result) => result,
77         Err(e) => RunResult {
78            name:          name.to_string(),
79            comparison:    None,
80            analysis:      ConventionalAnalysis {
81               commit_type: CommitType::new("chore").expect("valid type"),
82               scope:       None,
83               details:     vec![],
84               issue_refs:  vec![],
85            },
86            final_message: String::new(),
87            error:         Some(e.to_string()),
88         },
89      }
90   }
91
92   fn run_fixture_inner(&self, name: &str) -> Result<RunResult> {
93      let fixture = Fixture::load(&self.fixtures_dir, name)?;
94      let token_counter = create_token_counter(&self.config);
95
96      // Build analysis context from fixture
97      let ctx = AnalysisContext {
98         user_context:    fixture.input.context.user_context.as_deref(),
99         recent_commits:  fixture.input.context.recent_commits.as_deref(),
100         common_scopes:   fixture.input.context.common_scopes.as_deref(),
101         project_context: fixture.input.context.project_context.as_deref(),
102      };
103
104      // Run analysis
105      let analysis = generate_analysis_with_map_reduce(
106         &fixture.input.stat,
107         &fixture.input.diff,
108         &self.config.analysis_model,
109         &fixture.input.scope_candidates,
110         &ctx,
111         &self.config,
112         &token_counter,
113      )?;
114
115      // Get summary
116      let detail_points = analysis.body_texts();
117      let summary = crate::api::generate_summary_from_analysis(
118         &fixture.input.stat,
119         analysis.commit_type.as_str(),
120         analysis.scope.as_ref().map(|s| s.as_str()),
121         &detail_points,
122         fixture.input.context.user_context.as_deref(),
123         &self.config,
124      )
125      .unwrap_or_else(|_| {
126         crate::api::fallback_summary(
127            &fixture.input.stat,
128            &detail_points,
129            analysis.commit_type.as_str(),
130            &self.config,
131         )
132      });
133
134      let final_commit = ConventionalCommit {
135         commit_type: analysis.commit_type.clone(),
136         scope: analysis.scope.clone(),
137         summary,
138         body: detail_points,
139         footers: vec![],
140      };
141      let final_message = format_commit_message(&final_commit);
142
143      // Compare to golden if exists
144      let comparison = fixture
145         .golden
146         .as_ref()
147         .map(|g| compare_analysis(&g.analysis, &analysis));
148
149      Ok(RunResult { name: name.to_string(), comparison, analysis, final_message, error: None })
150   }
151
152   /// Update golden files for all fixtures
153   pub fn update_all(&self) -> Result<Vec<String>> {
154      let fixture_names = discover_fixtures(&self.fixtures_dir)?;
155      let mut updated = Vec::new();
156
157      for name in fixture_names {
158         if let Some(pattern) = &self.filter
159            && !name.contains(pattern)
160         {
161            continue;
162         }
163
164         self.update_fixture(&name)?;
165         updated.push(name);
166      }
167
168      Ok(updated)
169   }
170
171   /// Update golden file for a single fixture
172   pub fn update_fixture(&self, name: &str) -> Result<()> {
173      let result = self.run_fixture(name);
174
175      if let Some(err) = result.error {
176         return Err(crate::error::CommitGenError::Other(format!(
177            "Failed to run fixture '{name}': {err}"
178         )));
179      }
180
181      let mut fixture = Fixture::load(&self.fixtures_dir, name)?;
182      fixture.update_golden(result.analysis, result.final_message);
183      fixture.save(&self.fixtures_dir)?;
184
185      Ok(())
186   }
187}
188
/// Summary of test run
#[derive(Debug, Default)]
pub struct TestSummary {
   /// Total number of fixtures in the run
   pub total:     usize,
   /// Fixtures whose comparison against the golden passed
   pub passed:    usize,
   /// Fixtures whose comparison against the golden failed
   pub failed:    usize,
   /// Fixtures that ran cleanly but have no golden file to compare against
   pub no_golden: usize,
   /// Fixtures that produced an error instead of a result
   pub errors:    usize,
}
198
199impl TestSummary {
200   /// Create summary from results
201   pub fn from_results(results: &[RunResult]) -> Self {
202      let mut summary = Self { total: results.len(), ..Default::default() };
203
204      for result in results {
205         if result.error.is_some() {
206            summary.errors += 1;
207         } else if let Some(cmp) = &result.comparison {
208            if cmp.passed {
209               summary.passed += 1;
210            } else {
211               summary.failed += 1;
212            }
213         } else {
214            summary.no_golden += 1;
215         }
216      }
217
218      summary
219   }
220
221   /// Check if all tests passed
222   pub const fn all_passed(&self) -> bool {
223      self.failed == 0 && self.errors == 0
224   }
225}