llm_git/testing/runner.rs

//! Test runner for fixture-based testing

use super::{
   compare::{CompareResult, compare_analysis},
   fixture::{Fixture, discover_fixtures},
};
use crate::{
   api::{
      AnalysisContext, fallback_summary, generate_analysis_with_map_reduce,
      generate_summary_from_analysis,
   },
   config::CommitConfig,
   error::{CommitGenError, Result},
   normalization::format_commit_message,
   tokens::create_token_counter,
   types::{CommitType, ConventionalAnalysis, ConventionalCommit},
};

/// Result of running a single fixture
#[derive(Debug)]
pub struct RunResult {
   /// Fixture name
   pub name:          String,
   /// Comparison result (None if no golden exists)
   pub comparison:    Option<CompareResult>,
   /// The actual analysis produced
   pub analysis:      ConventionalAnalysis,
   /// The actual commit message produced
   pub final_message: String,
   /// Error if any
   pub error:         Option<String>,
}

/// Test runner configuration
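///
/// # Example
///
/// A minimal usage sketch; the `llm_git::` paths and the source of the
/// `CommitConfig` value are assumptions, not defined in this module:
///
/// ```no_run
/// # use llm_git::config::CommitConfig;
/// # use llm_git::testing::runner::{TestRunner, TestSummary};
/// # async fn demo(config: CommitConfig) -> llm_git::error::Result<()> {
/// // Run every fixture whose name contains "auth"
/// let runner = TestRunner::new("tests/fixtures", config)
///    .with_filter(Some("auth".to_string()));
/// let results = runner.run_all().await?;
/// let summary = TestSummary::from_results(&results);
/// println!("{}/{} passed", summary.passed, summary.total);
/// # Ok(())
/// # }
/// ```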
pub struct TestRunner {
   /// Fixtures directory
   pub fixtures_dir: std::path::PathBuf,
   /// Config to use for analysis
   pub config:       CommitConfig,
   /// Filter pattern for fixture names
   pub filter:       Option<String>,
}

impl TestRunner {
   /// Create a new test runner
   pub fn new(fixtures_dir: impl Into<std::path::PathBuf>, config: CommitConfig) -> Self {
      Self { fixtures_dir: fixtures_dir.into(), config, filter: None }
   }

   /// Set a filter pattern; only fixtures whose names contain it are run
   pub fn with_filter(mut self, filter: Option<String>) -> Self {
      self.filter = filter;
      self
   }

   /// Run all fixtures and return results
   pub async fn run_all(&self) -> Result<Vec<RunResult>> {
      let fixture_names = discover_fixtures(&self.fixtures_dir)?;
      let mut results = Vec::new();

      for name in fixture_names {
         // Apply filter if set
         if let Some(pattern) = &self.filter
            && !name.contains(pattern)
         {
            continue;
         }

         let result = self.run_fixture(&name).await;
         results.push(result);
      }

      Ok(results)
   }

   /// Run a single fixture
   pub async fn run_fixture(&self, name: &str) -> RunResult {
      match self.run_fixture_inner(name).await {
         Ok(result) => result,
         Err(e) => RunResult {
            name:          name.to_string(),
            comparison:    None,
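            // Placeholder analysis so the caller still gets a RunResult; the error field records the failure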
            analysis:      ConventionalAnalysis {
               commit_type: CommitType::new("chore").expect("valid type"),
               scope:       None,
               details:     vec![],
               issue_refs:  vec![],
            },
            final_message: String::new(),
            error:         Some(e.to_string()),
         },
      }
   }

   async fn run_fixture_inner(&self, name: &str) -> Result<RunResult> {
      let fixture = Fixture::load(&self.fixtures_dir, name)?;
      let token_counter = create_token_counter(&self.config);

      // Build analysis context from fixture
      let ctx = AnalysisContext {
         user_context:    fixture.input.context.user_context.as_deref(),
         recent_commits:  fixture.input.context.recent_commits.as_deref(),
         common_scopes:   fixture.input.context.common_scopes.as_deref(),
         project_context: fixture.input.context.project_context.as_deref(),
         debug_output:    None,
         debug_prefix:    None,
      };

      // Run analysis
      let analysis = generate_analysis_with_map_reduce(
         &fixture.input.stat,
         &fixture.input.diff,
         &self.config.model,
         &fixture.input.scope_candidates,
         &ctx,
         &self.config,
         &token_counter,
      )
      .await?;

      // Generate the summary line, falling back to a deterministic summary if the API call fails
      let detail_points = analysis.body_texts();
      let summary = generate_summary_from_analysis(
         &fixture.input.stat,
         analysis.commit_type.as_str(),
         analysis.scope.as_ref().map(|s| s.as_str()),
         &detail_points,
         fixture.input.context.user_context.as_deref(),
         &self.config,
         None,
         None,
      )
      .await
      .unwrap_or_else(|_| {
         fallback_summary(
            &fixture.input.stat,
            &detail_points,
            analysis.commit_type.as_str(),
            &self.config,
         )
      });

      let final_commit = ConventionalCommit {
         commit_type: analysis.commit_type.clone(),
         scope: analysis.scope.clone(),
         summary,
         body: detail_points,
         footers: vec![],
      };
      let final_message = format_commit_message(&final_commit);

      // Compare against the golden analysis, if one exists
      let comparison = fixture
         .golden
         .as_ref()
         .map(|g| compare_analysis(&g.analysis, &analysis));

      Ok(RunResult { name: name.to_string(), comparison, analysis, final_message, error: None })
   }

   /// Update golden files for all fixtures
   pub async fn update_all(&self) -> Result<Vec<String>> {
      let fixture_names = discover_fixtures(&self.fixtures_dir)?;
      let mut updated = Vec::new();

      for name in fixture_names {
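         // Apply the same name filter as run_all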
         if let Some(pattern) = &self.filter
            && !name.contains(pattern)
         {
            continue;
         }

         self.update_fixture(&name).await?;
         updated.push(name);
      }

      Ok(updated)
   }

   /// Update golden file for a single fixture
   pub async fn update_fixture(&self, name: &str) -> Result<()> {
      let result = self.run_fixture(name).await;

      if let Some(err) = result.error {
         return Err(CommitGenError::Other(format!(
            "Failed to run fixture '{name}': {err}"
         )));
      }

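      // Persist the fresh analysis and message as the new golden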
      let mut fixture = Fixture::load(&self.fixtures_dir, name)?;
      fixture.update_golden(result.analysis, result.final_message);
      fixture.save(&self.fixtures_dir)?;

      Ok(())
   }
}

/// Summary of test run
#[derive(Debug, Default)]
pub struct TestSummary {
   pub total:     usize,
   pub passed:    usize,
   pub failed:    usize,
   pub no_golden: usize,
   pub errors:    usize,
}

impl TestSummary {
   /// Create summary from results
   pub fn from_results(results: &[RunResult]) -> Self {
      let mut summary = Self { total: results.len(), ..Default::default() };

      for result in results {
         if result.error.is_some() {
            summary.errors += 1;
         } else if let Some(cmp) = &result.comparison {
            if cmp.passed {
               summary.passed += 1;
            } else {
               summary.failed += 1;
            }
         } else {
            summary.no_golden += 1;
         }
      }

      summary
   }

   /// Check if all tests passed
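   ///
   /// Errors and golden mismatches fail the run; fixtures without a golden do not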
   pub const fn all_passed(&self) -> bool {
      self.failed == 0 && self.errors == 0
   }
}