1use std::{fmt::Write as _, path::PathBuf, sync::Arc};
2
3use anyhow::{Context, Result};
4use rusqlite::Connection;
5use tokio::sync::Mutex;
6use tokio_util::sync::CancellationToken;
7use tracing::{debug, info, warn};
8
9use crate::{
10 agents::{
11 self, AgentContext, AgentInvocation, AgentRole, Complexity, GraphContextNode,
12 PlannerGraphOutput, Severity, invoke_agent, parse_fixer_output, parse_planner_graph_output,
13 parse_review_output,
14 },
15 config::Config,
16 db::{self, AgentRun, ReviewFinding, Run, RunStatus},
17 git::{self, RebaseOutcome},
18 github::{self, GhClient},
19 issues::{IssueOrigin, IssueProvider, PipelineIssue},
20 process::{self, CommandRunner},
21};
22
/// State produced by a successfully executed issue pipeline, consumed by
/// `finalize_merge` for post-merge bookkeeping and cleanup.
#[derive(Debug)]
pub struct PipelineOutcome {
    // Identifier of this pipeline run (primary key of the run row in the DB).
    pub run_id: String,
    // Number of the pull request created for the issue.
    pub pr_number: u32,
    // Local work branch, if known; used for post-merge branch deletion.
    pub branch: Option<String>,
    // Path of the git worktree the agents operated in.
    pub worktree_path: PathBuf,
    // Repository directory the run targeted (may differ from the primary
    // repo when multi-repo mode routed the issue elsewhere).
    pub target_dir: PathBuf,
}
35
/// Drives the full issue pipeline: run bookkeeping, worktree setup, agent
/// invocations (implement / review / fix), rebase, and merge.
pub struct PipelineExecutor<R: CommandRunner> {
    // Executes external commands (git, gh, agent CLIs).
    pub runner: Arc<R>,
    // GitHub client backed by the same command runner.
    pub github: Arc<GhClient<R>>,
    // Issue-tracker abstraction (GitHub issues or local issues).
    pub issues: Arc<dyn IssueProvider>,
    // SQLite connection behind an async mutex so it can be held across awaits.
    pub db: Arc<Mutex<Connection>>,
    // Loaded configuration: labels, models, pipeline limits, multi-repo map.
    pub config: Config,
    // Cooperative cancellation, polled between pipeline steps.
    pub cancel_token: CancellationToken,
    // Primary repository directory.
    pub repo_dir: PathBuf,
}
46
47impl<R: CommandRunner + 'static> PipelineExecutor<R> {
    /// Run the full pipeline for `issue` with no pre-assessed complexity.
    pub async fn run_issue(&self, issue: &PipelineIssue, auto_merge: bool) -> Result<()> {
        self.run_issue_with_complexity(issue, auto_merge, None).await
    }
52
    /// Run the pipeline for `issue` (optionally recording a pre-assessed
    /// `complexity` on the run row), then finalize: mark the run complete and
    /// clean up the worktree and local branch.
    pub async fn run_issue_with_complexity(
        &self,
        issue: &PipelineIssue,
        auto_merge: bool,
        complexity: Option<Complexity>,
    ) -> Result<()> {
        let outcome = self.run_issue_pipeline(issue, auto_merge, complexity).await?;
        self.finalize_merge(&outcome, issue).await
    }
63
    /// Execute the pipeline for a single issue up to (but not including)
    /// post-merge cleanup, returning the state `finalize_merge` needs.
    ///
    /// Sequence: insert run row → move the issue label from "ready" to
    /// "cooking" → create a worktree and draft PR → run the agent steps. If
    /// the agent steps fail, the run is finalized as failed (the worktree is
    /// intentionally left in place) and the error is propagated.
    pub async fn run_issue_pipeline(
        &self,
        issue: &PipelineIssue,
        auto_merge: bool,
        complexity: Option<Complexity>,
    ) -> Result<PipelineOutcome> {
        let run_id = generate_run_id();

        let (target_dir, is_multi_repo) = self.resolve_target_dir(issue.target_repo.as_ref())?;

        let base_branch = git::default_branch(&target_dir).await?;

        let mut run = new_run(&run_id, issue, auto_merge);
        if let Some(ref c) = complexity {
            run.complexity = c.to_string();
        }
        {
            // Scope the DB lock so it is released before the awaits below.
            let conn = self.db.lock().await;
            db::runs::insert_run(&conn, &run)?;
        }

        self.issues
            .transition(issue.number, &self.config.labels.ready, &self.config.labels.cooking)
            .await?;

        let worktree = git::create_worktree(&target_dir, issue.number, &base_branch).await?;
        self.record_worktree(&run_id, &worktree).await?;

        // Seed the branch with an empty commit so its head differs from base
        // before the draft PR is opened below.
        git::empty_commit(
            &worktree.path,
            &format!("chore: start oven pipeline for issue #{}", issue.number),
        )
        .await?;

        info!(
            run_id = %run_id,
            issue = issue.number,
            branch = %worktree.branch,
            target_repo = ?issue.target_repo,
            "starting pipeline"
        );

        let pr_number = self.create_pr(&run_id, issue, &worktree.branch, &target_dir).await?;

        let ctx = AgentContext {
            issue_number: issue.number,
            issue_title: issue.title.clone(),
            issue_body: issue.body.clone(),
            branch: worktree.branch.clone(),
            pr_number: Some(pr_number),
            test_command: self.config.project.test.clone(),
            lint_command: self.config.project.lint.clone(),
            review_findings: None,
            cycle: 1,
            // Only surface a target repo to agents when multi-repo routing
            // actually selected one.
            target_repo: if is_multi_repo { issue.target_repo.clone() } else { None },
            issue_source: issue.source.as_str().to_string(),
            base_branch: base_branch.clone(),
        };

        let result = self.run_steps(&run_id, &ctx, &worktree.path, auto_merge, &target_dir).await;

        if let Err(ref e) = result {
            // Record the failure (labels, PR comment, DB row) before bubbling
            // the error up to the caller.
            self.finalize_run(&run_id, issue, pr_number, &result, &target_dir).await?;
            return Err(anyhow::anyhow!("{e:#}"));
        }

        self.update_status(&run_id, RunStatus::AwaitingMerge).await?;

        Ok(PipelineOutcome {
            run_id,
            pr_number,
            branch: Some(worktree.branch),
            worktree_path: worktree.path,
            target_dir,
        })
    }
151
152 pub async fn finalize_merge(
159 &self,
160 outcome: &PipelineOutcome,
161 issue: &PipelineIssue,
162 ) -> Result<()> {
163 self.finalize_run(&outcome.run_id, issue, outcome.pr_number, &Ok(()), &outcome.target_dir)
164 .await?;
165 if let Err(e) = git::remove_worktree(&outcome.target_dir, &outcome.worktree_path).await {
166 warn!(
167 run_id = %outcome.run_id,
168 error = %e,
169 "failed to clean up worktree after merge"
170 );
171 }
172 if let Some(ref branch) = outcome.branch {
173 if let Err(e) = git::delete_branch(&outcome.target_dir, branch).await {
174 warn!(
175 run_id = %outcome.run_id,
176 branch = %branch,
177 error = %e,
178 "failed to delete local branch after merge"
179 );
180 }
181 }
182 Ok(())
183 }
184
185 pub async fn plan_issues(
192 &self,
193 issues: &[PipelineIssue],
194 graph_context: &[GraphContextNode],
195 ) -> Option<PlannerGraphOutput> {
196 let prompt = match agents::planner::build_prompt(issues, graph_context) {
197 Ok(p) => p,
198 Err(e) => {
199 warn!(error = %e, "planner prompt build failed");
200 return None;
201 }
202 };
203 let invocation = AgentInvocation {
204 role: AgentRole::Planner,
205 prompt,
206 working_dir: self.repo_dir.clone(),
207 max_turns: Some(self.config.pipeline.turn_limit),
208 model: self.config.models.model_for(AgentRole::Planner.as_str()).map(String::from),
209 };
210
211 match invoke_agent(self.runner.as_ref(), &invocation).await {
212 Ok(result) => {
213 debug!(output = %result.output, "raw planner output");
214 let parsed = parse_planner_graph_output(&result.output);
215 if parsed.is_none() {
216 warn!(output = %result.output, "planner returned unparseable output, falling back to all-parallel");
217 }
218 parsed
219 }
220 Err(e) => {
221 warn!(error = %e, "planner agent failed, falling back to all-parallel");
222 None
223 }
224 }
225 }
226
227 pub(crate) fn resolve_target_dir(
232 &self,
233 target_repo: Option<&String>,
234 ) -> Result<(PathBuf, bool)> {
235 if !self.config.multi_repo.enabled {
236 return Ok((self.repo_dir.clone(), false));
237 }
238 match target_repo {
239 Some(name) => {
240 let path = self.config.resolve_repo(name)?;
241 Ok((path, true))
242 }
243 None => Ok((self.repo_dir.clone(), false)),
244 }
245 }
246
247 pub async fn reconstruct_outcome(
253 &self,
254 issue: &PipelineIssue,
255 run_id: &str,
256 pr_number: u32,
257 ) -> Result<PipelineOutcome> {
258 let (target_dir, _) = self.resolve_target_dir(issue.target_repo.as_ref())?;
259 let worktree_path =
260 target_dir.join(".oven").join("worktrees").join(format!("issue-{}", issue.number));
261
262 let branch = {
263 let conn = self.db.lock().await;
264 db::runs::get_run(&conn, run_id)?.and_then(|r| r.branch)
265 };
266
267 Ok(PipelineOutcome {
268 run_id: run_id.to_string(),
269 pr_number,
270 branch,
271 worktree_path,
272 target_dir,
273 })
274 }
275
276 async fn record_worktree(&self, run_id: &str, worktree: &git::Worktree) -> Result<()> {
277 let conn = self.db.lock().await;
278 db::runs::update_run_worktree(
279 &conn,
280 run_id,
281 &worktree.branch,
282 &worktree.path.to_string_lossy(),
283 )?;
284 drop(conn);
285 Ok(())
286 }
287
    /// Push the work branch and open a draft PR for it, recording the PR
    /// number on the run row.
    ///
    /// The title/body differ by issue origin: GitHub issues get a
    /// "Resolves #N" body (which links the PR to the issue), local issues get
    /// a plain reference.
    async fn create_pr(
        &self,
        run_id: &str,
        issue: &PipelineIssue,
        branch: &str,
        repo_dir: &std::path::Path,
    ) -> Result<u32> {
        let (pr_title, pr_body) = match issue.source {
            IssueOrigin::Github => (
                format!("fix(#{}): {}", issue.number, issue.title),
                format!(
                    "Resolves #{}\n\nAutomated by [oven](https://github.com/clayharmon/oven-cli).",
                    issue.number
                ),
            ),
            IssueOrigin::Local => (
                format!("fix: {}", issue.title),
                format!(
                    "From local issue #{}\n\nAutomated by [oven](https://github.com/clayharmon/oven-cli).",
                    issue.number
                ),
            ),
        };

        // The branch must exist on the remote before a PR can target it.
        git::push_branch(repo_dir, branch).await?;
        let pr_number =
            self.github.create_draft_pr_in(&pr_title, branch, &pr_body, repo_dir).await?;

        {
            let conn = self.db.lock().await;
            db::runs::update_run_pr(&conn, run_id, pr_number)?;
        }

        info!(run_id = %run_id, pr = pr_number, "draft PR created");
        Ok(pr_number)
    }
324
    /// Record the terminal state of a run and do the matching issue
    /// bookkeeping.
    ///
    /// On success: transition the issue label to "complete" and, for local
    /// issues or multi-repo targets, close the issue with a pointer to the PR
    /// (same-repo GitHub issues are not closed here — see `create_pr`'s
    /// "Resolves #N" body and `run_steps`' post-merge close). On failure:
    /// post the error to the PR, best-effort transition the label to
    /// "failed", and store the error message on the run row.
    async fn finalize_run(
        &self,
        run_id: &str,
        issue: &PipelineIssue,
        pr_number: u32,
        result: &Result<()>,
        target_dir: &std::path::Path,
    ) -> Result<()> {
        let (final_status, error_msg) = match result {
            Ok(()) => {
                self.issues
                    .transition(
                        issue.number,
                        &self.config.labels.cooking,
                        &self.config.labels.complete,
                    )
                    .await?;

                let should_close =
                    issue.source == IssueOrigin::Local || issue.target_repo.is_some();

                if should_close {
                    // Cross-repo PRs are referenced "repo#N" so the link
                    // resolves from the issue's repository.
                    let comment = issue.target_repo.as_ref().map_or_else(
                        || format!("Implemented in #{pr_number}"),
                        |repo_name| format!("Implemented in {repo_name}#{pr_number}"),
                    );
                    if let Err(e) = self.issues.close(issue.number, Some(&comment)).await {
                        // Closing is best-effort; the run still counts as
                        // complete.
                        warn!(
                            run_id = %run_id,
                            error = %e,
                            "failed to close issue"
                        );
                    }
                }

                (RunStatus::Complete, None)
            }
            Err(e) => {
                warn!(run_id = %run_id, error = %e, "pipeline failed");
                github::safe_comment(
                    &self.github,
                    pr_number,
                    &format_pipeline_failure(e),
                    target_dir,
                )
                .await;
                // Label transition is best-effort on the failure path; the
                // stored error message is the source of truth.
                let _ = self
                    .issues
                    .transition(
                        issue.number,
                        &self.config.labels.cooking,
                        &self.config.labels.failed,
                    )
                    .await;
                (RunStatus::Failed, Some(format!("{e:#}")))
            }
        };

        let conn = self.db.lock().await;
        db::runs::finish_run(&conn, run_id, final_status, error_msg.as_deref())
    }
389
390 async fn run_steps(
391 &self,
392 run_id: &str,
393 ctx: &AgentContext,
394 worktree_path: &std::path::Path,
395 auto_merge: bool,
396 target_dir: &std::path::Path,
397 ) -> Result<()> {
398 self.check_cancelled()?;
399
400 self.update_status(run_id, RunStatus::Implementing).await?;
402 let impl_prompt = agents::implementer::build_prompt(ctx)?;
403 let impl_result =
404 self.run_agent(run_id, AgentRole::Implementer, &impl_prompt, worktree_path, 1).await?;
405
406 git::push_branch(worktree_path, &ctx.branch).await?;
407
408 if let Some(pr_number) = ctx.pr_number {
410 let body = build_pr_body(&impl_result.output, ctx);
411 if let Err(e) =
412 self.github.edit_pr_in(pr_number, &pr_title(ctx), &body, target_dir).await
413 {
414 warn!(run_id = %run_id, error = %e, "failed to update PR description");
415 }
416 if let Err(e) = self.github.mark_pr_ready_in(pr_number, target_dir).await {
417 warn!(run_id = %run_id, error = %e, "failed to mark PR ready");
418 }
419 }
420
421 if let Some(pr_number) = ctx.pr_number {
423 let summary = extract_impl_summary(&impl_result.output);
424 github::safe_comment(
425 &self.github,
426 pr_number,
427 &format_impl_comment(&summary),
428 target_dir,
429 )
430 .await;
431 }
432
433 self.run_review_fix_loop(run_id, ctx, worktree_path, target_dir).await?;
435
436 self.check_cancelled()?;
438 info!(run_id = %run_id, base = %ctx.base_branch, "rebasing onto base branch");
439 let rebase_outcome =
440 self.rebase_with_agent_fallback(run_id, ctx, worktree_path, target_dir).await?;
441
442 if let Some(pr_number) = ctx.pr_number {
443 github::safe_comment(
444 &self.github,
445 pr_number,
446 &format_rebase_comment(&rebase_outcome),
447 target_dir,
448 )
449 .await;
450 }
451
452 if let RebaseOutcome::Failed(ref msg) = rebase_outcome {
453 anyhow::bail!("rebase failed: {msg}");
454 }
455
456 git::force_push_branch(worktree_path, &ctx.branch).await?;
457
458 if auto_merge {
460 let pr_number = ctx.pr_number.context("no PR number for merge step")?;
461 self.update_status(run_id, RunStatus::Merging).await?;
462
463 self.merge_with_retry(run_id, ctx, worktree_path, target_dir, pr_number).await?;
464
465 if ctx.target_repo.is_none() && ctx.issue_source == "github" {
468 if let Err(e) = self
469 .github
470 .close_issue(ctx.issue_number, Some(&format!("Implemented in #{pr_number}")))
471 .await
472 {
473 warn!(run_id = %run_id, error = %e, "failed to close issue after merge");
474 }
475 }
476
477 github::safe_comment(&self.github, pr_number, &format_merge_comment(), target_dir)
478 .await;
479 } else if let Some(pr_number) = ctx.pr_number {
480 github::safe_comment(&self.github, pr_number, &format_ready_comment(), target_dir)
481 .await;
482 }
483
484 Ok(())
485 }
486
    /// Merge the PR, retrying up to three times when GitHub reports the PR as
    /// not mergeable (per `is_not_mergeable`), re-rebasing and force-pushing
    /// between attempts.
    ///
    /// Non-retryable merge errors propagate immediately with context; running
    /// out of attempts is an error as well.
    async fn merge_with_retry(
        &self,
        run_id: &str,
        ctx: &AgentContext,
        worktree_path: &std::path::Path,
        target_dir: &std::path::Path,
        pr_number: u32,
    ) -> Result<()> {
        const MAX_MERGE_ATTEMPTS: u32 = 3;

        for attempt in 1..=MAX_MERGE_ATTEMPTS {
            self.check_cancelled()?;

            match self
                .github
                .merge_pr_in(pr_number, &self.config.pipeline.merge_strategy, target_dir)
                .await
            {
                Ok(()) => {
                    info!(run_id = %run_id, pr = pr_number, attempt, "PR merged");
                    return Ok(());
                }
                // Only re-rebase on the retryable "not mergeable" error, and
                // never on the final attempt.
                Err(e) if attempt < MAX_MERGE_ATTEMPTS && is_not_mergeable(&e) => {
                    warn!(
                        run_id = %run_id, pr = pr_number, attempt,
                        "PR not mergeable, re-rebasing and retrying"
                    );

                    let rebase_outcome = self
                        .rebase_with_agent_fallback(run_id, ctx, worktree_path, target_dir)
                        .await?;

                    if let RebaseOutcome::Failed(ref msg) = rebase_outcome {
                        anyhow::bail!("merge retry rebase failed: {msg}");
                    }

                    git::force_push_branch(worktree_path, &ctx.branch).await?;
                    // Brief pause before retrying the merge — presumably to
                    // let GitHub recompute mergeability after the push.
                    tokio::time::sleep(std::time::Duration::from_secs(5)).await;
                }
                Err(e) => return Err(e.context("merge PR failed")),
            }
        }

        anyhow::bail!("PR #{pr_number} not mergeable after {MAX_MERGE_ATTEMPTS} rebase attempts");
    }
535
    /// Alternate reviewer and fixer agents for up to three cycles.
    ///
    /// Each cycle: review → store findings → stop early if nothing
    /// actionable (severity above Info) remains. The third cycle never runs
    /// the fixer; leftover findings are posted to the PR instead. A reviewer
    /// agent failure skips review (with a PR note) rather than failing the
    /// run; fixer behavior is handled inside `run_fix_step`.
    async fn run_review_fix_loop(
        &self,
        run_id: &str,
        ctx: &AgentContext,
        worktree_path: &std::path::Path,
        target_dir: &std::path::Path,
    ) -> Result<()> {
        // HEAD recorded just before the most recent fix step; passed into the
        // reviewer prompt on subsequent cycles (None on cycle 1).
        let mut pre_fix_ref: Option<String> = None;

        for cycle in 1..=3 {
            self.check_cancelled()?;

            self.update_status(run_id, RunStatus::Reviewing).await?;

            let (prior_addressed, prior_disputes, prior_unresolved) =
                self.gather_prior_findings(run_id, cycle).await?;

            let review_prompt = agents::reviewer::build_prompt(
                ctx,
                &prior_addressed,
                &prior_disputes,
                &prior_unresolved,
                pre_fix_ref.as_deref(),
            )?;

            let review_result = match self
                .run_agent(run_id, AgentRole::Reviewer, &review_prompt, worktree_path, cycle)
                .await
            {
                Ok(result) => result,
                Err(e) => {
                    // A broken reviewer should not sink the pipeline; note it
                    // on the PR and continue without review.
                    warn!(run_id = %run_id, cycle, error = %e, "reviewer agent failed, skipping review");
                    if let Some(pr_number) = ctx.pr_number {
                        github::safe_comment(
                            &self.github,
                            pr_number,
                            &format_review_skipped_comment(cycle, &e),
                            target_dir,
                        )
                        .await;
                    }
                    return Ok(());
                }
            };

            let review_output = parse_review_output(&review_result.output);
            self.store_findings(run_id, &review_output.findings).await?;

            // Info-level findings are stored but never block the run.
            let actionable: Vec<_> =
                review_output.findings.iter().filter(|f| f.severity != Severity::Info).collect();

            if let Some(pr_number) = ctx.pr_number {
                github::safe_comment(
                    &self.github,
                    pr_number,
                    &format_review_comment(cycle, &actionable),
                    target_dir,
                )
                .await;
            }

            if actionable.is_empty() {
                info!(run_id = %run_id, cycle, "review clean");
                return Ok(());
            }

            info!(run_id = %run_id, cycle, findings = actionable.len(), "review found issues");

            if cycle == 3 {
                // Out of fix budget: surface what's left instead of fixing.
                if let Some(pr_number) = ctx.pr_number {
                    let comment = format_unresolved_comment(&actionable);
                    github::safe_comment(&self.github, pr_number, &comment, target_dir).await;
                } else {
                    warn!(run_id = %run_id, "no PR number, cannot post unresolved findings");
                }
                return Ok(());
            }

            pre_fix_ref = Some(git::head_sha(worktree_path).await?);

            self.run_fix_step(run_id, ctx, worktree_path, target_dir, cycle).await?;
        }

        Ok(())
    }
624
625 async fn gather_prior_findings(
627 &self,
628 run_id: &str,
629 cycle: u32,
630 ) -> Result<(Vec<ReviewFinding>, Vec<ReviewFinding>, Vec<ReviewFinding>)> {
631 if cycle <= 1 {
632 return Ok((Vec::new(), Vec::new(), Vec::new()));
633 }
634
635 let conn = self.db.lock().await;
636 let all_resolved = db::agent_runs::get_resolved_findings(&conn, run_id)?;
637 let all_unresolved = db::agent_runs::get_unresolved_findings(&conn, run_id)?;
638 drop(conn);
639
640 let (mut addressed, disputed): (Vec<_>, Vec<_>) = all_resolved.into_iter().partition(|f| {
641 f.dispute_reason.as_deref().is_some_and(|r| r.starts_with("ADDRESSED: "))
642 });
643
644 for f in &mut addressed {
646 if let Some(ref mut reason) = f.dispute_reason {
647 if let Some(stripped) = reason.strip_prefix("ADDRESSED: ") {
648 *reason = stripped.to_string();
649 }
650 }
651 }
652
653 Ok((addressed, disputed, all_unresolved))
654 }
655
    /// Run the fixer agent on the current unresolved findings and reconcile
    /// its output with the database.
    ///
    /// Degradation paths, in order: no actionable findings → skip with a PR
    /// note; fixer agent fails → skip with a PR note; fixer ran but emitted
    /// no structured output → infer resolution from git commits made since
    /// `pre_fix_head`, or mark everything not actionable if nothing was
    /// committed. The branch is pushed at the end regardless.
    async fn run_fix_step(
        &self,
        run_id: &str,
        ctx: &AgentContext,
        worktree_path: &std::path::Path,
        target_dir: &std::path::Path,
        cycle: u32,
    ) -> Result<()> {
        self.check_cancelled()?;
        self.update_status(run_id, RunStatus::Fixing).await?;

        let actionable = self.filter_actionable_findings(run_id).await?;

        if actionable.is_empty() {
            info!(run_id = %run_id, cycle, "no actionable findings for fixer, skipping");
            if let Some(pr_number) = ctx.pr_number {
                github::safe_comment(
                    &self.github,
                    pr_number,
                    &format!(
                        "### Fix skipped (cycle {cycle})\n\n\
                         No actionable findings (all findings lacked file paths).\
                         {COMMENT_FOOTER}"
                    ),
                    target_dir,
                )
                .await;
            }
            return Ok(());
        }

        // Snapshot HEAD so we can later tell whether the fixer committed
        // anything.
        let pre_fix_head = git::head_sha(worktree_path).await?;

        let fix_prompt = agents::fixer::build_prompt(ctx, &actionable)?;

        let fix_result =
            match self.run_agent(run_id, AgentRole::Fixer, &fix_prompt, worktree_path, cycle).await
            {
                Ok(result) => result,
                Err(e) => {
                    // A failed fixer skips this cycle rather than failing the
                    // run.
                    warn!(run_id = %run_id, cycle, error = %e, "fixer agent failed, skipping fix");
                    if let Some(pr_number) = ctx.pr_number {
                        github::safe_comment(
                            &self.github,
                            pr_number,
                            &format_fix_skipped_comment(cycle, &e),
                            target_dir,
                        )
                        .await;
                    }
                    return Ok(());
                }
            };

        let fixer_output = parse_fixer_output(&fix_result.output);
        let fixer_did_nothing =
            fixer_output.addressed.is_empty() && fixer_output.disputed.is_empty();

        // Only count commits when the output was empty — that's the only case
        // where git is consulted as a fallback signal.
        let new_commits = if fixer_did_nothing {
            git::commit_count_since(worktree_path, &pre_fix_head).await.unwrap_or(0)
        } else {
            0
        };

        if fixer_did_nothing {
            if new_commits > 0 {
                warn!(
                    run_id = %run_id, cycle, commits = new_commits,
                    "fixer output unparseable but commits exist, inferring addressed from git"
                );
                self.infer_addressed_from_git(run_id, &actionable, worktree_path, &pre_fix_head)
                    .await?;
            } else {
                // No output and no commits: resolve everything so the same
                // findings don't recirculate forever.
                warn!(
                    run_id = %run_id, cycle,
                    "fixer produced no output and no commits, marking findings not actionable"
                );
                let conn = self.db.lock().await;
                for f in &actionable {
                    db::agent_runs::resolve_finding(
                        &conn,
                        f.id,
                        "ADDRESSED: fixer could not act on this finding (no commits, no output)",
                    )?;
                }
                drop(conn);
            }
        } else {
            self.process_fixer_results(run_id, &actionable, &fixer_output).await?;
        }

        if let Some(pr_number) = ctx.pr_number {
            let comment = if fixer_did_nothing {
                format_fixer_recovery_comment(cycle, new_commits)
            } else {
                format_fix_comment(cycle, &fixer_output)
            };
            github::safe_comment(&self.github, pr_number, &comment, target_dir).await;
        }

        git::push_branch(worktree_path, &ctx.branch).await?;
        Ok(())
    }
775
776 async fn process_fixer_results(
784 &self,
785 run_id: &str,
786 findings_sent_to_fixer: &[ReviewFinding],
787 fixer_output: &agents::FixerOutput,
788 ) -> Result<()> {
789 if fixer_output.disputed.is_empty() && fixer_output.addressed.is_empty() {
790 return Ok(());
791 }
792
793 let conn = self.db.lock().await;
794
795 for dispute in &fixer_output.disputed {
796 let idx = dispute.finding.saturating_sub(1) as usize;
797 if let Some(finding) = findings_sent_to_fixer.get(idx) {
798 db::agent_runs::resolve_finding(&conn, finding.id, &dispute.reason)?;
799 info!(
800 run_id = %run_id,
801 finding_id = finding.id,
802 reason = %dispute.reason,
803 "finding disputed by fixer, marked resolved"
804 );
805 }
806 }
807
808 for action in &fixer_output.addressed {
809 let idx = action.finding.saturating_sub(1) as usize;
810 if let Some(finding) = findings_sent_to_fixer.get(idx) {
811 let reason = format!("ADDRESSED: {}", action.action);
812 db::agent_runs::resolve_finding(&conn, finding.id, &reason)?;
813 info!(
814 run_id = %run_id,
815 finding_id = finding.id,
816 action = %action.action,
817 "finding addressed by fixer, marked resolved"
818 );
819 }
820 }
821
822 drop(conn);
823 Ok(())
824 }
825
826 async fn filter_actionable_findings(&self, run_id: &str) -> Result<Vec<ReviewFinding>> {
831 let conn = self.db.lock().await;
832 let unresolved = db::agent_runs::get_unresolved_findings(&conn, run_id)?;
833
834 let (actionable, non_actionable): (Vec<_>, Vec<_>) =
835 unresolved.into_iter().partition(|f| f.file_path.is_some());
836
837 if !non_actionable.is_empty() {
838 warn!(
839 run_id = %run_id,
840 count = non_actionable.len(),
841 "auto-resolving non-actionable findings (no file_path)"
842 );
843 for f in &non_actionable {
844 db::agent_runs::resolve_finding(
845 &conn,
846 f.id,
847 "ADDRESSED: auto-resolved -- finding has no file path, not actionable by fixer",
848 )?;
849 }
850 }
851
852 drop(conn);
853 Ok(actionable)
854 }
855
856 async fn infer_addressed_from_git(
861 &self,
862 run_id: &str,
863 findings: &[ReviewFinding],
864 worktree_path: &std::path::Path,
865 pre_fix_head: &str,
866 ) -> Result<()> {
867 let changed_files =
868 git::changed_files_since(worktree_path, pre_fix_head).await.unwrap_or_default();
869
870 let conn = self.db.lock().await;
871 for f in findings {
872 let was_touched =
873 f.file_path.as_ref().is_some_and(|fp| changed_files.iter().any(|cf| cf == fp));
874
875 let reason = if was_touched {
876 "ADDRESSED: inferred from git -- fixer modified this file (no structured output)"
877 } else {
878 "ADDRESSED: inferred from git -- file not modified (no structured output)"
879 };
880
881 db::agent_runs::resolve_finding(&conn, f.id, reason)?;
882 info!(
883 run_id = %run_id,
884 finding_id = f.id,
885 file = ?f.file_path,
886 touched = was_touched,
887 "finding resolved via git inference"
888 );
889 }
890 drop(conn);
891 Ok(())
892 }
893
894 async fn store_findings(&self, run_id: &str, findings: &[agents::Finding]) -> Result<()> {
895 let conn = self.db.lock().await;
896 let agent_runs = db::agent_runs::get_agent_runs_for_run(&conn, run_id)?;
897 let reviewer_run_id = agent_runs
898 .iter()
899 .rev()
900 .find_map(|ar| if ar.agent == "reviewer" { Some(ar.id) } else { None });
901 if let Some(ar_id) = reviewer_run_id {
902 for finding in findings {
903 let db_finding = ReviewFinding {
904 id: 0,
905 agent_run_id: ar_id,
906 severity: finding.severity.to_string(),
907 category: finding.category.clone(),
908 file_path: finding.file_path.clone(),
909 line_number: finding.line_number,
910 message: finding.message.clone(),
911 resolved: false,
912 dispute_reason: None,
913 };
914 db::agent_runs::insert_finding(&conn, &db_finding)?;
915 }
916 }
917 drop(conn);
918 Ok(())
919 }
920
    /// Rebase the work branch onto the base branch, using the implementer
    /// agent to resolve conflicts when git reports any.
    ///
    /// Runs up to `MAX_REBASE_ROUNDS` rounds; each round prompts the agent
    /// with the conflicting files, verifies no conflict markers remain, then
    /// attempts `git rebase --continue` (which may surface a new set of
    /// conflicts for the next round). Unrecoverable states abort the rebase
    /// and are reported as `RebaseOutcome::Failed` rather than `Err`.
    async fn rebase_with_agent_fallback(
        &self,
        run_id: &str,
        ctx: &AgentContext,
        worktree_path: &std::path::Path,
        target_dir: &std::path::Path,
    ) -> Result<RebaseOutcome> {
        const MAX_REBASE_ROUNDS: u32 = 5;

        let outcome = git::start_rebase(worktree_path, &ctx.base_branch).await;

        // Only the conflicts case needs agent help; any other outcome
        // (clean, failed, ...) is returned as-is.
        let mut conflicting_files = match outcome {
            RebaseOutcome::RebaseConflicts(files) => files,
            other => return Ok(other),
        };

        for round in 1..=MAX_REBASE_ROUNDS {
            self.check_cancelled()?;
            info!(
                run_id = %run_id,
                round,
                files = ?conflicting_files,
                "rebase conflicts, attempting agent resolution"
            );

            if let Some(pr_number) = ctx.pr_number {
                github::safe_comment(
                    &self.github,
                    pr_number,
                    &format_rebase_conflict_comment(round, &conflicting_files),
                    target_dir,
                )
                .await;
            }

            let conflict_prompt = format!(
                "You are resolving rebase conflicts. The following files have unresolved \
                 conflict markers (<<<<<<< / ======= / >>>>>>> markers):\n\n{}\n\n\
                 Open each file, find the conflict markers, and resolve them by choosing \
                 the correct code. Remove all conflict markers. Do NOT add new features \
                 or refactor -- just resolve the conflicts so the code compiles and tests pass.\n\n\
                 After resolving, run any test/lint commands if available:\n\
                 - Test: {}\n\
                 - Lint: {}",
                conflicting_files.iter().map(|f| format!("- {f}")).collect::<Vec<_>>().join("\n"),
                ctx.test_command.as_deref().unwrap_or("(none)"),
                ctx.lint_command.as_deref().unwrap_or("(none)"),
            );

            if let Err(e) = self
                .run_agent(run_id, AgentRole::Implementer, &conflict_prompt, worktree_path, 1)
                .await
            {
                warn!(run_id = %run_id, error = %e, "conflict resolution agent failed");
                git::abort_rebase(worktree_path).await;
                return Ok(RebaseOutcome::Failed(format!(
                    "agent conflict resolution failed: {e:#}"
                )));
            }

            // Verify the agent actually removed every conflict marker before
            // trying to continue the rebase.
            let remaining =
                git::files_with_conflict_markers(worktree_path, &conflicting_files).await;
            if !remaining.is_empty() {
                warn!(
                    run_id = %run_id,
                    remaining = ?remaining,
                    "agent did not resolve all conflicts"
                );
                git::abort_rebase(worktree_path).await;
                return Ok(RebaseOutcome::Failed(format!(
                    "agent could not resolve conflicts in: {}",
                    remaining.join(", ")
                )));
            }

            match git::rebase_continue(worktree_path, &conflicting_files).await {
                Ok(None) => {
                    info!(run_id = %run_id, "agent resolved rebase conflicts");
                    return Ok(RebaseOutcome::AgentResolved);
                }
                Ok(Some(new_conflicts)) => {
                    // Continuing surfaced another conflict set; loop again.
                    conflicting_files = new_conflicts;
                }
                Err(e) => {
                    git::abort_rebase(worktree_path).await;
                    return Ok(RebaseOutcome::Failed(format!("rebase --continue failed: {e:#}")));
                }
            }
        }

        git::abort_rebase(worktree_path).await;
        Ok(RebaseOutcome::Failed(format!(
            "rebase conflicts persisted after {MAX_REBASE_ROUNDS} resolution rounds"
        )))
    }
1026
    /// Invoke one agent (with retry) and record its lifecycle in the
    /// database: a "running" row up front, then either a success record
    /// (cost/turn accounting plus budget enforcement via
    /// `record_agent_success`) or a "failed" record carrying the error.
    async fn run_agent(
        &self,
        run_id: &str,
        role: AgentRole,
        prompt: &str,
        working_dir: &std::path::Path,
        cycle: u32,
    ) -> Result<crate::process::AgentResult> {
        let agent_run_id = self.record_agent_start(run_id, role, cycle).await?;

        info!(run_id = %run_id, agent = %role, cycle, "agent starting");

        let invocation = AgentInvocation {
            role,
            prompt: prompt.to_string(),
            working_dir: working_dir.to_path_buf(),
            max_turns: Some(self.config.pipeline.turn_limit),
            model: self.config.models.model_for(role.as_str()).map(String::from),
        };

        let result = process::run_with_retry(self.runner.as_ref(), &invocation).await;

        match &result {
            Ok(agent_result) => {
                self.record_agent_success(run_id, agent_run_id, agent_result).await?;
            }
            Err(e) => {
                // Record the failure with zero cost/turns and the rendered
                // error message.
                let conn = self.db.lock().await;
                db::agent_runs::finish_agent_run(
                    &conn,
                    agent_run_id,
                    "failed",
                    0.0,
                    0,
                    None,
                    Some(&format!("{e:#}")),
                    None,
                )?;
            }
        }

        result
    }
1070
1071 async fn record_agent_start(&self, run_id: &str, role: AgentRole, cycle: u32) -> Result<i64> {
1072 let agent_run = AgentRun {
1073 id: 0,
1074 run_id: run_id.to_string(),
1075 agent: role.to_string(),
1076 cycle,
1077 status: "running".to_string(),
1078 cost_usd: 0.0,
1079 turns: 0,
1080 started_at: chrono::Utc::now().to_rfc3339(),
1081 finished_at: None,
1082 output_summary: None,
1083 error_message: None,
1084 raw_output: None,
1085 };
1086 let conn = self.db.lock().await;
1087 db::agent_runs::insert_agent_run(&conn, &agent_run)
1088 }
1089
1090 async fn record_agent_success(
1091 &self,
1092 run_id: &str,
1093 agent_run_id: i64,
1094 agent_result: &crate::process::AgentResult,
1095 ) -> Result<()> {
1096 let conn = self.db.lock().await;
1097 db::agent_runs::finish_agent_run(
1098 &conn,
1099 agent_run_id,
1100 "complete",
1101 agent_result.cost_usd,
1102 agent_result.turns,
1103 Some(&truncate(&agent_result.output, 500)),
1104 None,
1105 Some(&agent_result.output),
1106 )?;
1107
1108 let new_cost = db::runs::increment_run_cost(&conn, run_id, agent_result.cost_usd)?;
1109 drop(conn);
1110
1111 if new_cost > self.config.pipeline.cost_budget {
1112 anyhow::bail!(
1113 "cost budget exceeded: ${:.2} > ${:.2}",
1114 new_cost,
1115 self.config.pipeline.cost_budget
1116 );
1117 }
1118 Ok(())
1119 }
1120
    /// Update the run row's status column.
    async fn update_status(&self, run_id: &str, status: RunStatus) -> Result<()> {
        let conn = self.db.lock().await;
        db::runs::update_run_status(&conn, run_id, status)
    }
1125
1126 fn check_cancelled(&self) -> Result<()> {
1127 if self.cancel_token.is_cancelled() {
1128 anyhow::bail!("pipeline cancelled");
1129 }
1130 Ok(())
1131 }
1132}
1133
1134fn is_not_mergeable(err: &anyhow::Error) -> bool {
1140 let msg = format!("{err:#}");
1141 msg.contains("not mergeable") && !msg.contains("not allowed")
1142}
1143
/// Footer appended to every PR comment the pipeline posts
/// (U+1F35E is the bread emoji).
const COMMENT_FOOTER: &str =
    "\n---\nAutomated by [oven](https://github.com/clayharmon/oven-cli) \u{1F35E}";
1146
1147fn format_unresolved_comment(actionable: &[&agents::Finding]) -> String {
1148 let mut comment = String::from(
1149 "### Unresolved review findings\n\n\
1150 The review-fix loop ran 2 cycles but these findings remain unresolved.\n",
1151 );
1152
1153 for severity in &[Severity::Critical, Severity::Warning] {
1155 let group: Vec<_> = actionable.iter().filter(|f| &f.severity == severity).collect();
1156 if group.is_empty() {
1157 continue;
1158 }
1159 let heading = match severity {
1160 Severity::Critical => "Critical",
1161 Severity::Warning => "Warning",
1162 Severity::Info => unreachable!("loop only iterates Critical and Warning"),
1163 };
1164 let _ = writeln!(comment, "\n#### {heading}\n");
1165 for f in group {
1166 let loc = match (&f.file_path, f.line_number) {
1167 (Some(path), Some(line)) => format!(" in `{path}:{line}`"),
1168 (Some(path), None) => format!(" in `{path}`"),
1169 _ => String::new(),
1170 };
1171 let _ = writeln!(comment, "- **{}**{} -- {}", f.category, loc, f.message);
1172 }
1173 }
1174
1175 comment.push_str(COMMENT_FOOTER);
1176 comment
1177}
1178
1179fn format_impl_comment(summary: &str) -> String {
1180 format!("### Implementation complete\n\n{summary}{COMMENT_FOOTER}")
1181}
1182
1183fn format_review_comment(cycle: u32, actionable: &[&agents::Finding]) -> String {
1184 if actionable.is_empty() {
1185 return format!(
1186 "### Review complete (cycle {cycle})\n\n\
1187 Clean review, no actionable findings.{COMMENT_FOOTER}"
1188 );
1189 }
1190
1191 let mut comment = format!(
1192 "### Review complete (cycle {cycle})\n\n\
1193 **{count} finding{s}:**\n",
1194 count = actionable.len(),
1195 s = if actionable.len() == 1 { "" } else { "s" },
1196 );
1197
1198 for f in actionable {
1199 let loc = match (&f.file_path, f.line_number) {
1200 (Some(path), Some(line)) => format!(" in `{path}:{line}`"),
1201 (Some(path), None) => format!(" in `{path}`"),
1202 _ => String::new(),
1203 };
1204 let _ = writeln!(
1205 comment,
1206 "- [{sev}] **{cat}**{loc} -- {msg}",
1207 sev = f.severity,
1208 cat = f.category,
1209 msg = f.message,
1210 );
1211 }
1212
1213 comment.push_str(COMMENT_FOOTER);
1214 comment
1215}
1216
1217fn format_fix_comment(cycle: u32, fixer: &agents::FixerOutput) -> String {
1218 let addressed = fixer.addressed.len();
1219 let disputed = fixer.disputed.len();
1220 format!(
1221 "### Fix complete (cycle {cycle})\n\n\
1222 **Addressed:** {addressed} finding{a_s}\n\
1223 **Disputed:** {disputed} finding{d_s}{COMMENT_FOOTER}",
1224 a_s = if addressed == 1 { "" } else { "s" },
1225 d_s = if disputed == 1 { "" } else { "s" },
1226 )
1227}
1228
1229fn format_rebase_conflict_comment(round: u32, conflicting_files: &[String]) -> String {
1230 format!(
1231 "### Resolving rebase conflicts (round {round})\n\n\
1232 Attempting agent-assisted resolution for {} conflicting file{}: \
1233 {}{COMMENT_FOOTER}",
1234 conflicting_files.len(),
1235 if conflicting_files.len() == 1 { "" } else { "s" },
1236 conflicting_files.iter().map(|f| format!("`{f}`")).collect::<Vec<_>>().join(", "),
1237 )
1238}
1239
1240fn format_fixer_recovery_comment(cycle: u32, new_commits: u32) -> String {
1241 if new_commits > 0 {
1242 format!(
1243 "### Fix complete (cycle {cycle})\n\n\
1244 Fixer made {new_commits} commit{s} but did not produce structured output. \
1245 Addressed findings inferred from changed files.{COMMENT_FOOTER}",
1246 s = if new_commits == 1 { "" } else { "s" },
1247 )
1248 } else {
1249 format!(
1250 "### Fix complete (cycle {cycle})\n\n\
1251 Fixer could not act on the findings (no code changes made). \
1252 Findings marked as not actionable.{COMMENT_FOOTER}"
1253 )
1254 }
1255}
1256
1257fn format_review_skipped_comment(cycle: u32, error: &anyhow::Error) -> String {
1258 format!(
1259 "### Review skipped (cycle {cycle})\n\n\
1260 Reviewer agent encountered an error. Continuing without review.\n\n\
1261 **Error:** {error:#}{COMMENT_FOOTER}"
1262 )
1263}
1264
1265fn format_fix_skipped_comment(cycle: u32, error: &anyhow::Error) -> String {
1266 format!(
1267 "### Fix skipped (cycle {cycle})\n\n\
1268 Fixer agent encountered an error. Continuing to next cycle.\n\n\
1269 **Error:** {error:#}{COMMENT_FOOTER}"
1270 )
1271}
1272
1273fn format_rebase_comment(outcome: &RebaseOutcome) -> String {
1274 match outcome {
1275 RebaseOutcome::Clean => {
1276 format!("### Rebase\n\nRebased onto base branch cleanly.{COMMENT_FOOTER}")
1277 }
1278 RebaseOutcome::AgentResolved => {
1279 format!(
1280 "### Rebase\n\n\
1281 Rebase had conflicts. Agent resolved them.{COMMENT_FOOTER}"
1282 )
1283 }
1284 RebaseOutcome::RebaseConflicts(_) => {
1285 format!(
1286 "### Rebase\n\n\
1287 Rebase conflicts present (awaiting resolution).{COMMENT_FOOTER}"
1288 )
1289 }
1290 RebaseOutcome::Failed(msg) => {
1291 format!(
1292 "### Rebase failed\n\n\
1293 Could not rebase onto the base branch.\n\n\
1294 **Error:** {msg}{COMMENT_FOOTER}"
1295 )
1296 }
1297 }
1298}
1299
1300fn format_ready_comment() -> String {
1301 format!(
1302 "### Ready for review\n\nPipeline complete. This PR is ready for manual review.{COMMENT_FOOTER}"
1303 )
1304}
1305
1306fn format_merge_comment() -> String {
1307 format!("### Merged\n\nPipeline complete. PR has been merged.{COMMENT_FOOTER}")
1308}
1309
1310fn format_pipeline_failure(e: &anyhow::Error) -> String {
1311 format!(
1312 "## Pipeline failed\n\n\
1313 **Error:** {e:#}\n\n\
1314 The pipeline hit an unrecoverable error. Check the run logs for detail, \
1315 or re-run the pipeline.\
1316 {COMMENT_FOOTER}"
1317 )
1318}
1319
1320fn pr_title(ctx: &AgentContext) -> String {
1325 let prefix = infer_commit_type(&ctx.issue_title);
1326 if ctx.issue_source == "github" {
1327 format!("{prefix}(#{}): {}", ctx.issue_number, ctx.issue_title)
1328 } else {
1329 format!("{prefix}: {}", ctx.issue_title)
1330 }
1331}
1332
/// Infers a conventional-commit type prefix from an issue title.
///
/// Heuristic keyword match on the lowercased title; anything that does
/// not match a known pattern defaults to `fix`.
fn infer_commit_type(title: &str) -> &'static str {
    let lower = title.to_lowercase();
    // "add test…" must be checked before the feat heuristic: such titles
    // also contain "add ", so the original placement of this clause
    // (`|| lower.starts_with("add test")` inside the `test` branch) was
    // unreachable dead code — the feat branch always fired first.
    if lower.starts_with("add test") {
        "test"
    } else if lower.starts_with("feat") || lower.contains("add ") || lower.contains("implement ") {
        "feat"
    } else if lower.starts_with("refactor") {
        "refactor"
    } else if lower.starts_with("docs") || lower.starts_with("document") {
        "docs"
    } else if lower.starts_with("test") {
        "test"
    } else if lower.starts_with("chore") {
        "chore"
    } else {
        "fix"
    }
}
1350
1351fn build_pr_body(impl_output: &str, ctx: &AgentContext) -> String {
1353 let issue_ref = if ctx.issue_source == "github" {
1354 format!("Resolves #{}", ctx.issue_number)
1355 } else {
1356 format!("From local issue #{}", ctx.issue_number)
1357 };
1358
1359 let summary = extract_impl_summary(impl_output);
1360
1361 let mut body = String::new();
1362 let _ = writeln!(body, "{issue_ref}\n");
1363 let _ = write!(body, "{summary}");
1364 body.push_str(COMMENT_FOOTER);
1365 body
1366}
1367
1368fn extract_impl_summary(output: &str) -> String {
1374 let idx = output.find("## PR Template").or_else(|| output.find("## Changes Made"));
1376
1377 if let Some(idx) = idx {
1378 let summary = output[idx..].trim();
1379 let summary = summary.strip_prefix("## PR Template").map_or(summary, |s| s.trim_start());
1381 if summary.len() <= 4000 {
1382 return summary.to_string();
1383 }
1384 return truncate(summary, 4000);
1385 }
1386 String::from("*No implementation summary available. See commit history for details.*")
1389}
1390
1391fn new_run(run_id: &str, issue: &PipelineIssue, auto_merge: bool) -> Run {
1392 Run {
1393 id: run_id.to_string(),
1394 issue_number: issue.number,
1395 status: RunStatus::Pending,
1396 pr_number: None,
1397 branch: None,
1398 worktree_path: None,
1399 cost_usd: 0.0,
1400 auto_merge,
1401 started_at: chrono::Utc::now().to_rfc3339(),
1402 finished_at: None,
1403 error_message: None,
1404 complexity: "full".to_string(),
1405 issue_source: issue.source.to_string(),
1406 }
1407}
1408
1409pub fn generate_run_id() -> String {
1411 uuid::Uuid::new_v4().to_string()[..8].to_string()
1412}
1413
/// Truncates `s` to at most `max_len` bytes, appending "..." when cut.
///
/// The cut point is walked back to the nearest UTF-8 character boundary
/// so the byte slice never panics on multibyte text.
pub(crate) fn truncate(s: &str, max_len: usize) -> String {
    if s.len() <= max_len {
        return s.to_string();
    }
    // Reserve room for the ellipsis, then back up to a char boundary.
    // Byte offset 0 is always a boundary, so `find` cannot fail.
    let limit = max_len.saturating_sub(3);
    let cut = (0..=limit).rev().find(|&i| s.is_char_boundary(i)).unwrap_or(0);
    let mut out = String::with_capacity(cut + 3);
    out.push_str(&s[..cut]);
    out.push_str("...");
    out
}
1429
#[cfg(test)]
mod tests {
    //! Unit tests for the comment formatters, run-id generation, commit
    //! type inference, and PR title/body construction in this module.

    use proptest::prelude::*;

    use super::*;

    // Property-style check: ids are always 8 hex chars. `_seed` is unused;
    // it only exists so proptest reruns the body many times.
    proptest! {
        #[test]
        fn run_ids_always_8_hex_chars(_seed in any::<u64>()) {
            let id = generate_run_id();
            prop_assert_eq!(id.len(), 8);
            prop_assert!(id.chars().all(|c| c.is_ascii_hexdigit()));
        }
    }

    #[test]
    fn run_id_is_8_hex_chars() {
        let id = generate_run_id();
        assert_eq!(id.len(), 8);
        assert!(id.chars().all(|c| c.is_ascii_hexdigit()));
    }

    #[test]
    fn run_ids_are_unique() {
        // 100 draws from an 8-hex-char space: collision chance is negligible.
        let ids: Vec<_> = (0..100).map(|_| generate_run_id()).collect();
        let unique: std::collections::HashSet<_> = ids.iter().collect();
        assert_eq!(ids.len(), unique.len());
    }

    #[test]
    fn truncate_short_string() {
        assert_eq!(truncate("hello", 10), "hello");
    }

    #[test]
    fn truncate_long_string() {
        let long = "a".repeat(100);
        let result = truncate(&long, 10);
        assert_eq!(result.len(), 10); assert!(result.ends_with("..."));
    }

    // `truncate` slices by byte offset; these cases verify it backs up to a
    // char boundary instead of panicking mid-character.
    #[test]
    fn truncate_multibyte_does_not_panic() {
        let s = "πππ";
        let result = truncate(s, 8);
        assert!(result.ends_with("..."));
        assert!(result.starts_with("π"));
        assert!(result.len() <= 8);
    }

    #[test]
    fn truncate_cjk_boundary() {
        let s = "δ½ ε₯½δΈηζ΅θ―"; let result = truncate(s, 10);
        assert!(result.ends_with("..."));
        assert!(result.starts_with("δ½ ε₯½"));
        assert!(result.len() <= 10);
    }

    #[test]
    fn extract_impl_summary_finds_changes_made() {
        let output = "Some preamble text\n\n## Changes Made\n- src/foo.rs: added bar\n\n## Tests Added\n- tests/foo.rs: bar test\n";
        let summary = extract_impl_summary(output);
        assert!(summary.starts_with("## Changes Made"));
        assert!(summary.contains("added bar"));
        assert!(summary.contains("## Tests Added"));
    }

    #[test]
    fn extract_impl_summary_prefers_pr_template() {
        let output = "Preamble\n\n## PR Template\n## Summary\n- Added auth flow\n\n## Testing\n- Unit tests pass\n";
        let summary = extract_impl_summary(output);
        // The "## PR Template" heading itself must be stripped from the result.
        assert!(!summary.contains("## PR Template"));
        assert!(summary.starts_with("## Summary"));
        assert!(summary.contains("Added auth flow"));
    }

    #[test]
    fn extract_impl_summary_fallback_on_no_heading() {
        let output = "just some raw agent output with no structure";
        let summary = extract_impl_summary(output);
        assert_eq!(
            summary,
            "*No implementation summary available. See commit history for details.*"
        );
    }

    #[test]
    fn extract_impl_summary_empty_output() {
        let placeholder = "*No implementation summary available. See commit history for details.*";
        assert_eq!(extract_impl_summary(""), placeholder);
        assert_eq!(extract_impl_summary(" "), placeholder);
    }

    // PR body tests build a full AgentContext fixture by hand; only
    // issue_number / issue_source vary between the github and local cases.
    #[test]
    fn build_pr_body_github_issue() {
        let ctx = AgentContext {
            issue_number: 42,
            issue_title: "fix the thing".to_string(),
            issue_body: String::new(),
            branch: "oven/issue-42".to_string(),
            pr_number: Some(10),
            test_command: None,
            lint_command: None,
            review_findings: None,
            cycle: 1,
            target_repo: None,
            issue_source: "github".to_string(),
            base_branch: "main".to_string(),
        };
        let body = build_pr_body("## Changes Made\n- added stuff", &ctx);
        assert!(body.contains("Resolves #42"));
        assert!(body.contains("## Changes Made"));
        assert!(body.contains("Automated by [oven]"));
    }

    #[test]
    fn build_pr_body_local_issue() {
        let ctx = AgentContext {
            issue_number: 7,
            issue_title: "local thing".to_string(),
            issue_body: String::new(),
            branch: "oven/issue-7".to_string(),
            pr_number: Some(10),
            test_command: None,
            lint_command: None,
            review_findings: None,
            cycle: 1,
            target_repo: None,
            issue_source: "local".to_string(),
            base_branch: "main".to_string(),
        };
        let body = build_pr_body("## Changes Made\n- did local stuff", &ctx);
        assert!(body.contains("From local issue #7"));
        assert!(body.contains("## Changes Made"));
    }

    // Title format: github issues get a "(#N)" reference, local ones do not.
    #[test]
    fn pr_title_github() {
        let ctx = AgentContext {
            issue_number: 42,
            issue_title: "fix the thing".to_string(),
            issue_body: String::new(),
            branch: String::new(),
            pr_number: None,
            test_command: None,
            lint_command: None,
            review_findings: None,
            cycle: 1,
            target_repo: None,
            issue_source: "github".to_string(),
            base_branch: "main".to_string(),
        };
        assert_eq!(pr_title(&ctx), "fix(#42): fix the thing");
    }

    #[test]
    fn pr_title_local() {
        let ctx = AgentContext {
            issue_number: 7,
            issue_title: "local thing".to_string(),
            issue_body: String::new(),
            branch: String::new(),
            pr_number: None,
            test_command: None,
            lint_command: None,
            review_findings: None,
            cycle: 1,
            target_repo: None,
            issue_source: "local".to_string(),
            base_branch: "main".to_string(),
        };
        assert_eq!(pr_title(&ctx), "fix: local thing");
    }

    #[test]
    fn infer_commit_type_feat() {
        assert_eq!(infer_commit_type("Add dark mode support"), "feat");
        assert_eq!(infer_commit_type("Implement caching layer"), "feat");
        assert_eq!(infer_commit_type("Feature: new dashboard"), "feat");
    }

    #[test]
    fn infer_commit_type_refactor() {
        assert_eq!(infer_commit_type("Refactor auth middleware"), "refactor");
    }

    #[test]
    fn infer_commit_type_docs() {
        assert_eq!(infer_commit_type("Document the API endpoints"), "docs");
        assert_eq!(infer_commit_type("Docs: update README"), "docs");
    }

    #[test]
    fn infer_commit_type_defaults_to_fix() {
        assert_eq!(infer_commit_type("Null pointer in config parser"), "fix");
        assert_eq!(infer_commit_type("Crash on empty input"), "fix");
    }

    #[test]
    fn pr_title_feat_github() {
        let ctx = AgentContext {
            issue_number: 10,
            issue_title: "Add dark mode".to_string(),
            issue_body: String::new(),
            branch: String::new(),
            pr_number: None,
            test_command: None,
            lint_command: None,
            review_findings: None,
            cycle: 1,
            target_repo: None,
            issue_source: "github".to_string(),
            base_branch: "main".to_string(),
        };
        assert_eq!(pr_title(&ctx), "feat(#10): Add dark mode");
    }

    // Comment-formatter tests below check for key substrings (headings,
    // locations, footer) rather than the full comment text.
    #[test]
    fn format_unresolved_comment_groups_by_severity() {
        let findings = [
            agents::Finding {
                severity: Severity::Critical,
                category: "bug".to_string(),
                file_path: Some("src/main.rs".to_string()),
                line_number: Some(42),
                message: "null pointer".to_string(),
            },
            agents::Finding {
                severity: Severity::Warning,
                category: "style".to_string(),
                file_path: None,
                line_number: None,
                message: "missing docs".to_string(),
            },
        ];
        let refs: Vec<_> = findings.iter().collect();
        let comment = format_unresolved_comment(&refs);
        assert!(comment.contains("#### Critical"));
        assert!(comment.contains("#### Warning"));
        assert!(comment.contains("**bug** in `src/main.rs:42` -- null pointer"));
        assert!(comment.contains("**style** -- missing docs"));
        assert!(comment.contains("Automated by [oven]"));
    }

    #[test]
    fn format_unresolved_comment_skips_empty_severity_groups() {
        let findings = [agents::Finding {
            severity: Severity::Warning,
            category: "testing".to_string(),
            file_path: Some("src/lib.rs".to_string()),
            line_number: None,
            message: "missing edge case test".to_string(),
        }];
        let refs: Vec<_> = findings.iter().collect();
        let comment = format_unresolved_comment(&refs);
        assert!(!comment.contains("#### Critical"));
        assert!(comment.contains("#### Warning"));
    }

    #[test]
    fn format_pipeline_failure_includes_error() {
        let err = anyhow::anyhow!("cost budget exceeded: $12.50 > $10.00");
        let comment = format_pipeline_failure(&err);
        assert!(comment.contains("## Pipeline failed"));
        assert!(comment.contains("cost budget exceeded"));
        assert!(comment.contains("Automated by [oven]"));
    }

    #[test]
    fn format_impl_comment_includes_summary() {
        let comment = format_impl_comment("Added login endpoint with tests");
        assert!(comment.contains("### Implementation complete"));
        assert!(comment.contains("Added login endpoint with tests"));
        assert!(comment.contains("Automated by [oven]"));
    }

    #[test]
    fn format_review_comment_clean() {
        let comment = format_review_comment(1, &[]);
        assert!(comment.contains("### Review complete (cycle 1)"));
        assert!(comment.contains("Clean review"));
    }

    #[test]
    fn format_review_comment_with_findings() {
        let findings = [agents::Finding {
            severity: Severity::Critical,
            category: "bug".to_string(),
            file_path: Some("src/main.rs".to_string()),
            line_number: Some(42),
            message: "null pointer".to_string(),
        }];
        let refs: Vec<_> = findings.iter().collect();
        let comment = format_review_comment(1, &refs);
        assert!(comment.contains("### Review complete (cycle 1)"));
        assert!(comment.contains("1 finding"));
        assert!(comment.contains("[critical]"));
        assert!(comment.contains("`src/main.rs:42`"));
    }

    #[test]
    fn format_fix_comment_counts() {
        let fixer = agents::FixerOutput {
            addressed: vec![
                agents::FixerAction { finding: 1, action: "fixed it".to_string() },
                agents::FixerAction { finding: 2, action: "also fixed".to_string() },
            ],
            disputed: vec![agents::FixerDispute { finding: 3, reason: "not a bug".to_string() }],
        };
        let comment = format_fix_comment(1, &fixer);
        assert!(comment.contains("### Fix complete (cycle 1)"));
        assert!(comment.contains("Addressed:** 2 findings"));
        // Trailing "\n" pins the singular form (no "s" before the newline).
        assert!(comment.contains("Disputed:** 1 finding\n"));
    }

    #[test]
    fn format_rebase_comment_variants() {
        let clean = format_rebase_comment(&RebaseOutcome::Clean);
        assert!(clean.contains("Rebased onto base branch cleanly"));

        let agent = format_rebase_comment(&RebaseOutcome::AgentResolved);
        assert!(agent.contains("Agent resolved them"));

        let conflicts =
            format_rebase_comment(&RebaseOutcome::RebaseConflicts(vec!["foo.rs".into()]));
        assert!(conflicts.contains("awaiting resolution"));

        let failed = format_rebase_comment(&RebaseOutcome::Failed("conflict in foo.rs".into()));
        assert!(failed.contains("Rebase failed"));
        assert!(failed.contains("conflict in foo.rs"));
    }

    #[test]
    fn format_ready_comment_content() {
        let comment = format_ready_comment();
        assert!(comment.contains("### Ready for review"));
        assert!(comment.contains("manual review"));
    }

    #[test]
    fn format_merge_comment_content() {
        let comment = format_merge_comment();
        assert!(comment.contains("### Merged"));
    }

    // is_not_mergeable must match only the specific GraphQL "not mergeable"
    // error, not other merge-related or unrelated failures.
    #[test]
    fn is_not_mergeable_detects_graphql_error() {
        let err = anyhow::anyhow!("GraphQL: Pull Request is not mergeable (mergePullRequest)");
        assert!(is_not_mergeable(&err));
    }

    #[test]
    fn is_not_mergeable_rejects_not_allowed_errors() {
        let err = anyhow::anyhow!(
            "GraphQL: Squash merges are not allowed on this repository. (mergePullRequest)"
        );
        assert!(!is_not_mergeable(&err));
    }

    #[test]
    fn is_not_mergeable_rejects_unrelated_errors() {
        let err = anyhow::anyhow!("network timeout");
        assert!(!is_not_mergeable(&err));
    }
}