1pub mod fixer;
2pub mod implementer;
3pub mod merger;
4pub mod planner;
5pub mod reviewer;
6
7use std::path::PathBuf;
8
9use anyhow::Result;
10use serde::{Deserialize, de::DeserializeOwned};
11
12use crate::{db::ReviewFinding, process::CommandRunner};
13
/// The distinct agent roles in the pipeline, each with its own scoped
/// tool allowlist and stable lowercase identifier.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum AgentRole {
    Planner,
    Implementer,
    Reviewer,
    Fixer,
    Merger,
}

impl AgentRole {
    /// Tools this role is permitted to use.
    ///
    /// Read-only roles (planner, reviewer) get inspection tools only;
    /// writing roles (implementer, fixer) additionally get edit and shell
    /// access; the merger is restricted to shell commands.
    ///
    /// Returns `&'static` data so callers can keep the slice without
    /// borrowing the role value.
    pub const fn allowed_tools(&self) -> &'static [&'static str] {
        match self {
            Self::Planner | Self::Reviewer => &["Read", "Glob", "Grep"],
            Self::Implementer | Self::Fixer => &["Read", "Write", "Edit", "Glob", "Grep", "Bash"],
            Self::Merger => &["Bash"],
        }
    }

    /// Stable lowercase identifier (also the `Display` / `FromStr` form).
    pub const fn as_str(&self) -> &'static str {
        match self {
            Self::Planner => "planner",
            Self::Implementer => "implementer",
            Self::Reviewer => "reviewer",
            Self::Fixer => "fixer",
            Self::Merger => "merger",
        }
    }

    /// Owned copies of [`Self::allowed_tools`], for APIs that take `Vec<String>`.
    pub fn tools_as_strings(&self) -> Vec<String> {
        self.allowed_tools().iter().map(|&tool| tool.to_string()).collect()
    }
}
47
48impl std::fmt::Display for AgentRole {
49 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
50 f.write_str(self.as_str())
51 }
52}
53
54impl std::str::FromStr for AgentRole {
55 type Err = anyhow::Error;
56
57 fn from_str(s: &str) -> Result<Self, Self::Err> {
58 match s {
59 "planner" => Ok(Self::Planner),
60 "implementer" => Ok(Self::Implementer),
61 "reviewer" => Ok(Self::Reviewer),
62 "fixer" => Ok(Self::Fixer),
63 "merger" => Ok(Self::Merger),
64 other => anyhow::bail!("unknown agent role: {other}"),
65 }
66 }
67}
68
/// Everything an agent needs to know about the issue it is working on.
#[derive(Debug, Clone)]
pub struct AgentContext {
    // Issue number being worked on.
    pub issue_number: u32,
    // Issue title text.
    pub issue_title: String,
    // Full issue body/description text.
    pub issue_body: String,
    // Working branch for this issue.
    pub branch: String,
    // Pull request number, once one exists.
    pub pr_number: Option<u32>,
    // Project test command, if one is configured.
    pub test_command: Option<String>,
    // Project lint command, if one is configured.
    pub lint_command: Option<String>,
    // Findings from a prior review cycle, if any -- presumably consumed by
    // the fixer role; verify against callers.
    pub review_findings: Option<Vec<ReviewFinding>>,
    // Current review/fix cycle number.
    pub cycle: u32,
    // Target repository override, when set.
    pub target_repo: Option<String>,
    // Where the issue came from (NOTE(review): exact values not visible
    // here -- confirm against callers).
    pub issue_source: String,
    // Branch the work is based on / merged back into.
    pub base_branch: String,
}
92
/// A single request to run an agent: role, prompt, and execution limits.
pub struct AgentInvocation {
    // Role whose tool allowlist governs this run (see `invoke_agent`).
    pub role: AgentRole,
    // Full prompt text passed to the agent.
    pub prompt: String,
    // Directory the agent process runs in.
    pub working_dir: PathBuf,
    // Optional cap on agent turns, forwarded to the runner as-is.
    pub max_turns: Option<u32>,
}
100
101pub async fn invoke_agent<R: CommandRunner>(
103 runner: &R,
104 invocation: &AgentInvocation,
105) -> Result<crate::process::AgentResult> {
106 runner
107 .run_claude(
108 &invocation.prompt,
109 &invocation.role.tools_as_strings(),
110 &invocation.working_dir,
111 invocation.max_turns,
112 )
113 .await
114}
115
116#[derive(Debug, Clone, Deserialize, PartialEq, Eq)]
118#[serde(rename_all = "lowercase")]
119pub enum Complexity {
120 Simple,
121 Full,
122}
123
124impl std::fmt::Display for Complexity {
125 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
126 f.write_str(match self {
127 Self::Simple => "simple",
128 Self::Full => "full",
129 })
130 }
131}
132
133impl std::str::FromStr for Complexity {
134 type Err = anyhow::Error;
135
136 fn from_str(s: &str) -> Result<Self, Self::Err> {
137 match s {
138 "simple" => Ok(Self::Simple),
139 "full" => Ok(Self::Full),
140 other => anyhow::bail!("unknown complexity: {other}"),
141 }
142 }
143}
144
/// Legacy planner output: issues grouped into sequential batches.
///
/// Converted to the graph form by `batches_to_graph_output`, where each
/// batch implicitly depends on all earlier batches.
#[derive(Debug, Deserialize)]
pub struct PlannerOutput {
    pub batches: Vec<Batch>,
    // Total issue count reported by the planner; 0 when absent.
    #[serde(default)]
    pub total_issues: u32,
    // Planner's parallelism estimate; 0 when absent.
    #[serde(default)]
    pub parallel_capacity: u32,
}
154
/// One batch of issues the planner scheduled together.
#[derive(Debug, Deserialize)]
pub struct Batch {
    // Batch sequence number as emitted by the planner.
    pub batch: u32,
    pub issues: Vec<PlannedIssue>,
    // Planner's free-text justification; empty when absent.
    #[serde(default)]
    pub reasoning: String,
}
162
/// A single issue inside a legacy planner [`Batch`].
///
/// All fields except `number` are optional in the planner's JSON.
#[derive(Debug, Deserialize)]
pub struct PlannedIssue {
    pub number: u32,
    #[serde(default)]
    pub title: String,
    // Code area label assigned by the planner.
    #[serde(default)]
    pub area: String,
    // Files the planner predicts this issue will touch.
    #[serde(default)]
    pub predicted_files: Vec<String>,
    #[serde(default)]
    pub has_migration: bool,
    // Missing complexity means "full" (see `default_full`).
    #[serde(default = "default_full")]
    pub complexity: Complexity,
}
177
/// Serde `default` hook: issues whose planner output omits `complexity`
/// are treated as [`Complexity::Full`].
const fn default_full() -> Complexity {
    Complexity::Full
}
181
/// Graph-shaped planner output: nodes with explicit dependency edges.
///
/// Preferred over the legacy batch format; see `parse_planner_graph_output`.
#[derive(Debug, Deserialize)]
pub struct PlannerGraphOutput {
    pub nodes: Vec<PlannedNode>,
    // Total issue count reported by the planner; 0 when absent.
    #[serde(default)]
    pub total_issues: u32,
    // Planner's parallelism estimate; 0 when absent.
    #[serde(default)]
    pub parallel_capacity: u32,
}
191
/// A node in the planner's dependency graph.
///
/// Mirrors [`PlannedIssue`] plus explicit `depends_on` edges; all fields
/// except `number` are optional in the planner's JSON.
#[derive(Debug, Deserialize)]
pub struct PlannedNode {
    pub number: u32,
    #[serde(default)]
    pub title: String,
    // Code area label assigned by the planner.
    #[serde(default)]
    pub area: String,
    // Files the planner predicts this issue will touch.
    #[serde(default)]
    pub predicted_files: Vec<String>,
    #[serde(default)]
    pub has_migration: bool,
    // Missing complexity means "full" (see `default_full`).
    #[serde(default = "default_full")]
    pub complexity: Complexity,
    // Issue numbers that must complete before this one.
    #[serde(default)]
    pub depends_on: Vec<u32>,
    // Planner's free-text justification; empty when absent.
    #[serde(default)]
    pub reasoning: String,
}
211
/// A dependency-graph node paired with its current execution state.
#[derive(Debug, Clone)]
pub struct GraphContextNode {
    // Issue number this node represents.
    pub number: u32,
    pub title: String,
    // Current lifecycle state (see `crate::db::graph::NodeState`).
    pub state: crate::db::graph::NodeState,
    // Code area label from planning.
    pub area: String,
    // Files predicted to be touched.
    pub predicted_files: Vec<String>,
    pub has_migration: bool,
    // Issue numbers that must complete before this one.
    pub depends_on: Vec<u32>,
    // Target repository override, when set.
    pub target_repo: Option<String>,
}
224
225pub fn parse_planner_output(text: &str) -> Option<PlannerOutput> {
230 extract_json(text)
231}
232
233pub fn parse_planner_graph_output(text: &str) -> Option<PlannerGraphOutput> {
236 if let Some(output) = extract_json::<PlannerGraphOutput>(text) {
238 return Some(output);
239 }
240
241 let legacy: PlannerOutput = extract_json(text)?;
243 Some(batches_to_graph_output(&legacy))
244}
245
246fn batches_to_graph_output(legacy: &PlannerOutput) -> PlannerGraphOutput {
250 let mut nodes = Vec::new();
251 let mut prior_batch_issues: Vec<u32> = Vec::new();
252
253 for batch in &legacy.batches {
254 let depends_on = prior_batch_issues.clone();
255 for pi in &batch.issues {
256 nodes.push(PlannedNode {
257 number: pi.number,
258 title: pi.title.clone(),
259 area: pi.area.clone(),
260 predicted_files: pi.predicted_files.clone(),
261 has_migration: pi.has_migration,
262 complexity: pi.complexity.clone(),
263 depends_on: depends_on.clone(),
264 reasoning: batch.reasoning.clone(),
265 });
266 }
267 prior_batch_issues.extend(batch.issues.iter().map(|pi| pi.number));
268 }
269
270 PlannerGraphOutput {
271 total_issues: legacy.total_issues,
272 parallel_capacity: legacy.parallel_capacity,
273 nodes,
274 }
275}
276
/// Parsed reviewer output: a list of findings plus a free-text summary.
#[derive(Debug, Deserialize)]
pub struct ReviewOutput {
    pub findings: Vec<Finding>,
    // Review summary; empty when absent from the JSON.
    #[serde(default)]
    pub summary: String,
}
284
/// A single review finding.
#[derive(Debug, Deserialize)]
pub struct Finding {
    pub severity: Severity,
    // Short category label (e.g. "review-parse" for synthetic findings).
    pub category: String,
    // File the finding refers to, when the location is known.
    #[serde(default)]
    pub file_path: Option<String>,
    // Line within `file_path`, when known.
    #[serde(default)]
    pub line_number: Option<u32>,
    pub message: String,
}
295
296#[derive(Debug, Deserialize, PartialEq, Eq)]
297#[serde(rename_all = "lowercase")]
298pub enum Severity {
299 Critical,
300 Warning,
301 Info,
302}
303
304impl Severity {
305 pub const fn as_str(&self) -> &str {
306 match self {
307 Self::Critical => "critical",
308 Self::Warning => "warning",
309 Self::Info => "info",
310 }
311 }
312}
313
314impl std::fmt::Display for Severity {
315 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
316 f.write_str(self.as_str())
317 }
318}
319
/// Structured fixer report: which findings were addressed vs disputed.
///
/// `Default` (both lists empty) is used when the fixer's output cannot be
/// parsed -- see `parse_fixer_output`.
#[derive(Debug, Deserialize, Default)]
pub struct FixerOutput {
    // Findings the fixer reports having acted on.
    #[serde(default)]
    pub addressed: Vec<FixerAction>,
    // Findings the fixer pushed back on, with reasons.
    #[serde(default)]
    pub disputed: Vec<FixerDispute>,
}
328
/// One finding the fixer addressed, and what it did about it.
#[derive(Debug, Deserialize)]
pub struct FixerAction {
    // Identifier of the finding being addressed.
    pub finding: u32,
    // Short description of the action taken.
    pub action: String,
}
335
/// One finding the fixer disputed rather than fixed.
#[derive(Debug, Deserialize)]
pub struct FixerDispute {
    // Identifier of the finding being disputed.
    pub finding: u32,
    // Fixer's justification for not acting on it.
    pub reason: String,
}
342
343pub fn parse_fixer_output(text: &str) -> FixerOutput {
349 extract_json::<FixerOutput>(text).unwrap_or_default()
350}
351
352pub fn parse_review_output(text: &str) -> ReviewOutput {
359 extract_json(text).unwrap_or_else(|| {
360 tracing::warn!("reviewer returned unparseable output, emitting warning finding");
361 ReviewOutput {
362 findings: vec![Finding {
363 severity: Severity::Warning,
364 category: "review-parse".to_string(),
365 file_path: None,
366 line_number: None,
367 message: "reviewer output was unparseable -- manual review recommended".to_string(),
368 }],
369 summary: "review output unparseable, manual review recommended".to_string(),
370 }
371 })
372}
373
374fn extract_json<T: DeserializeOwned>(text: &str) -> Option<T> {
382 if let Ok(val) = serde_json::from_str::<T>(text) {
383 return Some(val);
384 }
385
386 if let Some(json_str) = extract_json_from_fences(text) {
387 if let Ok(val) = serde_json::from_str::<T>(json_str) {
388 return Some(val);
389 }
390 }
391
392 let start = text.find('{')?;
393 let end = text.rfind('}')?;
394 if end > start { serde_json::from_str::<T>(&text[start..=end]).ok() } else { None }
395}
396
/// Return the contents of the first complete code fence in `text`.
///
/// Markers are tried in order of specificity, so a ```` ```json ```` fence
/// wins over a bare ```` ``` ```` fence. A fence with no closing
/// ```` ``` ```` yields nothing for that marker.
fn extract_json_from_fences(text: &str) -> Option<&str> {
    ["```json\n", "```json\r\n", "```\n", "```\r\n"]
        .iter()
        .find_map(|marker| {
            let open = text.find(marker)?;
            let body = &text[open + marker.len()..];
            body.find("```").map(|close| &body[..close])
        })
}
409
#[cfg(test)]
mod tests {
    use proptest::prelude::*;

    use super::*;

    /// Every `AgentRole` variant, for exhaustive roundtrip checks.
    const ALL_ROLES: [AgentRole; 5] = [
        AgentRole::Planner,
        AgentRole::Implementer,
        AgentRole::Reviewer,
        AgentRole::Fixer,
        AgentRole::Merger,
    ];

    proptest! {
        #[test]
        fn agent_role_display_fromstr_roundtrip(idx in 0..5usize) {
            let role = ALL_ROLES[idx];
            let s = role.to_string();
            let parsed: AgentRole = s.parse().unwrap();
            assert_eq!(role, parsed);
        }

        #[test]
        fn arbitrary_strings_never_panic_on_role_parse(s in "\\PC{1,50}") {
            let _ = s.parse::<AgentRole>();
        }

        #[test]
        fn parse_review_output_never_panics(text in "\\PC{0,500}") {
            let _ = parse_review_output(&text);
        }

        #[test]
        fn parse_fixer_output_never_panics(text in "\\PC{0,500}") {
            let _ = parse_fixer_output(&text);
        }

        #[test]
        fn valid_review_json_always_parses(
            severity in prop_oneof!["critical", "warning", "info"],
            category in "[a-z]{3,15}",
            message in "[a-zA-Z0-9 ]{1,50}",
        ) {
            let json = format!(
                r#"{{"findings":[{{"severity":"{severity}","category":"{category}","message":"{message}"}}],"summary":"test"}}"#
            );
            let output = parse_review_output(&json);
            assert_eq!(output.findings.len(), 1);
            assert_eq!(output.findings[0].category, category);
        }

        #[test]
        fn review_json_in_fences_parses(
            severity in prop_oneof!["critical", "warning", "info"],
            category in "[a-z]{3,15}",
            message in "[a-zA-Z0-9 ]{1,50}",
            prefix in "[a-zA-Z ]{0,30}",
            suffix in "[a-zA-Z ]{0,30}",
        ) {
            let json = format!(
                r#"{{"findings":[{{"severity":"{severity}","category":"{category}","message":"{message}"}}],"summary":"ok"}}"#
            );
            let text = format!("{prefix}\n```json\n{json}\n```\n{suffix}");
            let output = parse_review_output(&text);
            assert_eq!(output.findings.len(), 1);
        }
    }

    #[test]
    fn tool_scoping_per_role() {
        assert_eq!(AgentRole::Planner.allowed_tools(), &["Read", "Glob", "Grep"]);
        assert_eq!(
            AgentRole::Implementer.allowed_tools(),
            &["Read", "Write", "Edit", "Glob", "Grep", "Bash"]
        );
        assert_eq!(AgentRole::Reviewer.allowed_tools(), &["Read", "Glob", "Grep"]);
        assert_eq!(
            AgentRole::Fixer.allowed_tools(),
            &["Read", "Write", "Edit", "Glob", "Grep", "Bash"]
        );
        assert_eq!(AgentRole::Merger.allowed_tools(), &["Bash"]);
    }

    // Non-proptest duplicate of the roundtrip property, kept for plain runs.
    #[test]
    fn role_display_roundtrip() {
        for role in ALL_ROLES {
            let s = role.to_string();
            let parsed: AgentRole = s.parse().unwrap();
            assert_eq!(role, parsed);
        }
    }

    #[test]
    fn parse_review_output_valid_json() {
        let json = r#"{"findings":[{"severity":"critical","category":"bug","file_path":"src/main.rs","line_number":10,"message":"null pointer"}],"summary":"one issue found"}"#;
        let output = parse_review_output(json);
        assert_eq!(output.findings.len(), 1);
        assert_eq!(output.findings[0].severity, Severity::Critical);
        assert_eq!(output.findings[0].message, "null pointer");
        assert_eq!(output.summary, "one issue found");
    }

    #[test]
    fn parse_review_output_in_code_fences() {
        let text = r#"Here are my findings:

```json
{"findings":[{"severity":"warning","category":"style","message":"missing docs"}],"summary":"ok"}
```

That's it."#;
        let output = parse_review_output(text);
        assert_eq!(output.findings.len(), 1);
        assert_eq!(output.findings[0].severity, Severity::Warning);
    }

    #[test]
    fn parse_review_output_embedded_json() {
        let text = r#"I reviewed the code and found: {"findings":[{"severity":"info","category":"note","message":"looks fine"}],"summary":"clean"} end of review"#;
        let output = parse_review_output(text);
        assert_eq!(output.findings.len(), 1);
    }

    #[test]
    fn parse_review_output_no_json_returns_warning() {
        let text = "The code looks great, no issues found.";
        let output = parse_review_output(text);
        assert_eq!(output.findings.len(), 1);
        assert_eq!(output.findings[0].severity, Severity::Warning);
        assert_eq!(output.findings[0].category, "review-parse");
        assert!(output.summary.contains("unparseable"));
    }

    #[test]
    fn parse_review_output_malformed_json_returns_warning() {
        let text = r#"{"findings": [{"broken json"#;
        let output = parse_review_output(text);
        assert_eq!(output.findings.len(), 1);
        assert_eq!(output.findings[0].severity, Severity::Warning);
    }

    #[test]
    fn parse_planner_output_valid_json() {
        let json = r#"{
            "batches": [{
                "batch": 1,
                "issues": [{
                    "number": 42,
                    "title": "Add login",
                    "area": "auth",
                    "predicted_files": ["src/auth.rs"],
                    "has_migration": false,
                    "complexity": "simple"
                }],
                "reasoning": "standalone issue"
            }],
            "total_issues": 1,
            "parallel_capacity": 1
        }"#;
        let output = parse_planner_output(json).unwrap();
        assert_eq!(output.batches.len(), 1);
        assert_eq!(output.batches[0].issues.len(), 1);
        assert_eq!(output.batches[0].issues[0].number, 42);
        assert_eq!(output.batches[0].issues[0].complexity, Complexity::Simple);
        assert!(!output.batches[0].issues[0].has_migration);
    }

    #[test]
    fn parse_planner_output_in_code_fences() {
        let text = r#"Here's the plan:

```json
{
  "batches": [{"batch": 1, "issues": [{"number": 1, "complexity": "full"}], "reasoning": "ok"}],
  "total_issues": 1,
  "parallel_capacity": 1
}
```

That's the plan."#;
        let output = parse_planner_output(text).unwrap();
        assert_eq!(output.batches.len(), 1);
        assert_eq!(output.batches[0].issues[0].complexity, Complexity::Full);
    }

    #[test]
    fn parse_planner_output_malformed_returns_none() {
        assert!(parse_planner_output("not json at all").is_none());
        assert!(parse_planner_output(r#"{"batches": "broken"}"#).is_none());
        assert!(parse_planner_output("").is_none());
    }

    #[test]
    fn complexity_deserializes_from_strings() {
        let simple: Complexity = serde_json::from_str(r#""simple""#).unwrap();
        assert_eq!(simple, Complexity::Simple);
        let full: Complexity = serde_json::from_str(r#""full""#).unwrap();
        assert_eq!(full, Complexity::Full);
    }

    #[test]
    fn complexity_display_roundtrip() {
        for c in [Complexity::Simple, Complexity::Full] {
            let s = c.to_string();
            let parsed: Complexity = s.parse().unwrap();
            assert_eq!(c, parsed);
        }
    }

    #[test]
    fn planner_output_defaults_complexity_to_full() {
        let json = r#"{"batches": [{"batch": 1, "issues": [{"number": 5}], "reasoning": ""}], "total_issues": 1, "parallel_capacity": 1}"#;
        let output = parse_planner_output(json).unwrap();
        assert_eq!(output.batches[0].issues[0].complexity, Complexity::Full);
    }

    #[test]
    fn planner_output_with_multiple_batches() {
        let json = r#"{
            "batches": [
                {"batch": 1, "issues": [{"number": 1, "complexity": "simple"}, {"number": 2, "complexity": "simple"}], "reasoning": "independent"},
                {"batch": 2, "issues": [{"number": 3, "complexity": "full"}], "reasoning": "depends on batch 1"}
            ],
            "total_issues": 3,
            "parallel_capacity": 2
        }"#;
        let output = parse_planner_output(json).unwrap();
        assert_eq!(output.batches.len(), 2);
        assert_eq!(output.batches[0].issues.len(), 2);
        assert_eq!(output.batches[1].issues.len(), 1);
        assert_eq!(output.total_issues, 3);
    }

    #[test]
    fn parse_graph_output_new_format() {
        let json = r#"{
            "nodes": [
                {"number": 1, "title": "A", "area": "cli", "depends_on": [], "complexity": "simple"},
                {"number": 2, "title": "B", "area": "db", "depends_on": [1], "complexity": "full"}
            ],
            "total_issues": 2,
            "parallel_capacity": 2
        }"#;
        let output = parse_planner_graph_output(json).unwrap();
        assert_eq!(output.nodes.len(), 2);
        assert!(output.nodes[0].depends_on.is_empty());
        assert_eq!(output.nodes[1].depends_on, vec![1]);
    }

    #[test]
    fn parse_graph_output_falls_back_to_batch_format() {
        let json = r#"{
            "batches": [
                {"batch": 1, "issues": [{"number": 1, "complexity": "simple"}, {"number": 2, "complexity": "simple"}], "reasoning": "ok"},
                {"batch": 2, "issues": [{"number": 3, "complexity": "full"}], "reasoning": "deps"}
            ],
            "total_issues": 3,
            "parallel_capacity": 2
        }"#;
        let output = parse_planner_graph_output(json).unwrap();
        assert_eq!(output.nodes.len(), 3);
        assert!(output.nodes[0].depends_on.is_empty());
        assert!(output.nodes[1].depends_on.is_empty());
        let mut deps = output.nodes[2].depends_on.clone();
        deps.sort_unstable();
        assert_eq!(deps, vec![1, 2]);
    }

    #[test]
    fn parse_graph_output_malformed_returns_none() {
        assert!(parse_planner_graph_output("garbage").is_none());
    }

    #[test]
    fn batches_to_graph_three_batches() {
        let legacy = PlannerOutput {
            batches: vec![
                Batch {
                    batch: 1,
                    issues: vec![PlannedIssue {
                        number: 1,
                        title: "A".into(),
                        area: "a".into(),
                        predicted_files: vec![],
                        has_migration: false,
                        complexity: Complexity::Simple,
                    }],
                    reasoning: String::new(),
                },
                Batch {
                    batch: 2,
                    issues: vec![PlannedIssue {
                        number: 2,
                        title: "B".into(),
                        area: "b".into(),
                        predicted_files: vec![],
                        has_migration: false,
                        complexity: Complexity::Full,
                    }],
                    reasoning: String::new(),
                },
                Batch {
                    batch: 3,
                    issues: vec![PlannedIssue {
                        number: 3,
                        title: "C".into(),
                        area: "c".into(),
                        predicted_files: vec![],
                        has_migration: false,
                        complexity: Complexity::Full,
                    }],
                    reasoning: String::new(),
                },
            ],
            total_issues: 3,
            parallel_capacity: 1,
        };

        let output = batches_to_graph_output(&legacy);
        assert_eq!(output.nodes.len(), 3);
        assert!(output.nodes[0].depends_on.is_empty());
        assert_eq!(output.nodes[1].depends_on, vec![1]);
        let mut deps = output.nodes[2].depends_on.clone();
        deps.sort_unstable();
        assert_eq!(deps, vec![1, 2]);
    }

    #[test]
    fn parse_fixer_output_valid_json() {
        let json = r#"{"addressed":[{"finding":1,"action":"fixed"}],"disputed":[{"finding":2,"reason":"prop does not exist in v2"}]}"#;
        let output = parse_fixer_output(json);
        assert_eq!(output.addressed.len(), 1);
        assert_eq!(output.addressed[0].finding, 1);
        assert_eq!(output.disputed.len(), 1);
        assert_eq!(output.disputed[0].finding, 2);
        assert_eq!(output.disputed[0].reason, "prop does not exist in v2");
    }

    #[test]
    fn parse_fixer_output_in_code_fences() {
        let text = "I fixed everything.\n\n```json\n{\"addressed\":[{\"finding\":1,\"action\":\"added test\"}],\"disputed\":[]}\n```\n\nDone.";
        let output = parse_fixer_output(text);
        assert_eq!(output.addressed.len(), 1);
        assert!(output.disputed.is_empty());
    }

    #[test]
    fn parse_fixer_output_missing_disputed_defaults_empty() {
        let json = r#"{"addressed":[{"finding":1,"action":"fixed"}]}"#;
        let output = parse_fixer_output(json);
        assert_eq!(output.addressed.len(), 1);
        assert!(output.disputed.is_empty());
    }

    #[test]
    fn parse_fixer_output_missing_addressed_defaults_empty() {
        let json = r#"{"disputed":[{"finding":1,"reason":"API removed"}]}"#;
        let output = parse_fixer_output(json);
        assert!(output.addressed.is_empty());
        assert_eq!(output.disputed.len(), 1);
    }

    #[test]
    fn parse_fixer_output_garbage_returns_default() {
        let output = parse_fixer_output("This is just prose, no JSON here.");
        assert!(output.addressed.is_empty());
        assert!(output.disputed.is_empty());
    }

    #[test]
    fn parse_fixer_output_empty_returns_default() {
        let output = parse_fixer_output("");
        assert!(output.addressed.is_empty());
        assert!(output.disputed.is_empty());
    }
}