1use std::path::Path;
2use std::process::ExitCode;
3use std::time::Duration;
4
5use fallow_core::duplicates::DuplicationReport;
6use fallow_core::results::AnalysisResults;
7
8use super::{emit_json, normalize_uri};
9use crate::explain;
10use crate::report::grouping::{OwnershipResolver, ResultGroup};
11
12pub(super) fn print_json(
13 results: &AnalysisResults,
14 root: &Path,
15 elapsed: Duration,
16 explain: bool,
17 regression: Option<&crate::regression::RegressionOutcome>,
18 baseline_matched: Option<(usize, usize)>,
19) -> ExitCode {
20 match build_json(results, root, elapsed) {
21 Ok(mut output) => {
22 if let Some(outcome) = regression
23 && let serde_json::Value::Object(ref mut map) = output
24 {
25 map.insert("regression".to_string(), outcome.to_json());
26 }
27 if let Some((entries, matched)) = baseline_matched
28 && let serde_json::Value::Object(ref mut map) = output
29 {
30 map.insert(
31 "baseline".to_string(),
32 serde_json::json!({
33 "entries": entries,
34 "matched": matched,
35 }),
36 );
37 }
38 if explain {
39 insert_meta(&mut output, explain::check_meta());
40 }
41 emit_json(&output, "JSON")
42 }
43 Err(e) => {
44 eprintln!("Error: failed to serialize results: {e}");
45 ExitCode::from(2)
46 }
47 }
48}
49
50#[must_use]
56pub(super) fn print_grouped_json(
57 groups: &[ResultGroup],
58 original: &AnalysisResults,
59 root: &Path,
60 elapsed: Duration,
61 explain: bool,
62 resolver: &OwnershipResolver,
63) -> ExitCode {
64 let root_prefix = format!("{}/", root.display());
65
66 let group_values: Vec<serde_json::Value> = groups
67 .iter()
68 .filter_map(|group| {
69 let mut value = serde_json::to_value(&group.results).ok()?;
70 strip_root_prefix(&mut value, &root_prefix);
71 inject_actions(&mut value);
72
73 if let serde_json::Value::Object(ref mut map) = value {
74 let mut ordered = serde_json::Map::new();
77 ordered.insert("key".to_string(), serde_json::json!(group.key));
78 if let Some(ref owners) = group.owners {
79 ordered.insert("owners".to_string(), serde_json::json!(owners));
80 }
81 ordered.insert(
82 "total_issues".to_string(),
83 serde_json::json!(group.results.total_issues()),
84 );
85 for (k, v) in map.iter() {
86 ordered.insert(k.clone(), v.clone());
87 }
88 Some(serde_json::Value::Object(ordered))
89 } else {
90 Some(value)
91 }
92 })
93 .collect();
94
95 let mut output = serde_json::json!({
96 "schema_version": SCHEMA_VERSION,
97 "version": env!("CARGO_PKG_VERSION"),
98 "elapsed_ms": elapsed.as_millis() as u64,
99 "grouped_by": resolver.mode_label(),
100 "total_issues": original.total_issues(),
101 "groups": group_values,
102 });
103
104 if explain {
105 insert_meta(&mut output, explain::check_meta());
106 }
107
108 emit_json(&output, "JSON")
109}
110
111const SCHEMA_VERSION: u32 = 4;
117
118fn build_json_envelope(report_value: serde_json::Value, elapsed: Duration) -> serde_json::Value {
124 let mut map = serde_json::Map::new();
125 map.insert(
126 "schema_version".to_string(),
127 serde_json::json!(SCHEMA_VERSION),
128 );
129 map.insert(
130 "version".to_string(),
131 serde_json::json!(env!("CARGO_PKG_VERSION")),
132 );
133 map.insert(
134 "elapsed_ms".to_string(),
135 serde_json::json!(elapsed.as_millis()),
136 );
137 if let serde_json::Value::Object(report_map) = report_value {
138 for (key, value) in report_map {
139 map.insert(key, value);
140 }
141 }
142 serde_json::Value::Object(map)
143}
144
145pub fn build_json(
154 results: &AnalysisResults,
155 root: &Path,
156 elapsed: Duration,
157) -> Result<serde_json::Value, serde_json::Error> {
158 let results_value = serde_json::to_value(results)?;
159
160 let mut map = serde_json::Map::new();
161 map.insert(
162 "schema_version".to_string(),
163 serde_json::json!(SCHEMA_VERSION),
164 );
165 map.insert(
166 "version".to_string(),
167 serde_json::json!(env!("CARGO_PKG_VERSION")),
168 );
169 map.insert(
170 "elapsed_ms".to_string(),
171 serde_json::json!(elapsed.as_millis()),
172 );
173 map.insert(
174 "total_issues".to_string(),
175 serde_json::json!(results.total_issues()),
176 );
177
178 if let Some(ref ep) = results.entry_point_summary {
180 let sources: serde_json::Map<String, serde_json::Value> = ep
181 .by_source
182 .iter()
183 .map(|(k, v)| (k.replace(' ', "_"), serde_json::json!(v)))
184 .collect();
185 map.insert(
186 "entry_points".to_string(),
187 serde_json::json!({
188 "total": ep.total,
189 "sources": sources,
190 }),
191 );
192 }
193
194 let summary = serde_json::json!({
196 "total_issues": results.total_issues(),
197 "unused_files": results.unused_files.len(),
198 "unused_exports": results.unused_exports.len(),
199 "unused_types": results.unused_types.len(),
200 "unused_dependencies": results.unused_dependencies.len()
201 + results.unused_dev_dependencies.len()
202 + results.unused_optional_dependencies.len(),
203 "unused_enum_members": results.unused_enum_members.len(),
204 "unused_class_members": results.unused_class_members.len(),
205 "unresolved_imports": results.unresolved_imports.len(),
206 "unlisted_dependencies": results.unlisted_dependencies.len(),
207 "duplicate_exports": results.duplicate_exports.len(),
208 "type_only_dependencies": results.type_only_dependencies.len(),
209 "test_only_dependencies": results.test_only_dependencies.len(),
210 "circular_dependencies": results.circular_dependencies.len(),
211 "boundary_violations": results.boundary_violations.len(),
212 "stale_suppressions": results.stale_suppressions.len(),
213 });
214 map.insert("summary".to_string(), summary);
215
216 if let serde_json::Value::Object(results_map) = results_value {
217 for (key, value) in results_map {
218 map.insert(key, value);
219 }
220 }
221
222 let mut output = serde_json::Value::Object(map);
223 let root_prefix = format!("{}/", root.display());
224 strip_root_prefix(&mut output, &root_prefix);
228 inject_actions(&mut output);
229 Ok(output)
230}
231
232pub fn strip_root_prefix(value: &mut serde_json::Value, prefix: &str) {
237 match value {
238 serde_json::Value::String(s) => {
239 if let Some(rest) = s.strip_prefix(prefix) {
240 *s = rest.to_string();
241 } else {
242 let normalized = normalize_uri(s);
243 let normalized_prefix = normalize_uri(prefix);
244 if let Some(rest) = normalized.strip_prefix(&normalized_prefix) {
245 *s = rest.to_string();
246 }
247 }
248 }
249 serde_json::Value::Array(arr) => {
250 for item in arr {
251 strip_root_prefix(item, prefix);
252 }
253 }
254 serde_json::Value::Object(map) => {
255 for (_, v) in map.iter_mut() {
256 strip_root_prefix(v, prefix);
257 }
258 }
259 _ => {}
260 }
261}
262
/// How a finding of a given issue type can be suppressed.
enum SuppressKind {
    // `// fallow-ignore-next-line <kind>` comment above the offending line.
    InlineComment,
    // `// fallow-ignore-file <kind>` comment at the top of the file.
    FileComment,
    // Add the package name to `ignoreDependencies` in the fallow config.
    ConfigIgnoreDep,
}
274
/// Template describing the actions attached to each item of one issue
/// category (see `actions_for_issue_type` / `build_actions`).
struct ActionSpec {
    // Machine-readable fix action type (e.g. "remove-export").
    fix_type: &'static str,
    // Whether `fallow fix` can apply the fix automatically.
    auto_fixable: bool,
    // Human-readable description of the fix.
    description: &'static str,
    // Optional caveat attached to the fix action.
    note: Option<&'static str>,
    // Which suppression mechanism applies to this issue type.
    suppress: SuppressKind,
    // Kind token used in suppression comments (e.g. "unused-export").
    issue_kind: &'static str,
}
284
/// Maps a top-level result-array key (as emitted by `build_json`) to the
/// action template used to annotate each of its items; returns `None` for
/// keys that carry no actions (e.g. `stale_suppressions`).
fn actions_for_issue_type(key: &str) -> Option<ActionSpec> {
    match key {
        "unused_files" => Some(ActionSpec {
            fix_type: "delete-file",
            auto_fixable: false,
            description: "Delete this file",
            note: Some(
                "File deletion may remove runtime functionality not visible to static analysis",
            ),
            suppress: SuppressKind::FileComment,
            issue_kind: "unused-file",
        }),
        "unused_exports" => Some(ActionSpec {
            fix_type: "remove-export",
            auto_fixable: true,
            description: "Remove the `export` keyword from the declaration",
            note: None,
            suppress: SuppressKind::InlineComment,
            issue_kind: "unused-export",
        }),
        "unused_types" => Some(ActionSpec {
            fix_type: "remove-export",
            auto_fixable: true,
            description: "Remove the `export` (or `export type`) keyword from the type declaration",
            note: None,
            suppress: SuppressKind::InlineComment,
            issue_kind: "unused-type",
        }),
        "unused_dependencies" => Some(ActionSpec {
            fix_type: "remove-dependency",
            auto_fixable: true,
            description: "Remove from dependencies in package.json",
            note: None,
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "unused-dependency",
        }),
        "unused_dev_dependencies" => Some(ActionSpec {
            fix_type: "remove-dependency",
            auto_fixable: true,
            description: "Remove from devDependencies in package.json",
            note: None,
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "unused-dev-dependency",
        }),
        "unused_optional_dependencies" => Some(ActionSpec {
            fix_type: "remove-dependency",
            auto_fixable: true,
            description: "Remove from optionalDependencies in package.json",
            note: None,
            suppress: SuppressKind::ConfigIgnoreDep,
            // NOTE(review): reuses the plain "unused-dependency" kind rather
            // than a dedicated "unused-optional-dependency" token — confirm
            // this is intentional.
            issue_kind: "unused-dependency",
        }),
        "unused_enum_members" => Some(ActionSpec {
            fix_type: "remove-enum-member",
            auto_fixable: true,
            description: "Remove this enum member",
            note: None,
            suppress: SuppressKind::InlineComment,
            issue_kind: "unused-enum-member",
        }),
        "unused_class_members" => Some(ActionSpec {
            fix_type: "remove-class-member",
            auto_fixable: false,
            description: "Remove this class member",
            note: Some("Class member may be used via dependency injection or decorators"),
            suppress: SuppressKind::InlineComment,
            issue_kind: "unused-class-member",
        }),
        "unresolved_imports" => Some(ActionSpec {
            fix_type: "resolve-import",
            auto_fixable: false,
            description: "Fix the import specifier or install the missing module",
            note: Some("Verify the module path and check tsconfig paths configuration"),
            suppress: SuppressKind::InlineComment,
            issue_kind: "unresolved-import",
        }),
        "unlisted_dependencies" => Some(ActionSpec {
            fix_type: "install-dependency",
            auto_fixable: false,
            description: "Add this package to dependencies in package.json",
            note: Some("Verify this package should be a direct dependency before adding"),
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "unlisted-dependency",
        }),
        "duplicate_exports" => Some(ActionSpec {
            fix_type: "remove-duplicate",
            auto_fixable: false,
            description: "Keep one canonical export location and remove the others",
            note: Some("Review all locations to determine which should be the canonical export"),
            suppress: SuppressKind::InlineComment,
            issue_kind: "duplicate-export",
        }),
        "type_only_dependencies" => Some(ActionSpec {
            fix_type: "move-to-dev",
            auto_fixable: false,
            description: "Move to devDependencies (only type imports are used)",
            note: Some(
                "Type imports are erased at runtime so this dependency is not needed in production",
            ),
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "type-only-dependency",
        }),
        "test_only_dependencies" => Some(ActionSpec {
            fix_type: "move-to-dev",
            auto_fixable: false,
            description: "Move to devDependencies (only test files import this)",
            note: Some(
                "Only test files import this package so it does not need to be a production dependency",
            ),
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "test-only-dependency",
        }),
        "circular_dependencies" => Some(ActionSpec {
            fix_type: "refactor-cycle",
            auto_fixable: false,
            description: "Extract shared logic into a separate module to break the cycle",
            note: Some(
                "Circular imports can cause initialization issues and make code harder to reason about",
            ),
            suppress: SuppressKind::InlineComment,
            issue_kind: "circular-dependency",
        }),
        "boundary_violations" => Some(ActionSpec {
            fix_type: "refactor-boundary",
            auto_fixable: false,
            description: "Move the import through an allowed zone or restructure the dependency",
            note: Some(
                "This import crosses an architecture boundary that is not permitted by the configured rules",
            ),
            suppress: SuppressKind::InlineComment,
            issue_kind: "boundary-violation",
        }),
        _ => None,
    }
}
422
423fn build_actions(
425 item: &serde_json::Value,
426 issue_key: &str,
427 spec: &ActionSpec,
428) -> serde_json::Value {
429 let mut actions = Vec::with_capacity(2);
430 let cross_workspace_dependency = is_dependency_issue(issue_key)
431 && item
432 .get("used_in_workspaces")
433 .and_then(serde_json::Value::as_array)
434 .is_some_and(|workspaces| !workspaces.is_empty());
435
436 let mut fix_action = if cross_workspace_dependency {
438 serde_json::json!({
439 "type": "move-dependency",
440 "auto_fixable": false,
441 "description": "Move this dependency to the workspace package.json that imports it",
442 "note": "fallow fix will not remove dependencies that are imported by another workspace",
443 })
444 } else {
445 serde_json::json!({
446 "type": spec.fix_type,
447 "auto_fixable": spec.auto_fixable,
448 "description": spec.description,
449 })
450 };
451 if let Some(note) = spec.note {
452 fix_action["note"] = serde_json::json!(note);
453 }
454 if (issue_key == "unused_exports" || issue_key == "unused_types")
456 && item
457 .get("is_re_export")
458 .and_then(serde_json::Value::as_bool)
459 == Some(true)
460 {
461 fix_action["note"] = serde_json::json!(
462 "This finding originates from a re-export; verify it is not part of your public API before removing"
463 );
464 }
465 actions.push(fix_action);
466
467 match spec.suppress {
469 SuppressKind::InlineComment => {
470 let mut suppress = serde_json::json!({
471 "type": "suppress-line",
472 "auto_fixable": false,
473 "description": "Suppress with an inline comment above the line",
474 "comment": format!("// fallow-ignore-next-line {}", spec.issue_kind),
475 });
476 if issue_key == "duplicate_exports" {
478 suppress["scope"] = serde_json::json!("per-location");
479 }
480 actions.push(suppress);
481 }
482 SuppressKind::FileComment => {
483 actions.push(serde_json::json!({
484 "type": "suppress-file",
485 "auto_fixable": false,
486 "description": "Suppress with a file-level comment at the top of the file",
487 "comment": format!("// fallow-ignore-file {}", spec.issue_kind),
488 }));
489 }
490 SuppressKind::ConfigIgnoreDep => {
491 let pkg = item
493 .get("package_name")
494 .and_then(serde_json::Value::as_str)
495 .unwrap_or("package-name");
496 actions.push(serde_json::json!({
497 "type": "add-to-config",
498 "auto_fixable": false,
499 "description": format!("Add \"{pkg}\" to ignoreDependencies in fallow config"),
500 "config_key": "ignoreDependencies",
501 "value": pkg,
502 }));
503 }
504 }
505
506 serde_json::Value::Array(actions)
507}
508
/// Returns true for the three "unused dependency" result categories
/// (regular, dev, and optional dependencies).
fn is_dependency_issue(issue_key: &str) -> bool {
    [
        "unused_dependencies",
        "unused_dev_dependencies",
        "unused_optional_dependencies",
    ]
    .contains(&issue_key)
}
515
516fn inject_actions(output: &mut serde_json::Value) {
521 let Some(map) = output.as_object_mut() else {
522 return;
523 };
524
525 for (key, value) in map.iter_mut() {
526 let Some(spec) = actions_for_issue_type(key) else {
527 continue;
528 };
529 let Some(arr) = value.as_array_mut() else {
530 continue;
531 };
532 for item in arr {
533 let actions = build_actions(item, key, &spec);
534 if let serde_json::Value::Object(obj) = item {
535 obj.insert("actions".to_string(), actions);
536 }
537 }
538 }
539}
540
541pub fn build_baseline_deltas_json<'a>(
549 total_delta: i64,
550 per_category: impl Iterator<Item = (&'a str, usize, usize, i64)>,
551) -> serde_json::Value {
552 let mut per_cat = serde_json::Map::new();
553 for (cat, current, baseline, delta) in per_category {
554 per_cat.insert(
555 cat.to_string(),
556 serde_json::json!({
557 "current": current,
558 "baseline": baseline,
559 "delta": delta,
560 }),
561 );
562 }
563 serde_json::json!({
564 "total_delta": total_delta,
565 "per_category": per_cat
566 })
567}
568
569const SECONDARY_REFACTOR_BAND: u16 = 5;
579
/// Options controlling how actions are attached to health findings.
#[derive(Debug, Clone, Copy, Default)]
pub struct HealthActionOptions {
    // When true, suppression-hint actions are not emitted for findings and
    // an `actions_meta` entry records the omission.
    pub omit_suppress_line: bool,
    // Reason recorded in `actions_meta` when hints are omitted; falls back
    // to "unspecified".
    pub omit_reason: Option<&'static str>,
}
604
605#[allow(
612 clippy::redundant_pub_crate,
613 reason = "pub(crate) needed, used by audit.rs via re-export, but not part of public API"
614)]
615pub(crate) fn inject_health_actions(output: &mut serde_json::Value, opts: HealthActionOptions) {
616 let Some(map) = output.as_object_mut() else {
617 return;
618 };
619
620 let max_cyclomatic_threshold = map
623 .get("summary")
624 .and_then(|s| s.get("max_cyclomatic_threshold"))
625 .and_then(serde_json::Value::as_u64)
626 .and_then(|v| u16::try_from(v).ok())
627 .unwrap_or(20);
628 let max_cognitive_threshold = map
629 .get("summary")
630 .and_then(|s| s.get("max_cognitive_threshold"))
631 .and_then(serde_json::Value::as_u64)
632 .and_then(|v| u16::try_from(v).ok())
633 .unwrap_or(15);
634 let max_crap_threshold = map
635 .get("summary")
636 .and_then(|s| s.get("max_crap_threshold"))
637 .and_then(serde_json::Value::as_f64)
638 .unwrap_or(30.0);
639
640 if let Some(findings) = map.get_mut("findings").and_then(|v| v.as_array_mut()) {
642 for item in findings {
643 let actions = build_health_finding_actions(
644 item,
645 opts,
646 max_cyclomatic_threshold,
647 max_cognitive_threshold,
648 max_crap_threshold,
649 );
650 if let serde_json::Value::Object(obj) = item {
651 obj.insert("actions".to_string(), actions);
652 }
653 }
654 }
655
656 if let Some(targets) = map.get_mut("targets").and_then(|v| v.as_array_mut()) {
658 for item in targets {
659 let actions = build_refactoring_target_actions(item);
660 if let serde_json::Value::Object(obj) = item {
661 obj.insert("actions".to_string(), actions);
662 }
663 }
664 }
665
666 if let Some(hotspots) = map.get_mut("hotspots").and_then(|v| v.as_array_mut()) {
668 for item in hotspots {
669 let actions = build_hotspot_actions(item);
670 if let serde_json::Value::Object(obj) = item {
671 obj.insert("actions".to_string(), actions);
672 }
673 }
674 }
675
676 if let Some(gaps) = map.get_mut("coverage_gaps").and_then(|v| v.as_object_mut()) {
678 if let Some(files) = gaps.get_mut("files").and_then(|v| v.as_array_mut()) {
679 for item in files {
680 let actions = build_untested_file_actions(item);
681 if let serde_json::Value::Object(obj) = item {
682 obj.insert("actions".to_string(), actions);
683 }
684 }
685 }
686 if let Some(exports) = gaps.get_mut("exports").and_then(|v| v.as_array_mut()) {
687 for item in exports {
688 let actions = build_untested_export_actions(item);
689 if let serde_json::Value::Object(obj) = item {
690 obj.insert("actions".to_string(), actions);
691 }
692 }
693 }
694 }
695
696 if opts.omit_suppress_line {
704 let reason = opts.omit_reason.unwrap_or("unspecified");
705 map.insert(
706 "actions_meta".to_string(),
707 serde_json::json!({
708 "suppression_hints_omitted": true,
709 "reason": reason,
710 "scope": "health-findings",
711 }),
712 );
713 }
714}
715
/// Builds the `actions` array for one complexity/CRAP finding.
///
/// The mix of actions depends on which metrics were exceeded (`exceeded`),
/// whether coverage alone can clear a CRAP breach, and whether the finding
/// is an Angular `<template>` pseudo-function. Suppression hints are
/// appended unless `opts.omit_suppress_line` is set.
fn build_health_finding_actions(
    item: &serde_json::Value,
    opts: HealthActionOptions,
    max_cyclomatic_threshold: u16,
    max_cognitive_threshold: u16,
    max_crap_threshold: f64,
) -> serde_json::Value {
    let name = item
        .get("name")
        .and_then(serde_json::Value::as_str)
        .unwrap_or("function");
    let path = item
        .get("path")
        .and_then(serde_json::Value::as_str)
        .unwrap_or("");
    // `exceeded` encodes which thresholds were breached.
    let exceeded = item
        .get("exceeded")
        .and_then(serde_json::Value::as_str)
        .unwrap_or("");
    let includes_crap = matches!(
        exceeded,
        "crap" | "cyclomatic_crap" | "cognitive_crap" | "all"
    );
    let crap_only = exceeded == "crap";
    let tier = item
        .get("coverage_tier")
        .and_then(serde_json::Value::as_str);
    // Metric values default to 0 when absent or out of u16 range.
    let cyclomatic = item
        .get("cyclomatic")
        .and_then(serde_json::Value::as_u64)
        .and_then(|v| u16::try_from(v).ok())
        .unwrap_or(0);
    let cognitive = item
        .get("cognitive")
        .and_then(serde_json::Value::as_u64)
        .and_then(|v| u16::try_from(v).ok())
        .unwrap_or(0);
    // At 100% coverage CRAP degenerates to CC, so coverage alone can clear
    // the breach only while CC itself is below the CRAP threshold.
    let full_coverage_can_clear_crap = !includes_crap || f64::from(cyclomatic) < max_crap_threshold;

    let mut actions: Vec<serde_json::Value> = Vec::new();

    // Coverage-oriented action first when CRAP is among the breaches.
    if includes_crap {
        let coverage_action = build_crap_coverage_action(name, tier, full_coverage_can_clear_crap);
        if let Some(action) = coverage_action {
            actions.push(action);
        }
    }

    // Suggest a refactor when complexity itself was breached, when coverage
    // cannot clear a CRAP-only breach, or when a CRAP-only finding sits
    // within SECONDARY_REFACTOR_BAND of the cyclomatic threshold with
    // non-trivial cognitive complexity.
    let crap_only_needs_complexity_reduction = crap_only && !full_coverage_can_clear_crap;
    let cognitive_floor = max_cognitive_threshold / 2;
    let near_cyclomatic_threshold = crap_only
        && cyclomatic > 0
        && cyclomatic >= max_cyclomatic_threshold.saturating_sub(SECONDARY_REFACTOR_BAND)
        && cognitive >= cognitive_floor;
    let is_template = name == "<template>";
    if !crap_only || crap_only_needs_complexity_reduction || near_cyclomatic_threshold {
        // Template findings get template-specific wording.
        let (description, note) = if is_template {
            (
                format!(
                    "Refactor `{name}` to reduce template complexity (simplify control flow and bindings)"
                ),
                "Consider splitting complex template branches into smaller components or simpler bindings",
            )
        } else {
            (
                format!(
                    "Refactor `{name}` to reduce complexity (extract helper functions, simplify branching)"
                ),
                "Consider splitting into smaller functions with single responsibilities",
            )
        };
        actions.push(serde_json::json!({
            "type": "refactor-function",
            "auto_fixable": false,
            "description": description,
            "note": note,
        }));
    }

    if !opts.omit_suppress_line {
        // HTML templates can only carry HTML comments; inline-template
        // findings are suppressed above the Angular decorator instead.
        if is_template
            && Path::new(path)
                .extension()
                .is_some_and(|ext| ext.eq_ignore_ascii_case("html"))
        {
            actions.push(serde_json::json!({
                "type": "suppress-file",
                "auto_fixable": false,
                "description": "Suppress with an HTML comment at the top of the template",
                "comment": "<!-- fallow-ignore-file complexity -->",
                "placement": "top-of-template",
            }));
        } else if is_template {
            actions.push(serde_json::json!({
                "type": "suppress-line",
                "auto_fixable": false,
                "description": "Suppress with an inline comment above the Angular decorator",
                "comment": "// fallow-ignore-next-line complexity",
                "placement": "above-angular-decorator",
            }));
        } else {
            actions.push(serde_json::json!({
                "type": "suppress-line",
                "auto_fixable": false,
                "description": "Suppress with an inline comment above the function declaration",
                "comment": "// fallow-ignore-next-line complexity",
                "placement": "above-function-declaration",
            }));
        }
    }

    serde_json::Value::Array(actions)
}
868
869fn build_crap_coverage_action(
875 name: &str,
876 tier: Option<&str>,
877 full_coverage_can_clear_crap: bool,
878) -> Option<serde_json::Value> {
879 if !full_coverage_can_clear_crap {
880 return None;
881 }
882
883 match tier {
884 Some("partial" | "high") => Some(serde_json::json!({
889 "type": "increase-coverage",
890 "auto_fixable": false,
891 "description": format!("Increase test coverage for `{name}` (file is reachable from existing tests; add targeted assertions for uncovered branches)"),
892 "note": "CRAP = CC^2 * (1 - cov/100)^3 + CC; targeted branch coverage is more efficient than scaffolding new test files when the file already has coverage",
893 })),
894 _ => Some(serde_json::json!({
896 "type": "add-tests",
897 "auto_fixable": false,
898 "description": format!("Add test coverage for `{name}` to lower its CRAP score (coverage reduces risk even without refactoring)"),
899 "note": "CRAP = CC^2 * (1 - cov/100)^3 + CC; higher coverage is the fastest way to bring CRAP under threshold",
900 })),
901 }
902}
903
/// Builds the `actions` array for a complexity/churn hotspot: a refactor
/// and an add-tests action always, plus ownership-related actions
/// (bus factor, unowned, drift) when an `ownership` object is present.
fn build_hotspot_actions(item: &serde_json::Value) -> serde_json::Value {
    let path = item
        .get("path")
        .and_then(serde_json::Value::as_str)
        .unwrap_or("file");

    let mut actions = vec![
        serde_json::json!({
            "type": "refactor-file",
            "auto_fixable": false,
            "description": format!("Refactor `{path}`, high complexity combined with frequent changes makes this a maintenance risk"),
            "note": "Prioritize extracting complex functions, adding tests, or splitting the module",
        }),
        serde_json::json!({
            "type": "add-tests",
            "auto_fixable": false,
            "description": format!("Add test coverage for `{path}` to reduce change risk"),
            "note": "Frequently changed complex files benefit most from comprehensive test coverage",
        }),
    ];

    if let Some(ownership) = item.get("ownership") {
        // Bus factor of exactly 1: one person carries all recent knowledge.
        if ownership
            .get("bus_factor")
            .and_then(serde_json::Value::as_u64)
            == Some(1)
        {
            let top = ownership.get("top_contributor");
            let owner = top
                .and_then(|t| t.get("identifier"))
                .and_then(serde_json::Value::as_str)
                .unwrap_or("the sole contributor");
            let commits = top
                .and_then(|t| t.get("commits"))
                .and_then(serde_json::Value::as_u64)
                .unwrap_or(0);
            // Reviewer identifiers suggested by the analysis, if any.
            let suggested: Vec<String> = ownership
                .get("suggested_reviewers")
                .and_then(serde_json::Value::as_array)
                .map(|arr| {
                    arr.iter()
                        .filter_map(|r| {
                            r.get("identifier")
                                .and_then(serde_json::Value::as_str)
                                .map(String::from)
                        })
                        .collect()
                })
                .unwrap_or_default();
            let mut low_bus_action = serde_json::json!({
                "type": "low-bus-factor",
                "auto_fixable": false,
                "description": format!(
                    "{owner} is the sole recent contributor to `{path}`; adding a second reviewer reduces knowledge-loss risk"
                ),
            });
            // Prefer naming concrete reviewers; otherwise soften the note
            // for files with very little history (< 5 commits).
            if !suggested.is_empty() {
                let list = suggested
                    .iter()
                    .map(|s| format!("@{s}"))
                    .collect::<Vec<_>>()
                    .join(", ");
                low_bus_action["note"] =
                    serde_json::Value::String(format!("Candidate reviewers: {list}"));
            } else if commits < 5 {
                low_bus_action["note"] = serde_json::Value::String(
                    "Single recent contributor on a low-commit file. Consider a pair review for major changes."
                        .to_string(),
                );
            }
            actions.push(low_bus_action);
        }

        // No CODEOWNERS entry covers this file.
        if ownership
            .get("unowned")
            .and_then(serde_json::Value::as_bool)
            == Some(true)
        {
            actions.push(serde_json::json!({
                "type": "unowned-hotspot",
                "auto_fixable": false,
                "description": format!("Add a CODEOWNERS entry for `{path}`"),
                "note": "Frequently-changed files without declared owners create review bottlenecks",
                "suggested_pattern": suggest_codeowners_pattern(path),
                "heuristic": "directory-deepest",
            }));
        }

        // Declared ownership no longer matches who actually changes the file.
        if ownership.get("drift").and_then(serde_json::Value::as_bool) == Some(true) {
            let reason = ownership
                .get("drift_reason")
                .and_then(serde_json::Value::as_str)
                .unwrap_or("ownership has shifted from the original author");
            actions.push(serde_json::json!({
                "type": "ownership-drift",
                "auto_fixable": false,
                "description": format!("Update CODEOWNERS for `{path}`: {reason}"),
                "note": "Drift suggests the declared or original owner is no longer the right reviewer",
            }));
        }
    }

    serde_json::Value::Array(actions)
}
1024
/// Suggests a CODEOWNERS pattern for `path`: the containing directory as
/// `/dir/sub/`, or `/file` when the path has no directory component.
/// Backslashes are normalized to forward slashes first.
fn suggest_codeowners_pattern(path: &str) -> String {
    let normalized = path.replace('\\', "/");
    let trimmed = normalized.trim_start_matches('/');
    // Split off the final (file) component; everything before the last '/'
    // is the directory prefix.
    match trimmed.rsplit_once('/') {
        Some((dir, _file)) => format!("/{dir}/"),
        None => format!("/{trimmed}"),
    }
}
1047
1048fn build_refactoring_target_actions(item: &serde_json::Value) -> serde_json::Value {
1050 let recommendation = item
1051 .get("recommendation")
1052 .and_then(serde_json::Value::as_str)
1053 .unwrap_or("Apply the recommended refactoring");
1054
1055 let category = item
1056 .get("category")
1057 .and_then(serde_json::Value::as_str)
1058 .unwrap_or("refactoring");
1059
1060 let mut actions = vec![serde_json::json!({
1061 "type": "apply-refactoring",
1062 "auto_fixable": false,
1063 "description": recommendation,
1064 "category": category,
1065 })];
1066
1067 if item.get("evidence").is_some() {
1069 actions.push(serde_json::json!({
1070 "type": "suppress-line",
1071 "auto_fixable": false,
1072 "description": "Suppress the underlying complexity finding",
1073 "comment": "// fallow-ignore-next-line complexity",
1074 }));
1075 }
1076
1077 serde_json::Value::Array(actions)
1078}
1079
1080fn build_untested_file_actions(item: &serde_json::Value) -> serde_json::Value {
1082 let path = item
1083 .get("path")
1084 .and_then(serde_json::Value::as_str)
1085 .unwrap_or("file");
1086
1087 serde_json::Value::Array(vec![
1088 serde_json::json!({
1089 "type": "add-tests",
1090 "auto_fixable": false,
1091 "description": format!("Add test coverage for `{path}`"),
1092 "note": "No test dependency path reaches this runtime file",
1093 }),
1094 serde_json::json!({
1095 "type": "suppress-file",
1096 "auto_fixable": false,
1097 "description": format!("Suppress coverage gap reporting for `{path}`"),
1098 "comment": "// fallow-ignore-file coverage-gaps",
1099 }),
1100 ])
1101}
1102
1103fn build_untested_export_actions(item: &serde_json::Value) -> serde_json::Value {
1105 let path = item
1106 .get("path")
1107 .and_then(serde_json::Value::as_str)
1108 .unwrap_or("file");
1109 let export_name = item
1110 .get("export_name")
1111 .and_then(serde_json::Value::as_str)
1112 .unwrap_or("export");
1113
1114 serde_json::Value::Array(vec![
1115 serde_json::json!({
1116 "type": "add-test-import",
1117 "auto_fixable": false,
1118 "description": format!("Import and test `{export_name}` from `{path}`"),
1119 "note": "This export is runtime-reachable but no test-reachable module references it",
1120 }),
1121 serde_json::json!({
1122 "type": "suppress-file",
1123 "auto_fixable": false,
1124 "description": format!("Suppress coverage gap reporting for `{path}`"),
1125 "comment": "// fallow-ignore-file coverage-gaps",
1126 }),
1127 ])
1128}
1129
1130#[allow(
1137 clippy::redundant_pub_crate,
1138 reason = "pub(crate) needed — used by audit.rs via re-export, but not part of public API"
1139)]
1140pub(crate) fn inject_dupes_actions(output: &mut serde_json::Value) {
1141 let Some(map) = output.as_object_mut() else {
1142 return;
1143 };
1144
1145 if let Some(families) = map.get_mut("clone_families").and_then(|v| v.as_array_mut()) {
1147 for item in families {
1148 let actions = build_clone_family_actions(item);
1149 if let serde_json::Value::Object(obj) = item {
1150 obj.insert("actions".to_string(), actions);
1151 }
1152 }
1153 }
1154
1155 if let Some(groups) = map.get_mut("clone_groups").and_then(|v| v.as_array_mut()) {
1157 for item in groups {
1158 let actions = build_clone_group_actions(item);
1159 if let serde_json::Value::Object(obj) = item {
1160 obj.insert("actions".to_string(), actions);
1161 }
1162 }
1163 }
1164}
1165
1166fn build_clone_family_actions(item: &serde_json::Value) -> serde_json::Value {
1168 let group_count = item
1169 .get("groups")
1170 .and_then(|v| v.as_array())
1171 .map_or(0, Vec::len);
1172
1173 let total_lines = item
1174 .get("total_duplicated_lines")
1175 .and_then(serde_json::Value::as_u64)
1176 .unwrap_or(0);
1177
1178 let mut actions = vec![serde_json::json!({
1179 "type": "extract-shared",
1180 "auto_fixable": false,
1181 "description": format!(
1182 "Extract {group_count} duplicated code block{} ({total_lines} lines) into a shared module",
1183 if group_count == 1 { "" } else { "s" }
1184 ),
1185 "note": "These clone groups share the same files, indicating a structural relationship — refactor together",
1186 })];
1187
1188 if let Some(suggestions) = item.get("suggestions").and_then(|v| v.as_array()) {
1190 for suggestion in suggestions {
1191 if let Some(desc) = suggestion
1192 .get("description")
1193 .and_then(serde_json::Value::as_str)
1194 {
1195 actions.push(serde_json::json!({
1196 "type": "apply-suggestion",
1197 "auto_fixable": false,
1198 "description": desc,
1199 }));
1200 }
1201 }
1202 }
1203
1204 actions.push(serde_json::json!({
1205 "type": "suppress-line",
1206 "auto_fixable": false,
1207 "description": "Suppress with an inline comment above the duplicated code",
1208 "comment": "// fallow-ignore-next-line code-duplication",
1209 }));
1210
1211 serde_json::Value::Array(actions)
1212}
1213
1214fn build_clone_group_actions(item: &serde_json::Value) -> serde_json::Value {
1216 let instance_count = item
1217 .get("instances")
1218 .and_then(|v| v.as_array())
1219 .map_or(0, Vec::len);
1220
1221 let line_count = item
1222 .get("line_count")
1223 .and_then(serde_json::Value::as_u64)
1224 .unwrap_or(0);
1225
1226 let actions = vec![
1227 serde_json::json!({
1228 "type": "extract-shared",
1229 "auto_fixable": false,
1230 "description": format!(
1231 "Extract duplicated code ({line_count} lines, {instance_count} instance{}) into a shared function",
1232 if instance_count == 1 { "" } else { "s" }
1233 ),
1234 }),
1235 serde_json::json!({
1236 "type": "suppress-line",
1237 "auto_fixable": false,
1238 "description": "Suppress with an inline comment above the duplicated code",
1239 "comment": "// fallow-ignore-next-line code-duplication",
1240 }),
1241 ];
1242
1243 serde_json::Value::Array(actions)
1244}
1245
1246fn insert_meta(output: &mut serde_json::Value, meta: serde_json::Value) {
1248 if let serde_json::Value::Object(map) = output {
1249 map.insert("_meta".to_string(), meta);
1250 }
1251}
1252
1253pub fn build_health_json(
1261 report: &crate::health_types::HealthReport,
1262 root: &Path,
1263 elapsed: Duration,
1264 explain: bool,
1265 action_opts: HealthActionOptions,
1266) -> Result<serde_json::Value, serde_json::Error> {
1267 let report_value = serde_json::to_value(report)?;
1268 let mut output = build_json_envelope(report_value, elapsed);
1269 let root_prefix = format!("{}/", root.display());
1270 strip_root_prefix(&mut output, &root_prefix);
1271 inject_health_actions(&mut output, action_opts);
1272 if explain {
1273 insert_meta(&mut output, explain::health_meta());
1274 }
1275 Ok(output)
1276}
1277
1278pub(super) fn print_health_json(
1279 report: &crate::health_types::HealthReport,
1280 root: &Path,
1281 elapsed: Duration,
1282 explain: bool,
1283 action_opts: HealthActionOptions,
1284) -> ExitCode {
1285 match build_health_json(report, root, elapsed, explain, action_opts) {
1286 Ok(output) => emit_json(&output, "JSON"),
1287 Err(e) => {
1288 eprintln!("Error: failed to serialize health report: {e}");
1289 ExitCode::from(2)
1290 }
1291 }
1292}
1293
1294pub fn build_grouped_health_json(
1314 report: &crate::health_types::HealthReport,
1315 grouping: &crate::health_types::HealthGrouping,
1316 root: &Path,
1317 elapsed: Duration,
1318 explain: bool,
1319 action_opts: HealthActionOptions,
1320) -> Result<serde_json::Value, serde_json::Error> {
1321 let root_prefix = format!("{}/", root.display());
1322 let report_value = serde_json::to_value(report)?;
1323 let mut output = build_json_envelope(report_value, elapsed);
1324 strip_root_prefix(&mut output, &root_prefix);
1325 inject_health_actions(&mut output, action_opts);
1326
1327 if let serde_json::Value::Object(ref mut map) = output {
1328 map.insert("grouped_by".to_string(), serde_json::json!(grouping.mode));
1329 }
1330
1331 let group_values: Vec<serde_json::Value> = grouping
1339 .groups
1340 .iter()
1341 .map(|g| {
1342 let mut value = serde_json::to_value(g)?;
1343 strip_root_prefix(&mut value, &root_prefix);
1344 inject_health_actions(&mut value, action_opts);
1345 Ok(value)
1346 })
1347 .collect::<Result<_, serde_json::Error>>()?;
1348
1349 if let serde_json::Value::Object(ref mut map) = output {
1350 map.insert("groups".to_string(), serde_json::Value::Array(group_values));
1351 }
1352
1353 if explain {
1354 insert_meta(&mut output, explain::health_meta());
1355 }
1356
1357 Ok(output)
1358}
1359
1360pub(super) fn print_grouped_health_json(
1361 report: &crate::health_types::HealthReport,
1362 grouping: &crate::health_types::HealthGrouping,
1363 root: &Path,
1364 elapsed: Duration,
1365 explain: bool,
1366 action_opts: HealthActionOptions,
1367) -> ExitCode {
1368 match build_grouped_health_json(report, grouping, root, elapsed, explain, action_opts) {
1369 Ok(output) => emit_json(&output, "JSON"),
1370 Err(e) => {
1371 eprintln!("Error: failed to serialize grouped health report: {e}");
1372 ExitCode::from(2)
1373 }
1374 }
1375}
1376
1377pub fn build_duplication_json(
1384 report: &DuplicationReport,
1385 root: &Path,
1386 elapsed: Duration,
1387 explain: bool,
1388) -> Result<serde_json::Value, serde_json::Error> {
1389 let report_value = serde_json::to_value(report)?;
1390
1391 let mut output = build_json_envelope(report_value, elapsed);
1392 let root_prefix = format!("{}/", root.display());
1393 strip_root_prefix(&mut output, &root_prefix);
1394 inject_dupes_actions(&mut output);
1395
1396 if explain {
1397 insert_meta(&mut output, explain::dupes_meta());
1398 }
1399
1400 Ok(output)
1401}
1402
1403pub(super) fn print_duplication_json(
1404 report: &DuplicationReport,
1405 root: &Path,
1406 elapsed: Duration,
1407 explain: bool,
1408) -> ExitCode {
1409 match build_duplication_json(report, root, elapsed, explain) {
1410 Ok(output) => emit_json(&output, "JSON"),
1411 Err(e) => {
1412 eprintln!("Error: failed to serialize duplication report: {e}");
1413 ExitCode::from(2)
1414 }
1415 }
1416}
1417
1418pub fn build_grouped_duplication_json(
1439 report: &DuplicationReport,
1440 grouping: &super::dupes_grouping::DuplicationGrouping,
1441 root: &Path,
1442 elapsed: Duration,
1443 explain: bool,
1444) -> Result<serde_json::Value, serde_json::Error> {
1445 let report_value = serde_json::to_value(report)?;
1446 let mut output = build_json_envelope(report_value, elapsed);
1447 let root_prefix = format!("{}/", root.display());
1448 strip_root_prefix(&mut output, &root_prefix);
1449 inject_dupes_actions(&mut output);
1450
1451 if let serde_json::Value::Object(ref mut map) = output {
1452 map.insert("grouped_by".to_string(), serde_json::json!(grouping.mode));
1453 map.insert(
1459 "total_issues".to_string(),
1460 serde_json::json!(report.clone_groups.len()),
1461 );
1462 }
1463
1464 let group_values: Vec<serde_json::Value> = grouping
1465 .groups
1466 .iter()
1467 .map(|g| {
1468 let mut value = serde_json::to_value(g)?;
1469 strip_root_prefix(&mut value, &root_prefix);
1470 inject_dupes_actions(&mut value);
1471 Ok(value)
1472 })
1473 .collect::<Result<_, serde_json::Error>>()?;
1474
1475 if let serde_json::Value::Object(ref mut map) = output {
1476 map.insert("groups".to_string(), serde_json::Value::Array(group_values));
1477 }
1478
1479 if explain {
1480 insert_meta(&mut output, explain::dupes_meta());
1481 }
1482
1483 Ok(output)
1484}
1485
1486pub(super) fn print_grouped_duplication_json(
1487 report: &DuplicationReport,
1488 grouping: &super::dupes_grouping::DuplicationGrouping,
1489 root: &Path,
1490 elapsed: Duration,
1491 explain: bool,
1492) -> ExitCode {
1493 match build_grouped_duplication_json(report, grouping, root, elapsed, explain) {
1494 Ok(output) => emit_json(&output, "JSON"),
1495 Err(e) => {
1496 eprintln!("Error: failed to serialize grouped duplication report: {e}");
1497 ExitCode::from(2)
1498 }
1499 }
1500}
1501
1502pub(super) fn print_trace_json<T: serde::Serialize>(value: &T) {
1503 match serde_json::to_string_pretty(value) {
1504 Ok(json) => println!("{json}"),
1505 Err(e) => {
1506 eprintln!("Error: failed to serialize trace output: {e}");
1507 #[expect(
1508 clippy::exit,
1509 reason = "fatal serialization error requires immediate exit"
1510 )]
1511 std::process::exit(2);
1512 }
1513 }
1514}
1515
1516#[cfg(test)]
1517mod tests {
1518 use super::*;
1519 use crate::health_types::{
1520 RuntimeCoverageAction, RuntimeCoverageConfidence, RuntimeCoverageEvidence,
1521 RuntimeCoverageFinding, RuntimeCoverageHotPath, RuntimeCoverageMessage,
1522 RuntimeCoverageReport, RuntimeCoverageReportVerdict, RuntimeCoverageSummary,
1523 RuntimeCoverageVerdict, RuntimeCoverageWatermark,
1524 };
1525 use crate::report::test_helpers::sample_results;
1526 use fallow_core::extract::MemberKind;
1527 use fallow_core::results::*;
1528 use std::path::PathBuf;
1529 use std::time::Duration;
1530
    // The JSON envelope must always expose schema/version/timing metadata.
    #[test]
    fn json_output_has_metadata_fields() {
        let root = PathBuf::from("/project");
        let results = AnalysisResults::default();
        let elapsed = Duration::from_millis(123);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        assert_eq!(output["schema_version"], 4);
        assert!(output["version"].is_string());
        assert_eq!(output["elapsed_ms"], 123);
        assert_eq!(output["total_issues"], 0);
    }

    // Every issue category from the sample fixture serializes as its own
    // top-level array with the expected entry count.
    #[test]
    fn json_output_includes_issue_arrays() {
        let root = PathBuf::from("/project");
        let results = sample_results(&root);
        let elapsed = Duration::from_millis(50);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        assert_eq!(output["unused_files"].as_array().unwrap().len(), 1);
        assert_eq!(output["unused_exports"].as_array().unwrap().len(), 1);
        assert_eq!(output["unused_types"].as_array().unwrap().len(), 1);
        assert_eq!(output["unused_dependencies"].as_array().unwrap().len(), 1);
        assert_eq!(
            output["unused_dev_dependencies"].as_array().unwrap().len(),
            1
        );
        assert_eq!(output["unused_enum_members"].as_array().unwrap().len(), 1);
        assert_eq!(output["unused_class_members"].as_array().unwrap().len(), 1);
        assert_eq!(output["unresolved_imports"].as_array().unwrap().len(), 1);
        assert_eq!(output["unlisted_dependencies"].as_array().unwrap().len(), 1);
        assert_eq!(output["duplicate_exports"].as_array().unwrap().len(), 1);
        assert_eq!(
            output["type_only_dependencies"].as_array().unwrap().len(),
            1
        );
        assert_eq!(output["circular_dependencies"].as_array().unwrap().len(), 1);
    }
1570
    // End-to-end check of the runtime-coverage section of the health JSON:
    // paths become root-relative, enum variants serialize with their wire
    // spellings, and finding/watermark/warning payloads survive intact.
    #[test]
    fn health_json_includes_runtime_coverage_with_relative_paths_and_actions() {
        let root = PathBuf::from("/project");
        let report = crate::health_types::HealthReport {
            runtime_coverage: Some(RuntimeCoverageReport {
                verdict: RuntimeCoverageReportVerdict::ColdCodeDetected,
                summary: RuntimeCoverageSummary {
                    functions_tracked: 3,
                    functions_hit: 1,
                    functions_unhit: 1,
                    functions_untracked: 1,
                    coverage_percent: 33.3,
                    trace_count: 2_847_291,
                    period_days: 30,
                    deployments_seen: 14,
                    capture_quality: Some(crate::health_types::RuntimeCoverageCaptureQuality {
                        window_seconds: 720,
                        instances_observed: 1,
                        lazy_parse_warning: true,
                        untracked_ratio_percent: 42.5,
                    }),
                },
                findings: vec![RuntimeCoverageFinding {
                    id: "fallow:prod:deadbeef".to_owned(),
                    path: root.join("src/cold.ts"),
                    function: "coldPath".to_owned(),
                    line: 12,
                    verdict: RuntimeCoverageVerdict::ReviewRequired,
                    invocations: Some(0),
                    confidence: RuntimeCoverageConfidence::Medium,
                    evidence: RuntimeCoverageEvidence {
                        static_status: "used".to_owned(),
                        test_coverage: "not_covered".to_owned(),
                        v8_tracking: "tracked".to_owned(),
                        untracked_reason: None,
                        observation_days: 30,
                        deployments_observed: 14,
                    },
                    actions: vec![RuntimeCoverageAction {
                        kind: "review-deletion".to_owned(),
                        description: "Tracked in runtime coverage with zero invocations."
                            .to_owned(),
                        auto_fixable: false,
                    }],
                }],
                hot_paths: vec![RuntimeCoverageHotPath {
                    id: "fallow:hot:cafebabe".to_owned(),
                    path: root.join("src/hot.ts"),
                    function: "hotPath".to_owned(),
                    line: 3,
                    invocations: 250,
                    percentile: 99,
                    actions: vec![],
                }],
                watermark: Some(RuntimeCoverageWatermark::LicenseExpiredGrace),
                warnings: vec![RuntimeCoverageMessage {
                    code: "partial-merge".to_owned(),
                    message: "Merged coverage omitted one chunk.".to_owned(),
                }],
            }),
            ..Default::default()
        };

        // Run the same sanitization pipeline build_health_json applies.
        let report_value = serde_json::to_value(&report).expect("should serialize health report");
        let mut output = build_json_envelope(report_value, Duration::from_millis(7));
        strip_root_prefix(&mut output, "/project/");
        inject_health_actions(&mut output, HealthActionOptions::default());

        assert_eq!(
            output["runtime_coverage"]["verdict"],
            serde_json::Value::String("cold-code-detected".to_owned())
        );
        assert_eq!(
            output["runtime_coverage"]["summary"]["functions_tracked"],
            serde_json::Value::from(3)
        );
        assert_eq!(
            output["runtime_coverage"]["summary"]["coverage_percent"],
            serde_json::Value::from(33.3)
        );
        let finding = &output["runtime_coverage"]["findings"][0];
        assert_eq!(finding["path"], "src/cold.ts");
        assert_eq!(finding["verdict"], "review_required");
        assert_eq!(finding["id"], "fallow:prod:deadbeef");
        assert_eq!(finding["actions"][0]["type"], "review-deletion");
        let hot_path = &output["runtime_coverage"]["hot_paths"][0];
        assert_eq!(hot_path["path"], "src/hot.ts");
        assert_eq!(hot_path["function"], "hotPath");
        assert_eq!(hot_path["percentile"], 99);
        assert_eq!(
            output["runtime_coverage"]["watermark"],
            serde_json::Value::String("license-expired-grace".to_owned())
        );
        assert_eq!(
            output["runtime_coverage"]["warnings"][0]["code"],
            serde_json::Value::String("partial-merge".to_owned())
        );
    }
1669
    // Envelope metadata keys must precede the issue arrays in the serialized
    // object (key iteration order here reflects insertion order).
    #[test]
    fn json_metadata_fields_appear_first() {
        let root = PathBuf::from("/project");
        let results = AnalysisResults::default();
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");
        let keys: Vec<&String> = output.as_object().unwrap().keys().collect();
        assert_eq!(keys[0], "schema_version");
        assert_eq!(keys[1], "version");
        assert_eq!(keys[2], "elapsed_ms");
        assert_eq!(keys[3], "total_issues");
    }

    // The envelope's total_issues mirrors AnalysisResults::total_issues().
    #[test]
    fn json_total_issues_matches_results() {
        let root = PathBuf::from("/project");
        let results = sample_results(&root);
        let total = results.total_issues();
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        assert_eq!(output["total_issues"], total);
    }
1693
    // An unused-export entry carries every location and flag field verbatim.
    #[test]
    fn json_unused_export_contains_expected_fields() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_exports.push(UnusedExport {
            path: root.join("src/utils.ts"),
            export_name: "helperFn".to_string(),
            is_type_only: false,
            line: 10,
            col: 4,
            span_start: 120,
            is_re_export: false,
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let export = &output["unused_exports"][0];
        assert_eq!(export["export_name"], "helperFn");
        assert_eq!(export["line"], 10);
        assert_eq!(export["col"], 4);
        assert_eq!(export["is_type_only"], false);
        assert_eq!(export["span_start"], 120);
        assert_eq!(export["is_re_export"], false);
    }

    // The output survives a stringify/parse round trip unchanged.
    #[test]
    fn json_serializes_to_valid_json() {
        let root = PathBuf::from("/project");
        let results = sample_results(&root);
        let elapsed = Duration::from_millis(42);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let json_str = serde_json::to_string_pretty(&output).expect("should stringify");
        let reparsed: serde_json::Value =
            serde_json::from_str(&json_str).expect("JSON output should be valid JSON");
        assert_eq!(reparsed, output);
    }
1731
    // Empty results still produce every category key as an empty array, so
    // consumers never need existence checks.
    #[test]
    fn json_empty_results_produce_valid_structure() {
        let root = PathBuf::from("/project");
        let results = AnalysisResults::default();
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        assert_eq!(output["total_issues"], 0);
        assert_eq!(output["unused_files"].as_array().unwrap().len(), 0);
        assert_eq!(output["unused_exports"].as_array().unwrap().len(), 0);
        assert_eq!(output["unused_types"].as_array().unwrap().len(), 0);
        assert_eq!(output["unused_dependencies"].as_array().unwrap().len(), 0);
        assert_eq!(
            output["unused_dev_dependencies"].as_array().unwrap().len(),
            0
        );
        assert_eq!(output["unused_enum_members"].as_array().unwrap().len(), 0);
        assert_eq!(output["unused_class_members"].as_array().unwrap().len(), 0);
        assert_eq!(output["unresolved_imports"].as_array().unwrap().len(), 0);
        assert_eq!(output["unlisted_dependencies"].as_array().unwrap().len(), 0);
        assert_eq!(output["duplicate_exports"].as_array().unwrap().len(), 0);
        assert_eq!(
            output["type_only_dependencies"].as_array().unwrap().len(),
            0
        );
        assert_eq!(output["circular_dependencies"].as_array().unwrap().len(), 0);
    }

    // Empty output also survives a compact stringify/parse round trip.
    #[test]
    fn json_empty_results_round_trips_through_string() {
        let root = PathBuf::from("/project");
        let results = AnalysisResults::default();
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let json_str = serde_json::to_string(&output).expect("should stringify");
        let reparsed: serde_json::Value =
            serde_json::from_str(&json_str).expect("should parse back");
        assert_eq!(reparsed["total_issues"], 0);
    }
1774
    // Top-level paths are emitted relative to the project root.
    #[test]
    fn json_paths_are_relative_to_root() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_files.push(UnusedFile {
            path: root.join("src/deep/nested/file.ts"),
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let path = output["unused_files"][0]["path"].as_str().unwrap();
        assert_eq!(path, "src/deep/nested/file.ts");
        assert!(!path.starts_with("/project"));
    }

    // Root stripping recurses into nested structures (import sites).
    #[test]
    fn json_strips_root_from_nested_locations() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unlisted_dependencies.push(UnlistedDependency {
            package_name: "chalk".to_string(),
            imported_from: vec![ImportSite {
                path: root.join("src/cli.ts"),
                line: 2,
                col: 0,
            }],
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let site_path = output["unlisted_dependencies"][0]["imported_from"][0]["path"]
            .as_str()
            .unwrap();
        assert_eq!(site_path, "src/cli.ts");
    }

    // Root stripping also reaches duplicate-export location lists.
    #[test]
    fn json_strips_root_from_duplicate_export_locations() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.duplicate_exports.push(DuplicateExport {
            export_name: "Config".to_string(),
            locations: vec![
                DuplicateLocation {
                    path: root.join("src/config.ts"),
                    line: 15,
                    col: 0,
                },
                DuplicateLocation {
                    path: root.join("src/types.ts"),
                    line: 30,
                    col: 0,
                },
            ],
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let loc0 = output["duplicate_exports"][0]["locations"][0]["path"]
            .as_str()
            .unwrap();
        let loc1 = output["duplicate_exports"][0]["locations"][1]["path"]
            .as_str()
            .unwrap();
        assert_eq!(loc0, "src/config.ts");
        assert_eq!(loc1, "src/types.ts");
    }

    // Root stripping applies to each file in a dependency cycle.
    #[test]
    fn json_strips_root_from_circular_dependency_files() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.circular_dependencies.push(CircularDependency {
            files: vec![root.join("src/a.ts"), root.join("src/b.ts")],
            length: 2,
            line: 1,
            col: 0,
            is_cross_package: false,
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let files = output["circular_dependencies"][0]["files"]
            .as_array()
            .unwrap();
        assert_eq!(files[0].as_str().unwrap(), "src/a.ts");
        assert_eq!(files[1].as_str().unwrap(), "src/b.ts");
    }

    // Paths outside the project root are left untouched.
    #[test]
    fn json_path_outside_root_not_stripped() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_files.push(UnusedFile {
            path: PathBuf::from("/other/project/src/file.ts"),
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let path = output["unused_files"][0]["path"].as_str().unwrap();
        assert!(path.contains("/other/project/"));
    }
1879
    // An unused-file entry exposes its (root-relative) path.
    #[test]
    fn json_unused_file_contains_path() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_files.push(UnusedFile {
            path: root.join("src/orphan.ts"),
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let file = &output["unused_files"][0];
        assert_eq!(file["path"], "src/orphan.ts");
    }

    // An unused-type entry keeps its type-only flag and location fields.
    #[test]
    fn json_unused_type_contains_expected_fields() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_types.push(UnusedExport {
            path: root.join("src/types.ts"),
            export_name: "OldInterface".to_string(),
            is_type_only: true,
            line: 20,
            col: 0,
            span_start: 300,
            is_re_export: false,
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let typ = &output["unused_types"][0];
        assert_eq!(typ["export_name"], "OldInterface");
        assert_eq!(typ["is_type_only"], true);
        assert_eq!(typ["line"], 20);
        assert_eq!(typ["path"], "src/types.ts");
    }

    // An unused-dependency entry keeps its name and line; an empty
    // used_in_workspaces list is omitted from the output entirely.
    #[test]
    fn json_unused_dependency_contains_expected_fields() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_dependencies.push(UnusedDependency {
            package_name: "axios".to_string(),
            location: DependencyLocation::Dependencies,
            path: root.join("package.json"),
            line: 10,
            used_in_workspaces: Vec::new(),
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let dep = &output["unused_dependencies"][0];
        assert_eq!(dep["package_name"], "axios");
        assert_eq!(dep["line"], 10);
        assert!(dep.get("used_in_workspaces").is_none());
    }

    // A non-empty used_in_workspaces list serializes with root-relative
    // workspace paths.
    #[test]
    fn json_unused_dependency_includes_cross_workspace_context() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_dependencies.push(UnusedDependency {
            package_name: "lodash-es".to_string(),
            location: DependencyLocation::Dependencies,
            path: root.join("packages/shared/package.json"),
            line: 6,
            used_in_workspaces: vec![root.join("packages/consumer")],
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let dep = &output["unused_dependencies"][0];
        assert_eq!(
            dep["used_in_workspaces"],
            serde_json::json!(["packages/consumer"])
        );
    }

    // Dev dependencies land in their own unused_dev_dependencies array.
    #[test]
    fn json_unused_dev_dependency_contains_expected_fields() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_dev_dependencies.push(UnusedDependency {
            package_name: "vitest".to_string(),
            location: DependencyLocation::DevDependencies,
            path: root.join("package.json"),
            line: 15,
            used_in_workspaces: Vec::new(),
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let dep = &output["unused_dev_dependencies"][0];
        assert_eq!(dep["package_name"], "vitest");
    }

    // Optional dependencies get their own array and count toward the total.
    #[test]
    fn json_unused_optional_dependency_contains_expected_fields() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_optional_dependencies.push(UnusedDependency {
            package_name: "fsevents".to_string(),
            location: DependencyLocation::OptionalDependencies,
            path: root.join("package.json"),
            line: 12,
            used_in_workspaces: Vec::new(),
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let dep = &output["unused_optional_dependencies"][0];
        assert_eq!(dep["package_name"], "fsevents");
        assert_eq!(output["total_issues"], 1);
    }
1996
    // An unused enum member records its parent, name, and location.
    #[test]
    fn json_unused_enum_member_contains_expected_fields() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_enum_members.push(UnusedMember {
            path: root.join("src/enums.ts"),
            parent_name: "Color".to_string(),
            member_name: "Purple".to_string(),
            kind: MemberKind::EnumMember,
            line: 5,
            col: 2,
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let member = &output["unused_enum_members"][0];
        assert_eq!(member["parent_name"], "Color");
        assert_eq!(member["member_name"], "Purple");
        assert_eq!(member["line"], 5);
        assert_eq!(member["path"], "src/enums.ts");
    }

    // An unused class member records its owning class, name, and line.
    #[test]
    fn json_unused_class_member_contains_expected_fields() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_class_members.push(UnusedMember {
            path: root.join("src/api.ts"),
            parent_name: "ApiClient".to_string(),
            member_name: "deprecatedFetch".to_string(),
            kind: MemberKind::ClassMethod,
            line: 100,
            col: 4,
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let member = &output["unused_class_members"][0];
        assert_eq!(member["parent_name"], "ApiClient");
        assert_eq!(member["member_name"], "deprecatedFetch");
        assert_eq!(member["line"], 100);
    }

    // An unresolved import keeps its specifier and source location.
    #[test]
    fn json_unresolved_import_contains_expected_fields() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unresolved_imports.push(UnresolvedImport {
            path: root.join("src/app.ts"),
            specifier: "@acme/missing-pkg".to_string(),
            line: 7,
            col: 0,
            specifier_col: 0,
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let import = &output["unresolved_imports"][0];
        assert_eq!(import["specifier"], "@acme/missing-pkg");
        assert_eq!(import["line"], 7);
        assert_eq!(import["path"], "src/app.ts");
    }
2059
    // An unlisted dependency lists every (root-relative) import site.
    #[test]
    fn json_unlisted_dependency_contains_import_sites() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unlisted_dependencies.push(UnlistedDependency {
            package_name: "dotenv".to_string(),
            imported_from: vec![
                ImportSite {
                    path: root.join("src/config.ts"),
                    line: 1,
                    col: 0,
                },
                ImportSite {
                    path: root.join("src/server.ts"),
                    line: 3,
                    col: 0,
                },
            ],
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let dep = &output["unlisted_dependencies"][0];
        assert_eq!(dep["package_name"], "dotenv");
        let sites = dep["imported_from"].as_array().unwrap();
        assert_eq!(sites.len(), 2);
        assert_eq!(sites[0]["path"], "src/config.ts");
        assert_eq!(sites[1]["path"], "src/server.ts");
    }

    // A duplicate export lists each declaration location in order.
    #[test]
    fn json_duplicate_export_contains_locations() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.duplicate_exports.push(DuplicateExport {
            export_name: "Button".to_string(),
            locations: vec![
                DuplicateLocation {
                    path: root.join("src/ui.ts"),
                    line: 10,
                    col: 0,
                },
                DuplicateLocation {
                    path: root.join("src/components.ts"),
                    line: 25,
                    col: 0,
                },
            ],
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let dup = &output["duplicate_exports"][0];
        assert_eq!(dup["export_name"], "Button");
        let locs = dup["locations"].as_array().unwrap();
        assert_eq!(locs.len(), 2);
        assert_eq!(locs[0]["line"], 10);
        assert_eq!(locs[1]["line"], 25);
    }

    // A type-only dependency keeps its package name and manifest line.
    #[test]
    fn json_type_only_dependency_contains_expected_fields() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.type_only_dependencies.push(TypeOnlyDependency {
            package_name: "zod".to_string(),
            path: root.join("package.json"),
            line: 8,
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let dep = &output["type_only_dependencies"][0];
        assert_eq!(dep["package_name"], "zod");
        assert_eq!(dep["line"], 8);
    }
2136
    // A circular dependency reports its length, location, and file list.
    #[test]
    fn json_circular_dependency_contains_expected_fields() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.circular_dependencies.push(CircularDependency {
            files: vec![
                root.join("src/a.ts"),
                root.join("src/b.ts"),
                root.join("src/c.ts"),
            ],
            length: 3,
            line: 5,
            col: 0,
            is_cross_package: false,
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let cycle = &output["circular_dependencies"][0];
        assert_eq!(cycle["length"], 3);
        assert_eq!(cycle["line"], 5);
        let files = cycle["files"].as_array().unwrap();
        assert_eq!(files.len(), 3);
    }

    // Re-exported symbols keep their is_re_export flag in the output.
    #[test]
    fn json_re_export_flagged_correctly() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_exports.push(UnusedExport {
            path: root.join("src/index.ts"),
            export_name: "reExported".to_string(),
            is_type_only: false,
            line: 1,
            col: 0,
            span_start: 0,
            is_re_export: true,
        });
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        assert_eq!(output["unused_exports"][0]["is_re_export"], true);
    }

    // Pins the wire schema: SCHEMA_VERSION and the emitted value are both 4.
    #[test]
    fn json_schema_version_is_4() {
        let root = PathBuf::from("/project");
        let results = AnalysisResults::default();
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        assert_eq!(output["schema_version"], SCHEMA_VERSION);
        assert_eq!(output["schema_version"], 4);
    }

    // The version field always reflects the crate version at build time.
    #[test]
    fn json_version_matches_cargo_pkg_version() {
        let root = PathBuf::from("/project");
        let results = AnalysisResults::default();
        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        assert_eq!(output["version"], env!("CARGO_PKG_VERSION"));
    }
2207
2208 #[test]
2211 fn json_elapsed_ms_zero_duration() {
2212 let root = PathBuf::from("/project");
2213 let results = AnalysisResults::default();
2214 let output = build_json(&results, &root, Duration::ZERO).expect("should serialize");
2215
2216 assert_eq!(output["elapsed_ms"], 0);
2217 }
2218
2219 #[test]
2220 fn json_elapsed_ms_large_duration() {
2221 let root = PathBuf::from("/project");
2222 let results = AnalysisResults::default();
2223 let elapsed = Duration::from_mins(2);
2224 let output = build_json(&results, &root, elapsed).expect("should serialize");
2225
2226 assert_eq!(output["elapsed_ms"], 120_000);
2227 }
2228
2229 #[test]
2230 fn json_elapsed_ms_sub_millisecond_truncated() {
2231 let root = PathBuf::from("/project");
2232 let results = AnalysisResults::default();
2233 let elapsed = Duration::from_micros(500);
2235 let output = build_json(&results, &root, elapsed).expect("should serialize");
2236
2237 assert_eq!(output["elapsed_ms"], 0);
2238 }
2239
2240 #[test]
2243 fn json_multiple_unused_files() {
2244 let root = PathBuf::from("/project");
2245 let mut results = AnalysisResults::default();
2246 results.unused_files.push(UnusedFile {
2247 path: root.join("src/a.ts"),
2248 });
2249 results.unused_files.push(UnusedFile {
2250 path: root.join("src/b.ts"),
2251 });
2252 results.unused_files.push(UnusedFile {
2253 path: root.join("src/c.ts"),
2254 });
2255 let elapsed = Duration::from_millis(0);
2256 let output = build_json(&results, &root, elapsed).expect("should serialize");
2257
2258 assert_eq!(output["unused_files"].as_array().unwrap().len(), 3);
2259 assert_eq!(output["total_issues"], 3);
2260 }
2261
2262 #[test]
2265 fn strip_root_prefix_on_string_value() {
2266 let mut value = serde_json::json!("/project/src/file.ts");
2267 strip_root_prefix(&mut value, "/project/");
2268 assert_eq!(value, "src/file.ts");
2269 }
2270
2271 #[test]
2272 fn strip_root_prefix_leaves_non_matching_string() {
2273 let mut value = serde_json::json!("/other/src/file.ts");
2274 strip_root_prefix(&mut value, "/project/");
2275 assert_eq!(value, "/other/src/file.ts");
2276 }
2277
2278 #[test]
2279 fn strip_root_prefix_recurses_into_arrays() {
2280 let mut value = serde_json::json!(["/project/a.ts", "/project/b.ts", "/other/c.ts"]);
2281 strip_root_prefix(&mut value, "/project/");
2282 assert_eq!(value[0], "a.ts");
2283 assert_eq!(value[1], "b.ts");
2284 assert_eq!(value[2], "/other/c.ts");
2285 }
2286
2287 #[test]
2288 fn strip_root_prefix_recurses_into_nested_objects() {
2289 let mut value = serde_json::json!({
2290 "outer": {
2291 "path": "/project/src/nested.ts"
2292 }
2293 });
2294 strip_root_prefix(&mut value, "/project/");
2295 assert_eq!(value["outer"]["path"], "src/nested.ts");
2296 }
2297
2298 #[test]
2299 fn strip_root_prefix_leaves_numbers_and_booleans() {
2300 let mut value = serde_json::json!({
2301 "line": 42,
2302 "is_type_only": false,
2303 "path": "/project/src/file.ts"
2304 });
2305 strip_root_prefix(&mut value, "/project/");
2306 assert_eq!(value["line"], 42);
2307 assert_eq!(value["is_type_only"], false);
2308 assert_eq!(value["path"], "src/file.ts");
2309 }
2310
2311 #[test]
2312 fn strip_root_prefix_normalizes_windows_separators() {
2313 let mut value = serde_json::json!(r"/project\src\file.ts");
2314 strip_root_prefix(&mut value, "/project/");
2315 assert_eq!(value, "src/file.ts");
2316 }
2317
2318 #[test]
2319 fn strip_root_prefix_handles_empty_string_after_strip() {
2320 let mut value = serde_json::json!("/project/");
2323 strip_root_prefix(&mut value, "/project/");
2324 assert_eq!(value, "");
2325 }
2326
2327 #[test]
2328 fn strip_root_prefix_deeply_nested_array_of_objects() {
2329 let mut value = serde_json::json!({
2330 "groups": [{
2331 "instances": [{
2332 "file": "/project/src/a.ts"
2333 }, {
2334 "file": "/project/src/b.ts"
2335 }]
2336 }]
2337 });
2338 strip_root_prefix(&mut value, "/project/");
2339 assert_eq!(value["groups"][0]["instances"][0]["file"], "src/a.ts");
2340 assert_eq!(value["groups"][0]["instances"][1]["file"], "src/b.ts");
2341 }
2342
2343 #[test]
2346 fn json_full_sample_results_total_issues_correct() {
2347 let root = PathBuf::from("/project");
2348 let results = sample_results(&root);
2349 let elapsed = Duration::from_millis(100);
2350 let output = build_json(&results, &root, elapsed).expect("should serialize");
2351
2352 assert_eq!(output["total_issues"], results.total_issues());
2358 }
2359
2360 #[test]
2361 fn json_full_sample_no_absolute_paths_in_output() {
2362 let root = PathBuf::from("/project");
2363 let results = sample_results(&root);
2364 let elapsed = Duration::from_millis(0);
2365 let output = build_json(&results, &root, elapsed).expect("should serialize");
2366
2367 let json_str = serde_json::to_string(&output).expect("should stringify");
2368 assert!(!json_str.contains("/project/src/"));
2370 assert!(!json_str.contains("/project/package.json"));
2371 }
2372
2373 #[test]
2376 fn json_output_is_deterministic() {
2377 let root = PathBuf::from("/project");
2378 let results = sample_results(&root);
2379 let elapsed = Duration::from_millis(50);
2380
2381 let output1 = build_json(&results, &root, elapsed).expect("first build");
2382 let output2 = build_json(&results, &root, elapsed).expect("second build");
2383
2384 assert_eq!(output1, output2);
2385 }
2386
2387 #[test]
2390 fn json_results_fields_do_not_shadow_metadata() {
2391 let root = PathBuf::from("/project");
2394 let results = AnalysisResults::default();
2395 let elapsed = Duration::from_millis(99);
2396 let output = build_json(&results, &root, elapsed).expect("should serialize");
2397
2398 assert_eq!(output["schema_version"], 4);
2400 assert_eq!(output["elapsed_ms"], 99);
2401 }
2402
2403 #[test]
2406 fn json_all_issue_type_arrays_present_in_empty_results() {
2407 let root = PathBuf::from("/project");
2408 let results = AnalysisResults::default();
2409 let elapsed = Duration::from_millis(0);
2410 let output = build_json(&results, &root, elapsed).expect("should serialize");
2411
2412 let expected_arrays = [
2413 "unused_files",
2414 "unused_exports",
2415 "unused_types",
2416 "unused_dependencies",
2417 "unused_dev_dependencies",
2418 "unused_optional_dependencies",
2419 "unused_enum_members",
2420 "unused_class_members",
2421 "unresolved_imports",
2422 "unlisted_dependencies",
2423 "duplicate_exports",
2424 "type_only_dependencies",
2425 "test_only_dependencies",
2426 "circular_dependencies",
2427 ];
2428 for key in &expected_arrays {
2429 assert!(
2430 output[key].is_array(),
2431 "expected '{key}' to be an array in JSON output"
2432 );
2433 }
2434 }
2435
2436 #[test]
2439 fn insert_meta_adds_key_to_object() {
2440 let mut output = serde_json::json!({ "foo": 1 });
2441 let meta = serde_json::json!({ "docs": "https://example.com" });
2442 insert_meta(&mut output, meta.clone());
2443 assert_eq!(output["_meta"], meta);
2444 }
2445
2446 #[test]
2447 fn insert_meta_noop_on_non_object() {
2448 let mut output = serde_json::json!([1, 2, 3]);
2449 let meta = serde_json::json!({ "docs": "https://example.com" });
2450 insert_meta(&mut output, meta);
2451 assert!(output.is_array());
2453 }
2454
2455 #[test]
2456 fn insert_meta_overwrites_existing_meta() {
2457 let mut output = serde_json::json!({ "_meta": "old" });
2458 let meta = serde_json::json!({ "new": true });
2459 insert_meta(&mut output, meta.clone());
2460 assert_eq!(output["_meta"], meta);
2461 }
2462
2463 #[test]
2466 fn build_json_envelope_has_metadata_fields() {
2467 let report = serde_json::json!({ "findings": [] });
2468 let elapsed = Duration::from_millis(42);
2469 let output = build_json_envelope(report, elapsed);
2470
2471 assert_eq!(output["schema_version"], 4);
2472 assert!(output["version"].is_string());
2473 assert_eq!(output["elapsed_ms"], 42);
2474 assert!(output["findings"].is_array());
2475 }
2476
2477 #[test]
2478 fn build_json_envelope_metadata_appears_first() {
2479 let report = serde_json::json!({ "data": "value" });
2480 let output = build_json_envelope(report, Duration::from_millis(10));
2481
2482 let keys: Vec<&String> = output.as_object().unwrap().keys().collect();
2483 assert_eq!(keys[0], "schema_version");
2484 assert_eq!(keys[1], "version");
2485 assert_eq!(keys[2], "elapsed_ms");
2486 }
2487
2488 #[test]
2489 fn build_json_envelope_non_object_report() {
2490 let report = serde_json::json!("not an object");
2492 let output = build_json_envelope(report, Duration::from_millis(0));
2493
2494 let obj = output.as_object().unwrap();
2495 assert_eq!(obj.len(), 3);
2496 assert!(obj.contains_key("schema_version"));
2497 assert!(obj.contains_key("version"));
2498 assert!(obj.contains_key("elapsed_ms"));
2499 }
2500
2501 #[test]
2504 fn strip_root_prefix_null_unchanged() {
2505 let mut value = serde_json::Value::Null;
2506 strip_root_prefix(&mut value, "/project/");
2507 assert!(value.is_null());
2508 }
2509
2510 #[test]
2513 fn strip_root_prefix_empty_string() {
2514 let mut value = serde_json::json!("");
2515 strip_root_prefix(&mut value, "/project/");
2516 assert_eq!(value, "");
2517 }
2518
2519 #[test]
2522 fn strip_root_prefix_mixed_types() {
2523 let mut value = serde_json::json!({
2524 "path": "/project/src/file.ts",
2525 "line": 42,
2526 "flag": true,
2527 "nested": {
2528 "items": ["/project/a.ts", 99, null, "/project/b.ts"],
2529 "deep": { "path": "/project/c.ts" }
2530 }
2531 });
2532 strip_root_prefix(&mut value, "/project/");
2533 assert_eq!(value["path"], "src/file.ts");
2534 assert_eq!(value["line"], 42);
2535 assert_eq!(value["flag"], true);
2536 assert_eq!(value["nested"]["items"][0], "a.ts");
2537 assert_eq!(value["nested"]["items"][1], 99);
2538 assert!(value["nested"]["items"][2].is_null());
2539 assert_eq!(value["nested"]["items"][3], "b.ts");
2540 assert_eq!(value["nested"]["deep"]["path"], "c.ts");
2541 }
2542
2543 #[test]
2546 fn json_check_meta_integrates_correctly() {
2547 let root = PathBuf::from("/project");
2548 let results = AnalysisResults::default();
2549 let elapsed = Duration::from_millis(0);
2550 let mut output = build_json(&results, &root, elapsed).expect("should serialize");
2551 insert_meta(&mut output, crate::explain::check_meta());
2552
2553 assert!(output["_meta"]["docs"].is_string());
2554 assert!(output["_meta"]["rules"].is_object());
2555 }
2556
    /// The `kind` discriminant of `UnusedMember` serializes as a string for
    /// both enum members and class members.
    #[test]
    fn json_unused_member_kind_serialized() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        // One finding of each member flavor to cover both output arrays.
        results.unused_enum_members.push(UnusedMember {
            path: root.join("src/enums.ts"),
            parent_name: "Color".to_string(),
            member_name: "Red".to_string(),
            kind: MemberKind::EnumMember,
            line: 3,
            col: 2,
        });
        results.unused_class_members.push(UnusedMember {
            path: root.join("src/class.ts"),
            parent_name: "Foo".to_string(),
            member_name: "bar".to_string(),
            kind: MemberKind::ClassMethod,
            line: 10,
            col: 4,
        });

        let elapsed = Duration::from_millis(0);
        let output = build_json(&results, &root, elapsed).expect("should serialize");

        let enum_member = &output["unused_enum_members"][0];
        assert!(enum_member["kind"].is_string());
        let class_member = &output["unused_class_members"][0];
        assert!(class_member["kind"].is_string());
    }
2588
    /// An unused export gets exactly two actions: an auto-fixable
    /// `remove-export` (no note) and a `suppress-line` carrying the
    /// ignore-comment text.
    #[test]
    fn json_unused_export_has_actions() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_exports.push(UnusedExport {
            path: root.join("src/utils.ts"),
            export_name: "helperFn".to_string(),
            is_type_only: false,
            line: 10,
            col: 4,
            span_start: 120,
            is_re_export: false,
        });
        let output = build_json(&results, &root, Duration::ZERO).unwrap();

        let actions = output["unused_exports"][0]["actions"].as_array().unwrap();
        assert_eq!(actions.len(), 2);

        assert_eq!(actions[0]["type"], "remove-export");
        assert_eq!(actions[0]["auto_fixable"], true);
        assert!(actions[0].get("note").is_none());

        assert_eq!(actions[1]["type"], "suppress-line");
        assert_eq!(
            actions[1]["comment"],
            "// fallow-ignore-next-line unused-export"
        );
    }
2621
    /// An unused file gets a non-fixable `delete-file` action carrying an
    /// explanatory note, plus a file-level suppression comment.
    #[test]
    fn json_unused_file_has_file_suppress_and_note() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_files.push(UnusedFile {
            path: root.join("src/dead.ts"),
        });
        let output = build_json(&results, &root, Duration::ZERO).unwrap();

        let actions = output["unused_files"][0]["actions"].as_array().unwrap();
        assert_eq!(actions[0]["type"], "delete-file");
        assert_eq!(actions[0]["auto_fixable"], false);
        assert!(actions[0]["note"].is_string());
        assert_eq!(actions[1]["type"], "suppress-file");
        assert_eq!(actions[1]["comment"], "// fallow-ignore-file unused-file");
    }
2638
    /// An unused dependency gets an auto-fixable `remove-dependency` plus an
    /// `add-to-config` action whose value is the package name itself.
    #[test]
    fn json_unused_dependency_has_config_suppress_with_package_name() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_dependencies.push(UnusedDependency {
            package_name: "lodash".to_string(),
            location: DependencyLocation::Dependencies,
            path: root.join("package.json"),
            line: 5,
            used_in_workspaces: Vec::new(),
        });
        let output = build_json(&results, &root, Duration::ZERO).unwrap();

        let actions = output["unused_dependencies"][0]["actions"]
            .as_array()
            .unwrap();
        assert_eq!(actions[0]["type"], "remove-dependency");
        assert_eq!(actions[0]["auto_fixable"], true);

        assert_eq!(actions[1]["type"], "add-to-config");
        assert_eq!(actions[1]["config_key"], "ignoreDependencies");
        assert_eq!(actions[1]["value"], "lodash");
    }
2663
    /// A dependency unused here but consumed by another workspace is offered
    /// as a non-fixable `move-dependency` (the note warns it will not be
    /// removed automatically) instead of a removal.
    #[test]
    fn json_cross_workspace_dependency_is_not_auto_fixable() {
        let root = PathBuf::from("/project");
        let mut results = AnalysisResults::default();
        results.unused_dependencies.push(UnusedDependency {
            package_name: "lodash-es".to_string(),
            location: DependencyLocation::Dependencies,
            path: root.join("packages/shared/package.json"),
            line: 5,
            // Non-empty: the package is actually used elsewhere in the repo.
            used_in_workspaces: vec![root.join("packages/consumer")],
        });
        let output = build_json(&results, &root, Duration::ZERO).unwrap();

        let actions = output["unused_dependencies"][0]["actions"]
            .as_array()
            .unwrap();
        assert_eq!(actions[0]["type"], "move-dependency");
        assert_eq!(actions[0]["auto_fixable"], false);
        assert!(
            actions[0]["note"]
                .as_str()
                .unwrap()
                .contains("will not remove")
        );
        assert_eq!(actions[1]["type"], "add-to-config");
    }
2690
2691 #[test]
2692 fn json_empty_results_have_no_actions_in_empty_arrays() {
2693 let root = PathBuf::from("/project");
2694 let results = AnalysisResults::default();
2695 let output = build_json(&results, &root, Duration::ZERO).unwrap();
2696
2697 assert!(output["unused_exports"].as_array().unwrap().is_empty());
2699 assert!(output["unused_files"].as_array().unwrap().is_empty());
2700 }
2701
2702 #[test]
2703 fn json_all_issue_types_have_actions() {
2704 let root = PathBuf::from("/project");
2705 let results = sample_results(&root);
2706 let output = build_json(&results, &root, Duration::ZERO).unwrap();
2707
2708 let issue_keys = [
2709 "unused_files",
2710 "unused_exports",
2711 "unused_types",
2712 "unused_dependencies",
2713 "unused_dev_dependencies",
2714 "unused_optional_dependencies",
2715 "unused_enum_members",
2716 "unused_class_members",
2717 "unresolved_imports",
2718 "unlisted_dependencies",
2719 "duplicate_exports",
2720 "type_only_dependencies",
2721 "test_only_dependencies",
2722 "circular_dependencies",
2723 ];
2724
2725 for key in &issue_keys {
2726 let arr = output[key].as_array().unwrap();
2727 if !arr.is_empty() {
2728 let actions = arr[0]["actions"].as_array();
2729 assert!(
2730 actions.is_some() && !actions.unwrap().is_empty(),
2731 "missing actions for {key}"
2732 );
2733 }
2734 }
2735 }
2736
    /// A complexity finding gets a non-fixable `refactor-function` action
    /// naming the offending function, plus a `suppress-line` ignore comment.
    #[test]
    fn health_finding_has_actions() {
        let mut output = serde_json::json!({
            "findings": [{
                "path": "src/utils.ts",
                "name": "processData",
                "line": 10,
                "col": 0,
                "cyclomatic": 25,
                "cognitive": 30,
                "line_count": 150,
                "exceeded": "both"
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let actions = output["findings"][0]["actions"].as_array().unwrap();
        assert_eq!(actions.len(), 2);
        assert_eq!(actions[0]["type"], "refactor-function");
        assert_eq!(actions[0]["auto_fixable"], false);
        assert!(
            actions[0]["description"]
                .as_str()
                .unwrap()
                .contains("processData")
        );
        assert_eq!(actions[1]["type"], "suppress-line");
        assert_eq!(
            actions[1]["comment"],
            "// fallow-ignore-next-line complexity"
        );
    }
2772
    /// A refactoring target with evidence gets an `apply-refactoring` action
    /// carrying the recommendation text and category, plus a `suppress-line`.
    #[test]
    fn refactoring_target_has_actions() {
        let mut output = serde_json::json!({
            "targets": [{
                "path": "src/big-module.ts",
                "priority": 85.0,
                "efficiency": 42.5,
                "recommendation": "Split module: 12 exports, 4 unused",
                "category": "split_high_impact",
                "effort": "medium",
                "confidence": "high",
                "evidence": { "unused_exports": 4 }
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let actions = output["targets"][0]["actions"].as_array().unwrap();
        assert_eq!(actions.len(), 2);
        assert_eq!(actions[0]["type"], "apply-refactoring");
        assert_eq!(
            actions[0]["description"],
            "Split module: 12 exports, 4 unused"
        );
        assert_eq!(actions[0]["category"], "split_high_impact");
        assert_eq!(actions[1]["type"], "suppress-line");
    }

    /// A target without an `evidence` object gets only the
    /// `apply-refactoring` action — no suppress entry.
    #[test]
    fn refactoring_target_without_evidence_has_no_suppress() {
        let mut output = serde_json::json!({
            "targets": [{
                "path": "src/simple.ts",
                "priority": 30.0,
                "efficiency": 15.0,
                "recommendation": "Consider extracting helper functions",
                "category": "extract_complex_functions",
                "effort": "small",
                "confidence": "medium"
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let actions = output["targets"][0]["actions"].as_array().unwrap();
        assert_eq!(actions.len(), 1);
        assert_eq!(actions[0]["type"], "apply-refactoring");
    }
2822
    /// Empty `findings`/`targets` arrays are a no-op for action injection.
    #[test]
    fn health_empty_findings_no_actions() {
        let mut output = serde_json::json!({
            "findings": [],
            "targets": []
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        assert!(output["findings"].as_array().unwrap().is_empty());
        assert!(output["targets"].as_array().unwrap().is_empty());
    }

    /// A hotspot gets a `refactor-file` action naming the file, followed by
    /// an `add-tests` action.
    #[test]
    fn hotspot_has_actions() {
        let mut output = serde_json::json!({
            "hotspots": [{
                "path": "src/utils.ts",
                "complexity_score": 45.0,
                "churn_score": 12,
                "hotspot_score": 540.0
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let actions = output["hotspots"][0]["actions"].as_array().unwrap();
        assert_eq!(actions.len(), 2);
        assert_eq!(actions[0]["type"], "refactor-file");
        assert!(
            actions[0]["description"]
                .as_str()
                .unwrap()
                .contains("src/utils.ts")
        );
        assert_eq!(actions[1]["type"], "add-tests");
    }
2860
    /// Ownership data with `bus_factor: 1` triggers a `low-bus-factor` action
    /// whose description names the sole top contributor.
    #[test]
    fn hotspot_low_bus_factor_emits_action() {
        let mut output = serde_json::json!({
            "hotspots": [{
                "path": "src/api.ts",
                "ownership": {
                    "bus_factor": 1,
                    "contributor_count": 1,
                    "top_contributor": {"identifier": "alice@x", "share": 1.0, "stale_days": 5, "commits": 30},
                    "unowned": null,
                    "drift": false,
                }
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let actions = output["hotspots"][0]["actions"].as_array().unwrap();
        assert!(
            actions
                .iter()
                .filter_map(|a| a["type"].as_str())
                .any(|t| t == "low-bus-factor"),
            "low-bus-factor action should be present",
        );
        let bus = actions
            .iter()
            .find(|a| a["type"] == "low-bus-factor")
            .unwrap();
        assert!(bus["description"].as_str().unwrap().contains("alice@x"));
    }
2892
    /// `unowned: true` yields an `unowned-hotspot` action with a suggested
    /// CODEOWNERS pattern derived from the file's deepest directory.
    #[test]
    fn hotspot_unowned_emits_action_with_pattern() {
        let mut output = serde_json::json!({
            "hotspots": [{
                "path": "src/api/users.ts",
                "ownership": {
                    "bus_factor": 2,
                    "contributor_count": 4,
                    "top_contributor": {"identifier": "alice@x", "share": 0.5, "stale_days": 5, "commits": 10},
                    "unowned": true,
                    "drift": false,
                }
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let actions = output["hotspots"][0]["actions"].as_array().unwrap();
        let unowned = actions
            .iter()
            .find(|a| a["type"] == "unowned-hotspot")
            .expect("unowned-hotspot action should be present");
        assert_eq!(unowned["suggested_pattern"], "/src/api/");
        assert_eq!(unowned["heuristic"], "directory-deepest");
    }

    /// `unowned: null` signals no ownership verdict was available (per the
    /// test name, a missing CODEOWNERS file), so the unowned action must not
    /// fire.
    #[test]
    fn hotspot_unowned_skipped_when_codeowners_missing() {
        let mut output = serde_json::json!({
            "hotspots": [{
                "path": "src/api.ts",
                "ownership": {
                    "bus_factor": 2,
                    "contributor_count": 4,
                    "top_contributor": {"identifier": "alice@x", "share": 0.5, "stale_days": 5, "commits": 10},
                    "unowned": null,
                    "drift": false,
                }
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let actions = output["hotspots"][0]["actions"].as_array().unwrap();
        assert!(
            !actions.iter().any(|a| a["type"] == "unowned-hotspot"),
            "unowned action must not fire when CODEOWNERS file is absent"
        );
    }
2944
    /// `drift: true` yields an `ownership-drift` action whose description
    /// surfaces the drift reason (here, the faded original author).
    #[test]
    fn hotspot_drift_emits_action() {
        let mut output = serde_json::json!({
            "hotspots": [{
                "path": "src/old.ts",
                "ownership": {
                    "bus_factor": 1,
                    "contributor_count": 2,
                    "top_contributor": {"identifier": "bob@x", "share": 0.9, "stale_days": 1, "commits": 18},
                    "unowned": null,
                    "drift": true,
                    "drift_reason": "original author alice@x has 5% share",
                }
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let actions = output["hotspots"][0]["actions"].as_array().unwrap();
        let drift = actions
            .iter()
            .find(|a| a["type"] == "ownership-drift")
            .expect("ownership-drift action should be present");
        assert!(drift["description"].as_str().unwrap().contains("alice@x"));
    }
2970
    /// The suggested pattern anchors at the file's deepest directory, not the
    /// file itself.
    #[test]
    fn codeowners_pattern_uses_deepest_directory() {
        assert_eq!(
            suggest_codeowners_pattern("src/api/users/handlers.ts"),
            "/src/api/users/"
        );
    }

    /// Files at the repository root are matched by exact path.
    #[test]
    fn codeowners_pattern_for_root_file() {
        assert_eq!(suggest_codeowners_pattern("README.md"), "/README.md");
    }

    /// Windows-style separators are normalized before deriving the pattern.
    #[test]
    fn codeowners_pattern_normalizes_backslashes() {
        assert_eq!(
            suggest_codeowners_pattern("src\\api\\users.ts"),
            "/src/api/"
        );
    }

    /// A single directory level still produces a directory pattern.
    #[test]
    fn codeowners_pattern_two_level_path() {
        assert_eq!(suggest_codeowners_pattern("src/foo.ts"), "/src/");
    }
3000
    /// Suppress actions for ordinary function findings specify placement
    /// above the function declaration.
    #[test]
    fn health_finding_suppress_has_placement() {
        let mut output = serde_json::json!({
            "findings": [{
                "path": "src/utils.ts",
                "name": "processData",
                "line": 10,
                "col": 0,
                "cyclomatic": 25,
                "cognitive": 30,
                "line_count": 150,
                "exceeded": "both"
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let suppress = &output["findings"][0]["actions"][1];
        assert_eq!(suppress["placement"], "above-function-declaration");
    }

    /// Findings in standalone `.html` templates use an HTML comment with
    /// file-level suppression placed at the top of the template.
    #[test]
    fn html_template_health_finding_uses_html_suppression() {
        let mut output = serde_json::json!({
            "findings": [{
                "path": "src/app.component.html",
                "name": "<template>",
                "line": 1,
                "col": 0,
                "cyclomatic": 25,
                "cognitive": 30,
                "line_count": 40,
                "exceeded": "both"
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let suppress = &output["findings"][0]["actions"][1];
        assert_eq!(suppress["type"], "suppress-file");
        assert_eq!(
            suppress["comment"],
            "<!-- fallow-ignore-file complexity -->"
        );
        assert_eq!(suppress["placement"], "top-of-template");
    }
3047
    /// An inline template finding (a `<template>` name inside a `.ts` file)
    /// keeps the refactor action (which mentions template complexity) and is
    /// suppressed with a line comment placed above the Angular decorator.
    #[test]
    fn inline_template_health_finding_uses_decorator_suppression() {
        let mut output = serde_json::json!({
            "findings": [{
                "path": "src/app.component.ts",
                "name": "<template>",
                "line": 5,
                "col": 0,
                "cyclomatic": 25,
                "cognitive": 30,
                "line_count": 40,
                "exceeded": "both"
            }]
        });

        inject_health_actions(&mut output, HealthActionOptions::default());

        let refactor = &output["findings"][0]["actions"][0];
        assert_eq!(refactor["type"], "refactor-function");
        assert!(
            refactor["description"]
                .as_str()
                .unwrap()
                .contains("template complexity")
        );
        let suppress = &output["findings"][0]["actions"][1];
        assert_eq!(suppress["type"], "suppress-line");
        assert_eq!(
            suppress["description"],
            "Suppress with an inline comment above the Angular decorator"
        );
        assert_eq!(suppress["placement"], "above-angular-decorator");
    }
3081
    /// A clone family with a suggestion yields three actions:
    /// `extract-shared` (mentioning the duplicated line count),
    /// `apply-suggestion` (echoing the suggestion text), and `suppress-line`.
    #[test]
    fn clone_family_has_actions() {
        let mut output = serde_json::json!({
            "clone_families": [{
                "files": ["src/a.ts", "src/b.ts"],
                "groups": [
                    { "instances": [{"file": "src/a.ts"}, {"file": "src/b.ts"}], "token_count": 100, "line_count": 20 }
                ],
                "total_duplicated_lines": 20,
                "total_duplicated_tokens": 100,
                "suggestions": [
                    { "kind": "ExtractFunction", "description": "Extract shared validation logic", "estimated_savings": 15 }
                ]
            }]
        });

        inject_dupes_actions(&mut output);

        let actions = output["clone_families"][0]["actions"].as_array().unwrap();
        assert_eq!(actions.len(), 3);
        assert_eq!(actions[0]["type"], "extract-shared");
        assert_eq!(actions[0]["auto_fixable"], false);
        assert!(
            actions[0]["description"]
                .as_str()
                .unwrap()
                .contains("20 lines")
        );
        assert_eq!(actions[1]["type"], "apply-suggestion");
        assert!(
            actions[1]["description"]
                .as_str()
                .unwrap()
                .contains("validation logic")
        );
        assert_eq!(actions[2]["type"], "suppress-line");
        assert_eq!(
            actions[2]["comment"],
            "// fallow-ignore-next-line code-duplication"
        );
    }
3127
    /// A raw clone group (no suggestions) gets `extract-shared` — describing
    /// both line count and instance count — plus a `suppress-line`.
    #[test]
    fn clone_group_has_actions() {
        let mut output = serde_json::json!({
            "clone_groups": [{
                "instances": [
                    {"file": "src/a.ts", "start_line": 1, "end_line": 10},
                    {"file": "src/b.ts", "start_line": 5, "end_line": 14}
                ],
                "token_count": 50,
                "line_count": 10
            }]
        });

        inject_dupes_actions(&mut output);

        let actions = output["clone_groups"][0]["actions"].as_array().unwrap();
        assert_eq!(actions.len(), 2);
        assert_eq!(actions[0]["type"], "extract-shared");
        assert!(
            actions[0]["description"]
                .as_str()
                .unwrap()
                .contains("10 lines")
        );
        assert!(
            actions[0]["description"]
                .as_str()
                .unwrap()
                .contains("2 instances")
        );
        assert_eq!(actions[1]["type"], "suppress-line");
    }

    /// Empty duplicate arrays are a no-op for action injection.
    #[test]
    fn dupes_empty_results_no_actions() {
        let mut output = serde_json::json!({
            "clone_families": [],
            "clone_groups": []
        });

        inject_dupes_actions(&mut output);

        assert!(output["clone_families"].as_array().unwrap().is_empty());
        assert!(output["clone_groups"].as_array().unwrap().is_empty());
    }
3173
    /// Builds a single-finding health envelope whose finding exceeds only the
    /// CRAP threshold, with cognitive complexity fixed at 12 and default
    /// summary thresholds (cognitive 15, CRAP 30.0).
    fn crap_only_finding_envelope(
        coverage_tier: Option<&str>,
        cyclomatic: u16,
        max_cyclomatic_threshold: u16,
    ) -> serde_json::Value {
        crap_only_finding_envelope_with_max_crap(
            coverage_tier,
            cyclomatic,
            12,
            max_cyclomatic_threshold,
            15,
            30.0,
        )
    }

    /// Like `crap_only_finding_envelope`, but with an explicit cognitive
    /// complexity for the finding.
    fn crap_only_finding_envelope_with_cognitive(
        coverage_tier: Option<&str>,
        cyclomatic: u16,
        cognitive: u16,
        max_cyclomatic_threshold: u16,
    ) -> serde_json::Value {
        crap_only_finding_envelope_with_max_crap(
            coverage_tier,
            cyclomatic,
            cognitive,
            max_cyclomatic_threshold,
            15,
            30.0,
        )
    }

    /// Fully parameterized builder: one `exceeded: "crap"` finding with
    /// `crap: 35.5`, plus a `summary` carrying the given thresholds. The
    /// `coverage_tier` field is only set when `Some`, so tests can exercise
    /// the tier-missing path.
    fn crap_only_finding_envelope_with_max_crap(
        coverage_tier: Option<&str>,
        cyclomatic: u16,
        cognitive: u16,
        max_cyclomatic_threshold: u16,
        max_cognitive_threshold: u16,
        max_crap_threshold: f64,
    ) -> serde_json::Value {
        let mut finding = serde_json::json!({
            "path": "src/risk.ts",
            "name": "computeScore",
            "line": 12,
            "col": 0,
            "cyclomatic": cyclomatic,
            "cognitive": cognitive,
            "line_count": 40,
            "exceeded": "crap",
            "crap": 35.5,
        });
        if let Some(tier) = coverage_tier {
            finding["coverage_tier"] = serde_json::Value::String(tier.to_owned());
        }
        serde_json::json!({
            "findings": [finding],
            "summary": {
                "max_cyclomatic_threshold": max_cyclomatic_threshold,
                "max_cognitive_threshold": max_cognitive_threshold,
                "max_crap_threshold": max_crap_threshold,
            },
        })
    }
3244
    /// Coverage tier "none" on a CRAP-only finding emits `add-tests`, never
    /// `increase-coverage`.
    #[test]
    fn crap_only_tier_none_emits_add_tests() {
        let mut output = crap_only_finding_envelope(Some("none"), 6, 20);
        inject_health_actions(&mut output, HealthActionOptions::default());
        let actions = output["findings"][0]["actions"].as_array().unwrap();
        assert!(
            actions.iter().any(|a| a["type"] == "add-tests"),
            "tier=none crap-only must emit add-tests, got {actions:?}"
        );
        assert!(
            !actions.iter().any(|a| a["type"] == "increase-coverage"),
            "tier=none must not emit increase-coverage"
        );
    }

    /// Coverage tier "partial" flips the recommendation: `increase-coverage`
    /// instead of `add-tests`.
    #[test]
    fn crap_only_tier_partial_emits_increase_coverage() {
        let mut output = crap_only_finding_envelope(Some("partial"), 6, 20);
        inject_health_actions(&mut output, HealthActionOptions::default());
        let actions = output["findings"][0]["actions"].as_array().unwrap();
        assert!(
            actions.iter().any(|a| a["type"] == "increase-coverage"),
            "tier=partial crap-only must emit increase-coverage, got {actions:?}"
        );
        assert!(
            !actions.iter().any(|a| a["type"] == "add-tests"),
            "tier=partial must not emit add-tests"
        );
    }

    /// Even at tier "high", when pushing to full coverage could still clear
    /// the CRAP threshold, the action stays `increase-coverage` — no
    /// refactor and no add-tests.
    #[test]
    fn crap_only_tier_high_emits_increase_coverage_when_full_coverage_can_clear_crap() {
        let mut output = crap_only_finding_envelope(Some("high"), 20, 30);
        inject_health_actions(&mut output, HealthActionOptions::default());
        let actions = output["findings"][0]["actions"].as_array().unwrap();
        assert!(
            actions.iter().any(|a| a["type"] == "increase-coverage"),
            "tier=high crap-only must still emit increase-coverage when full coverage can clear CRAP, got {actions:?}"
        );
        assert!(
            !actions.iter().any(|a| a["type"] == "refactor-function"),
            "coverage-remediable crap-only findings should not get refactor-function unless near the cyclomatic threshold"
        );
        assert!(
            !actions.iter().any(|a| a["type"] == "add-tests"),
            "tier=high must not emit add-tests"
        );
    }
3296
    /// When even 100% coverage cannot bring CRAP under the threshold
    /// (cyclomatic 35 vs max CRAP 30.0), the only sensible action is
    /// `refactor-function`; coverage actions are suppressed.
    #[test]
    fn crap_only_emits_refactor_when_full_coverage_cannot_clear_crap() {
        let mut output =
            crap_only_finding_envelope_with_max_crap(Some("high"), 35, 12, 50, 15, 30.0);
        inject_health_actions(&mut output, HealthActionOptions::default());
        let actions = output["findings"][0]["actions"].as_array().unwrap();
        assert!(
            actions.iter().any(|a| a["type"] == "refactor-function"),
            "full-coverage-impossible CRAP-only finding must emit refactor-function, got {actions:?}"
        );
        assert!(
            !actions.iter().any(|a| a["type"] == "increase-coverage"),
            "must not emit increase-coverage when even 100% coverage cannot clear CRAP"
        );
        assert!(
            !actions.iter().any(|a| a["type"] == "add-tests"),
            "must not emit add-tests when even 100% coverage cannot clear CRAP"
        );
    }

    /// A CRAP-only finding whose cyclomatic complexity (16) is near the
    /// threshold (20) keeps the primary tier action and also gains a
    /// secondary `refactor-function`.
    #[test]
    fn crap_only_high_cc_appends_secondary_refactor() {
        let mut output = crap_only_finding_envelope(Some("none"), 16, 20);
        inject_health_actions(&mut output, HealthActionOptions::default());
        let actions = output["findings"][0]["actions"].as_array().unwrap();
        assert!(
            actions.iter().any(|a| a["type"] == "add-tests"),
            "near-threshold crap-only still emits the primary tier action"
        );
        assert!(
            actions.iter().any(|a| a["type"] == "refactor-function"),
            "near-threshold crap-only must also emit secondary refactor-function"
        );
    }

    /// Cyclomatic complexity far below the threshold (6 vs 20) must not
    /// trigger the secondary refactor action.
    #[test]
    fn crap_only_far_below_threshold_no_secondary_refactor() {
        let mut output = crap_only_finding_envelope(Some("none"), 6, 20);
        inject_health_actions(&mut output, HealthActionOptions::default());
        let actions = output["findings"][0]["actions"].as_array().unwrap();
        assert!(
            !actions.iter().any(|a| a["type"] == "refactor-function"),
            "low-CC crap-only should not get a secondary refactor-function"
        );
    }

    /// Near-threshold cyclomatic complexity alone is not enough: with
    /// cognitive complexity below the floor (2), no secondary refactor is
    /// emitted, though the primary tier action still fires.
    #[test]
    fn crap_only_near_threshold_low_cognitive_no_secondary_refactor() {
        let mut output = crap_only_finding_envelope_with_cognitive(Some("none"), 17, 2, 20);
        inject_health_actions(&mut output, HealthActionOptions::default());
        let actions = output["findings"][0]["actions"].as_array().unwrap();
        assert!(
            actions.iter().any(|a| a["type"] == "add-tests"),
            "primary tier action still emits"
        );
        assert!(
            !actions.iter().any(|a| a["type"] == "refactor-function"),
            "near-threshold CC with cognitive below floor must NOT emit secondary refactor (got {actions:?})"
        );
    }
3371
3372 #[test]
3373 fn crap_only_near_threshold_high_cognitive_emits_secondary_refactor() {
3374 let mut output = crap_only_finding_envelope_with_cognitive(Some("none"), 16, 10, 20);
3380 inject_health_actions(&mut output, HealthActionOptions::default());
3381 let actions = output["findings"][0]["actions"].as_array().unwrap();
3382 assert!(
3383 actions.iter().any(|a| a["type"] == "add-tests"),
3384 "primary tier action still emits"
3385 );
3386 assert!(
3387 actions.iter().any(|a| a["type"] == "refactor-function"),
3388 "near-threshold CC with cognitive above floor must emit secondary refactor (got {actions:?})"
3389 );
3390 }
3391
3392 #[test]
3393 fn cyclomatic_only_emits_only_refactor_function() {
3394 let mut output = serde_json::json!({
3395 "findings": [{
3396 "path": "src/cyclo.ts",
3397 "name": "branchy",
3398 "line": 5,
3399 "col": 0,
3400 "cyclomatic": 25,
3401 "cognitive": 10,
3402 "line_count": 80,
3403 "exceeded": "cyclomatic",
3404 }],
3405 "summary": { "max_cyclomatic_threshold": 20 },
3406 });
3407 inject_health_actions(&mut output, HealthActionOptions::default());
3408 let actions = output["findings"][0]["actions"].as_array().unwrap();
3409 assert!(
3410 actions.iter().any(|a| a["type"] == "refactor-function"),
3411 "non-CRAP findings emit refactor-function"
3412 );
3413 assert!(
3414 !actions.iter().any(|a| a["type"] == "add-tests"),
3415 "non-CRAP findings must not emit add-tests"
3416 );
3417 assert!(
3418 !actions.iter().any(|a| a["type"] == "increase-coverage"),
3419 "non-CRAP findings must not emit increase-coverage"
3420 );
3421 }
3422
3423 #[test]
3426 fn suppress_line_omitted_when_baseline_active() {
3427 let mut output = crap_only_finding_envelope(Some("none"), 6, 20);
3428 inject_health_actions(
3429 &mut output,
3430 HealthActionOptions {
3431 omit_suppress_line: true,
3432 omit_reason: Some("baseline-active"),
3433 },
3434 );
3435 let actions = output["findings"][0]["actions"].as_array().unwrap();
3436 assert!(
3437 !actions.iter().any(|a| a["type"] == "suppress-line"),
3438 "baseline-active must not emit suppress-line, got {actions:?}"
3439 );
3440 assert_eq!(
3441 output["actions_meta"]["suppression_hints_omitted"],
3442 serde_json::Value::Bool(true)
3443 );
3444 assert_eq!(output["actions_meta"]["reason"], "baseline-active");
3445 assert_eq!(output["actions_meta"]["scope"], "health-findings");
3446 }
3447
3448 #[test]
3449 fn suppress_line_omitted_when_config_disabled() {
3450 let mut output = crap_only_finding_envelope(Some("none"), 6, 20);
3451 inject_health_actions(
3452 &mut output,
3453 HealthActionOptions {
3454 omit_suppress_line: true,
3455 omit_reason: Some("config-disabled"),
3456 },
3457 );
3458 assert_eq!(output["actions_meta"]["reason"], "config-disabled");
3459 }
3460
3461 #[test]
3462 fn suppress_line_emitted_by_default() {
3463 let mut output = crap_only_finding_envelope(Some("none"), 6, 20);
3464 inject_health_actions(&mut output, HealthActionOptions::default());
3465 let actions = output["findings"][0]["actions"].as_array().unwrap();
3466 assert!(
3467 actions.iter().any(|a| a["type"] == "suppress-line"),
3468 "default opts must emit suppress-line"
3469 );
3470 assert!(
3471 output.get("actions_meta").is_none(),
3472 "actions_meta must be absent when no omission occurred"
3473 );
3474 }
3475
3476 #[test]
3483 fn every_emitted_health_action_type_is_in_schema_enum() {
3484 let cases = [
3488 ("crap", Some("none"), 6_u16, 20_u16),
3490 ("crap", Some("partial"), 6, 20),
3491 ("crap", Some("high"), 12, 20),
3492 ("crap", Some("none"), 16, 20), ("cyclomatic", None, 25, 20),
3494 ("cognitive_crap", Some("partial"), 6, 20),
3495 ("all", Some("none"), 25, 20),
3496 ];
3497
3498 let mut emitted: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
3499 for (exceeded, tier, cc, max) in cases {
3500 let mut finding = serde_json::json!({
3501 "path": "src/x.ts",
3502 "name": "fn",
3503 "line": 1,
3504 "col": 0,
3505 "cyclomatic": cc,
3506 "cognitive": 5,
3507 "line_count": 10,
3508 "exceeded": exceeded,
3509 "crap": 35.0,
3510 });
3511 if let Some(t) = tier {
3512 finding["coverage_tier"] = serde_json::Value::String(t.to_owned());
3513 }
3514 let mut output = serde_json::json!({
3515 "findings": [finding],
3516 "summary": { "max_cyclomatic_threshold": max },
3517 });
3518 inject_health_actions(&mut output, HealthActionOptions::default());
3519 for action in output["findings"][0]["actions"].as_array().unwrap() {
3520 if let Some(ty) = action["type"].as_str() {
3521 emitted.insert(ty.to_owned());
3522 }
3523 }
3524 }
3525
3526 let schema_path = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
3528 .join("..")
3529 .join("..")
3530 .join("docs")
3531 .join("output-schema.json");
3532 let raw = std::fs::read_to_string(&schema_path)
3533 .expect("docs/output-schema.json must be readable for the drift-guard test");
3534 let schema: serde_json::Value = serde_json::from_str(&raw).expect("schema parses");
3535 let enum_values: std::collections::BTreeSet<String> =
3536 schema["definitions"]["HealthFindingAction"]["properties"]["type"]["enum"]
3537 .as_array()
3538 .expect("HealthFindingAction.type.enum is an array")
3539 .iter()
3540 .filter_map(|v| v.as_str().map(str::to_owned))
3541 .collect();
3542
3543 for ty in &emitted {
3544 assert!(
3545 enum_values.contains(ty),
3546 "build_health_finding_actions emitted action type `{ty}` but \
3547 docs/output-schema.json HealthFindingAction.type enum does \
3548 not list it. Add it to the schema (and any downstream \
3549 typed consumers) when introducing a new action type."
3550 );
3551 }
3552 }
3553}