use std::path::Path;
use std::process::ExitCode;
use std::time::Duration;
use fallow_core::duplicates::DuplicationReport;
use fallow_core::results::AnalysisResults;
use super::{emit_json, normalize_uri};
use crate::explain;
use crate::report::grouping::{OwnershipResolver, ResultGroup};
/// Serializes `results` as the standard JSON report and emits it.
///
/// Optional sections are merged into the top-level object before printing:
/// a `regression` object (from `outcome.to_json()`), a `baseline`
/// entries/matched pair, and — when `explain` is set — the `_meta` block.
/// Returns exit code 2 when serialization fails.
pub(super) fn print_json(
    results: &AnalysisResults,
    root: &Path,
    elapsed: Duration,
    explain: bool,
    regression: Option<&crate::regression::RegressionOutcome>,
    baseline_matched: Option<(usize, usize)>,
) -> ExitCode {
    let mut output = match build_json(results, root, elapsed) {
        Ok(value) => value,
        Err(e) => {
            eprintln!("Error: failed to serialize results: {e}");
            return ExitCode::from(2);
        }
    };
    // `build_json` always yields an object; both optional sections are
    // inserted under the same top-level map.
    if let serde_json::Value::Object(ref mut map) = output {
        if let Some(outcome) = regression {
            map.insert("regression".to_string(), outcome.to_json());
        }
        if let Some((entries, matched)) = baseline_matched {
            map.insert(
                "baseline".to_string(),
                serde_json::json!({
                    "entries": entries,
                    "matched": matched,
                }),
            );
        }
    }
    if explain {
        insert_meta(&mut output, explain::check_meta());
    }
    emit_json(&output, "JSON")
}
/// Emits the grouped JSON report: one entry per `ResultGroup`, each with
/// `key` and `total_issues` ordered first, wrapped in the standard
/// schema/version/elapsed envelope.
///
/// Groups whose results fail to serialize are skipped rather than failing
/// the whole report.
#[must_use]
pub(super) fn print_grouped_json(
    groups: &[ResultGroup],
    original: &AnalysisResults,
    root: &Path,
    elapsed: Duration,
    explain: bool,
    resolver: &OwnershipResolver,
) -> ExitCode {
    let root_prefix = format!("{}/", root.display());
    let group_values: Vec<serde_json::Value> = groups
        .iter()
        .filter_map(|group| {
            let mut value = serde_json::to_value(&group.results).ok()?;
            strip_root_prefix(&mut value, &root_prefix);
            inject_actions(&mut value);
            if let serde_json::Value::Object(ref mut map) = value {
                // Rebuild the object so `key` and `total_issues` come first.
                let mut ordered = serde_json::Map::new();
                ordered.insert("key".to_string(), serde_json::json!(group.key));
                ordered.insert(
                    "total_issues".to_string(),
                    serde_json::json!(group.results.total_issues()),
                );
                // Move the remaining entries instead of cloning every key and
                // value; `map` is discarded with `value` immediately after.
                for (k, v) in std::mem::take(map) {
                    ordered.insert(k, v);
                }
                Some(serde_json::Value::Object(ordered))
            } else {
                Some(value)
            }
        })
        .collect();
    let mut output = serde_json::json!({
        "schema_version": SCHEMA_VERSION,
        "version": env!("CARGO_PKG_VERSION"),
        "elapsed_ms": elapsed.as_millis() as u64,
        "grouped_by": resolver.mode_label(),
        "total_issues": original.total_issues(),
        "groups": group_values,
    });
    if explain {
        insert_meta(&mut output, explain::check_meta());
    }
    emit_json(&output, "JSON")
}
/// Version of the JSON output envelope; emitted as `schema_version` by
/// every JSON printer in this module. Bump when the envelope shape changes.
const SCHEMA_VERSION: u32 = 4;
/// Wraps an already-serialized report in the standard JSON envelope.
///
/// Envelope fields (`schema_version`, `version`, `elapsed_ms`) are inserted
/// first so they appear at the top of the output; every top-level field of
/// `report_value` is merged in after them. A non-object `report_value` is
/// dropped, leaving only the envelope.
fn build_json_envelope(report_value: serde_json::Value, elapsed: Duration) -> serde_json::Value {
    let mut map = serde_json::Map::new();
    map.insert(
        "schema_version".to_string(),
        serde_json::json!(SCHEMA_VERSION),
    );
    map.insert(
        "version".to_string(),
        serde_json::json!(env!("CARGO_PKG_VERSION")),
    );
    // Emit a u64 so the field is a fixed-width JSON integer, consistent with
    // `print_grouped_json`; saturation is unreachable for realistic runs.
    map.insert(
        "elapsed_ms".to_string(),
        serde_json::json!(u64::try_from(elapsed.as_millis()).unwrap_or(u64::MAX)),
    );
    if let serde_json::Value::Object(report_map) = report_value {
        for (key, value) in report_map {
            map.insert(key, value);
        }
    }
    serde_json::Value::Object(map)
}
/// Builds the complete analysis JSON document: envelope metadata first,
/// then optional entry-point stats, a per-category `summary`, and finally
/// every field of the serialized `AnalysisResults`.
///
/// Paths are rewritten relative to `root` and per-finding `actions` arrays
/// are injected before returning.
///
/// # Errors
///
/// Returns any `serde_json` error from serializing `results`.
pub fn build_json(
    results: &AnalysisResults,
    root: &Path,
    elapsed: Duration,
) -> Result<serde_json::Value, serde_json::Error> {
    let results_value = serde_json::to_value(results)?;
    // Computed once; used both as a top-level field and inside `summary`.
    let total_issues = results.total_issues();
    let mut map = serde_json::Map::new();
    map.insert(
        "schema_version".to_string(),
        serde_json::json!(SCHEMA_VERSION),
    );
    map.insert(
        "version".to_string(),
        serde_json::json!(env!("CARGO_PKG_VERSION")),
    );
    // Emit a u64 so the field is a fixed-width JSON integer, consistent with
    // `print_grouped_json`; saturation is unreachable for realistic runs.
    map.insert(
        "elapsed_ms".to_string(),
        serde_json::json!(u64::try_from(elapsed.as_millis()).unwrap_or(u64::MAX)),
    );
    map.insert("total_issues".to_string(), serde_json::json!(total_issues));
    if let Some(ref ep) = results.entry_point_summary {
        // Source labels become JSON object keys, so spaces are replaced to
        // keep them machine-friendly.
        let sources: serde_json::Map<String, serde_json::Value> = ep
            .by_source
            .iter()
            .map(|(k, v)| (k.replace(' ', "_"), serde_json::json!(v)))
            .collect();
        map.insert(
            "entry_points".to_string(),
            serde_json::json!({
                "total": ep.total,
                "sources": sources,
            }),
        );
    }
    let summary = serde_json::json!({
        "total_issues": total_issues,
        "unused_files": results.unused_files.len(),
        "unused_exports": results.unused_exports.len(),
        "unused_types": results.unused_types.len(),
        // All three dependency buckets are folded into one summary count.
        "unused_dependencies": results.unused_dependencies.len()
            + results.unused_dev_dependencies.len()
            + results.unused_optional_dependencies.len(),
        "unused_enum_members": results.unused_enum_members.len(),
        "unused_class_members": results.unused_class_members.len(),
        "unresolved_imports": results.unresolved_imports.len(),
        "unlisted_dependencies": results.unlisted_dependencies.len(),
        "duplicate_exports": results.duplicate_exports.len(),
        "type_only_dependencies": results.type_only_dependencies.len(),
        "test_only_dependencies": results.test_only_dependencies.len(),
        "circular_dependencies": results.circular_dependencies.len(),
        "boundary_violations": results.boundary_violations.len(),
        "stale_suppressions": results.stale_suppressions.len(),
    });
    map.insert("summary".to_string(), summary);
    if let serde_json::Value::Object(results_map) = results_value {
        for (key, value) in results_map {
            map.insert(key, value);
        }
    }
    let mut output = serde_json::Value::Object(map);
    let root_prefix = format!("{}/", root.display());
    strip_root_prefix(&mut output, &root_prefix);
    inject_actions(&mut output);
    Ok(output)
}
/// Recursively rewrites every string in `value` that starts with `prefix`
/// to its suffix, making absolute paths relative to the project root.
///
/// When a direct prefix match fails, both string and prefix are passed
/// through `normalize_uri` and retried, so URI-style spellings still match.
pub fn strip_root_prefix(value: &mut serde_json::Value, prefix: &str) {
    match value {
        serde_json::Value::String(s) => {
            let replacement = s.strip_prefix(prefix).map(str::to_owned).or_else(|| {
                let normalized = normalize_uri(s);
                let normalized_prefix = normalize_uri(prefix);
                normalized
                    .strip_prefix(&normalized_prefix)
                    .map(str::to_owned)
            });
            if let Some(rest) = replacement {
                *s = rest;
            }
        }
        serde_json::Value::Array(items) => {
            items.iter_mut().for_each(|v| strip_root_prefix(v, prefix));
        }
        serde_json::Value::Object(fields) => {
            fields.values_mut().for_each(|v| strip_root_prefix(v, prefix));
        }
        _ => {}
    }
}
/// How a finding of a given issue type can be suppressed
/// (see `build_actions` for the emitted action shapes).
enum SuppressKind {
    // `// fallow-ignore-next-line <kind>` comment above the offending line.
    InlineComment,
    // `// fallow-ignore-file <kind>` comment at the top of the file.
    FileComment,
    // Add the package to `ignoreDependencies` in the fallow config.
    ConfigIgnoreDep,
}
/// Static description of the remediation actions for one issue type,
/// produced by `actions_for_issue_type` and consumed by `build_actions`.
struct ActionSpec {
    // Machine-readable type of the primary fix action, e.g. "remove-export".
    fix_type: &'static str,
    // Whether the primary fix can be applied automatically.
    auto_fixable: bool,
    // Human-readable instruction for the primary fix.
    description: &'static str,
    // Optional caveat attached to the primary fix action.
    note: Option<&'static str>,
    // Which suppression mechanism applies to this issue type.
    suppress: SuppressKind,
    // Kind token interpolated into fallow-ignore suppression comments.
    issue_kind: &'static str,
}
/// Looks up the static remediation spec for a top-level issue-array key
/// (e.g. "unused_files") of the JSON output.
///
/// Returns `None` for keys that are not issue arrays (envelope metadata,
/// `summary`, …), which `inject_actions` uses to skip them. This is a pure
/// data table: one arm per issue category.
fn actions_for_issue_type(key: &str) -> Option<ActionSpec> {
    match key {
        "unused_files" => Some(ActionSpec {
            fix_type: "delete-file",
            auto_fixable: false,
            description: "Delete this file",
            note: Some(
                "File deletion may remove runtime functionality not visible to static analysis",
            ),
            suppress: SuppressKind::FileComment,
            issue_kind: "unused-file",
        }),
        "unused_exports" => Some(ActionSpec {
            fix_type: "remove-export",
            auto_fixable: true,
            description: "Remove the `export` keyword from the declaration",
            note: None,
            suppress: SuppressKind::InlineComment,
            issue_kind: "unused-export",
        }),
        "unused_types" => Some(ActionSpec {
            fix_type: "remove-export",
            auto_fixable: true,
            description: "Remove the `export` (or `export type`) keyword from the type declaration",
            note: None,
            suppress: SuppressKind::InlineComment,
            issue_kind: "unused-type",
        }),
        "unused_dependencies" => Some(ActionSpec {
            fix_type: "remove-dependency",
            auto_fixable: true,
            description: "Remove from dependencies in package.json",
            note: None,
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "unused-dependency",
        }),
        "unused_dev_dependencies" => Some(ActionSpec {
            fix_type: "remove-dependency",
            auto_fixable: true,
            description: "Remove from devDependencies in package.json",
            note: None,
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "unused-dev-dependency",
        }),
        // Note: shares the "unused-dependency" suppression kind with
        // "unused_dependencies" above.
        "unused_optional_dependencies" => Some(ActionSpec {
            fix_type: "remove-dependency",
            auto_fixable: true,
            description: "Remove from optionalDependencies in package.json",
            note: None,
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "unused-dependency",
        }),
        "unused_enum_members" => Some(ActionSpec {
            fix_type: "remove-enum-member",
            auto_fixable: true,
            description: "Remove this enum member",
            note: None,
            suppress: SuppressKind::InlineComment,
            issue_kind: "unused-enum-member",
        }),
        "unused_class_members" => Some(ActionSpec {
            fix_type: "remove-class-member",
            auto_fixable: false,
            description: "Remove this class member",
            note: Some("Class member may be used via dependency injection or decorators"),
            suppress: SuppressKind::InlineComment,
            issue_kind: "unused-class-member",
        }),
        "unresolved_imports" => Some(ActionSpec {
            fix_type: "resolve-import",
            auto_fixable: false,
            description: "Fix the import specifier or install the missing module",
            note: Some("Verify the module path and check tsconfig paths configuration"),
            suppress: SuppressKind::InlineComment,
            issue_kind: "unresolved-import",
        }),
        "unlisted_dependencies" => Some(ActionSpec {
            fix_type: "install-dependency",
            auto_fixable: false,
            description: "Add this package to dependencies in package.json",
            note: Some("Verify this package should be a direct dependency before adding"),
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "unlisted-dependency",
        }),
        "duplicate_exports" => Some(ActionSpec {
            fix_type: "remove-duplicate",
            auto_fixable: false,
            description: "Keep one canonical export location and remove the others",
            note: Some("Review all locations to determine which should be the canonical export"),
            suppress: SuppressKind::InlineComment,
            issue_kind: "duplicate-export",
        }),
        "type_only_dependencies" => Some(ActionSpec {
            fix_type: "move-to-dev",
            auto_fixable: false,
            description: "Move to devDependencies (only type imports are used)",
            note: Some(
                "Type imports are erased at runtime so this dependency is not needed in production",
            ),
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "type-only-dependency",
        }),
        "test_only_dependencies" => Some(ActionSpec {
            fix_type: "move-to-dev",
            auto_fixable: false,
            description: "Move to devDependencies (only test files import this)",
            note: Some(
                "Only test files import this package so it does not need to be a production dependency",
            ),
            suppress: SuppressKind::ConfigIgnoreDep,
            issue_kind: "test-only-dependency",
        }),
        "circular_dependencies" => Some(ActionSpec {
            fix_type: "refactor-cycle",
            auto_fixable: false,
            description: "Extract shared logic into a separate module to break the cycle",
            note: Some(
                "Circular imports can cause initialization issues and make code harder to reason about",
            ),
            suppress: SuppressKind::InlineComment,
            issue_kind: "circular-dependency",
        }),
        "boundary_violations" => Some(ActionSpec {
            fix_type: "refactor-boundary",
            auto_fixable: false,
            description: "Move the import through an allowed zone or restructure the dependency",
            note: Some(
                "This import crosses an architecture boundary that is not permitted by the configured rules",
            ),
            suppress: SuppressKind::InlineComment,
            issue_kind: "boundary-violation",
        }),
        // Keys without a spec (envelope fields, summary, stale_suppressions)
        // get no actions injected.
        _ => None,
    }
}
/// Builds the `actions` array for a single finding: the primary fix action
/// first, then one suppression action shaped by the issue type's
/// `SuppressKind`.
fn build_actions(
    item: &serde_json::Value,
    issue_key: &str,
    spec: &ActionSpec,
) -> serde_json::Value {
    let mut actions = Vec::with_capacity(2);
    // Primary fix action, taken from the static spec for this issue type.
    let mut fix_action = serde_json::json!({
        "type": spec.fix_type,
        "auto_fixable": spec.auto_fixable,
        "description": spec.description,
    });
    if let Some(note) = spec.note {
        fix_action["note"] = serde_json::json!(note);
    }
    // Re-exported symbols get a stronger warning that deliberately
    // overrides any spec-level note set above.
    if (issue_key == "unused_exports" || issue_key == "unused_types")
        && item
            .get("is_re_export")
            .and_then(serde_json::Value::as_bool)
            == Some(true)
    {
        fix_action["note"] = serde_json::json!(
            "This finding originates from a re-export; verify it is not part of your public API before removing"
        );
    }
    actions.push(fix_action);
    match spec.suppress {
        SuppressKind::InlineComment => {
            let mut suppress = serde_json::json!({
                "type": "suppress-line",
                "auto_fixable": false,
                "description": "Suppress with an inline comment above the line",
                "comment": format!("// fallow-ignore-next-line {}", spec.issue_kind),
            });
            // Duplicate exports span several locations; mark the
            // suppression as applying per location.
            if issue_key == "duplicate_exports" {
                suppress["scope"] = serde_json::json!("per-location");
            }
            actions.push(suppress);
        }
        SuppressKind::FileComment => {
            actions.push(serde_json::json!({
                "type": "suppress-file",
                "auto_fixable": false,
                "description": "Suppress with a file-level comment at the top of the file",
                "comment": format!("// fallow-ignore-file {}", spec.issue_kind),
            }));
        }
        SuppressKind::ConfigIgnoreDep => {
            // Fallback placeholder keeps the description readable even if
            // the finding lacks a package_name field.
            let pkg = item
                .get("package_name")
                .and_then(serde_json::Value::as_str)
                .unwrap_or("package-name");
            actions.push(serde_json::json!({
                "type": "add-to-config",
                "auto_fixable": false,
                "description": format!("Add \"{pkg}\" to ignoreDependencies in fallow config"),
                "config_key": "ignoreDependencies",
                "value": pkg,
            }));
        }
    }
    serde_json::Value::Array(actions)
}
/// Walks the top-level issue arrays of `output` and appends an `actions`
/// array to every finding object, based on the issue type's static spec.
/// No-op for non-object outputs and for keys without a spec.
fn inject_actions(output: &mut serde_json::Value) {
    let Some(map) = output.as_object_mut() else {
        return;
    };
    for (key, value) in map.iter_mut() {
        let spec = match actions_for_issue_type(key) {
            Some(spec) => spec,
            None => continue,
        };
        let Some(items) = value.as_array_mut() else {
            continue;
        };
        for item in items {
            let actions = build_actions(item, key, &spec);
            if let Some(obj) = item.as_object_mut() {
                obj.insert("actions".to_string(), actions);
            }
        }
    }
}
/// Builds the baseline-delta JSON object: the overall `total_delta` plus a
/// `per_category` map of current/baseline/delta triples.
pub fn build_baseline_deltas_json<'a>(
    total_delta: i64,
    per_category: impl Iterator<Item = (&'a str, usize, usize, i64)>,
) -> serde_json::Value {
    let per_cat: serde_json::Map<String, serde_json::Value> = per_category
        .map(|(cat, current, baseline, delta)| {
            (
                cat.to_string(),
                serde_json::json!({
                    "current": current,
                    "baseline": baseline,
                    "delta": delta,
                }),
            )
        })
        .collect();
    serde_json::json!({
        "total_delta": total_delta,
        "per_category": per_cat
    })
}
#[allow(
clippy::redundant_pub_crate,
reason = "pub(crate) needed — used by audit.rs via re-export, but not part of public API"
)]
pub(crate) fn inject_health_actions(output: &mut serde_json::Value) {
let Some(map) = output.as_object_mut() else {
return;
};
if let Some(findings) = map.get_mut("findings").and_then(|v| v.as_array_mut()) {
for item in findings {
let actions = build_health_finding_actions(item);
if let serde_json::Value::Object(obj) = item {
obj.insert("actions".to_string(), actions);
}
}
}
if let Some(targets) = map.get_mut("targets").and_then(|v| v.as_array_mut()) {
for item in targets {
let actions = build_refactoring_target_actions(item);
if let serde_json::Value::Object(obj) = item {
obj.insert("actions".to_string(), actions);
}
}
}
if let Some(hotspots) = map.get_mut("hotspots").and_then(|v| v.as_array_mut()) {
for item in hotspots {
let actions = build_hotspot_actions(item);
if let serde_json::Value::Object(obj) = item {
obj.insert("actions".to_string(), actions);
}
}
}
if let Some(gaps) = map.get_mut("coverage_gaps").and_then(|v| v.as_object_mut()) {
if let Some(files) = gaps.get_mut("files").and_then(|v| v.as_array_mut()) {
for item in files {
let actions = build_untested_file_actions(item);
if let serde_json::Value::Object(obj) = item {
obj.insert("actions".to_string(), actions);
}
}
}
if let Some(exports) = gaps.get_mut("exports").and_then(|v| v.as_array_mut()) {
for item in exports {
let actions = build_untested_export_actions(item);
if let serde_json::Value::Object(obj) = item {
obj.insert("actions".to_string(), actions);
}
}
}
}
}
/// Builds the two-action array for a complexity finding: a refactor
/// suggestion naming the function, plus an inline suppression comment.
fn build_health_finding_actions(item: &serde_json::Value) -> serde_json::Value {
    let name = item
        .get("name")
        .and_then(serde_json::Value::as_str)
        .unwrap_or("function");
    let refactor = serde_json::json!({
        "type": "refactor-function",
        "auto_fixable": false,
        "description": format!("Refactor `{name}` to reduce complexity (extract helper functions, simplify branching)"),
        "note": "Consider splitting into smaller functions with single responsibilities",
    });
    let suppress = serde_json::json!({
        "type": "suppress-line",
        "auto_fixable": false,
        "description": "Suppress with an inline comment above the function declaration",
        "comment": "// fallow-ignore-next-line complexity",
        "placement": "above-function-declaration",
    });
    serde_json::Value::Array(vec![refactor, suppress])
}
/// Builds the `actions` array for a complexity/churn hotspot entry.
///
/// Always emits a refactor-file and an add-tests action; when the entry
/// carries `ownership` data, conditionally appends low-bus-factor,
/// unowned-hotspot, and ownership-drift actions (in that order).
fn build_hotspot_actions(item: &serde_json::Value) -> serde_json::Value {
    let path = item
        .get("path")
        .and_then(serde_json::Value::as_str)
        .unwrap_or("file");
    let mut actions = vec![
        serde_json::json!({
            "type": "refactor-file",
            "auto_fixable": false,
            "description": format!("Refactor `{path}`, high complexity combined with frequent changes makes this a maintenance risk"),
            "note": "Prioritize extracting complex functions, adding tests, or splitting the module",
        }),
        serde_json::json!({
            "type": "add-tests",
            "auto_fixable": false,
            "description": format!("Add test coverage for `{path}` to reduce change risk"),
            "note": "Frequently changed complex files benefit most from comprehensive test coverage",
        }),
    ];
    if let Some(ownership) = item.get("ownership") {
        // bus_factor == 1: a single recent contributor — surface the risk
        // and, when available, candidate reviewers.
        if ownership
            .get("bus_factor")
            .and_then(serde_json::Value::as_u64)
            == Some(1)
        {
            let top = ownership.get("top_contributor");
            let owner = top
                .and_then(|t| t.get("identifier"))
                .and_then(serde_json::Value::as_str)
                .unwrap_or("the sole contributor");
            let commits = top
                .and_then(|t| t.get("commits"))
                .and_then(serde_json::Value::as_u64)
                .unwrap_or(0);
            let suggested: Vec<String> = ownership
                .get("suggested_reviewers")
                .and_then(serde_json::Value::as_array)
                .map(|arr| {
                    arr.iter()
                        .filter_map(|r| {
                            r.get("identifier")
                                .and_then(serde_json::Value::as_str)
                                .map(String::from)
                        })
                        .collect()
                })
                .unwrap_or_default();
            let mut low_bus_action = serde_json::json!({
                "type": "low-bus-factor",
                "auto_fixable": false,
                "description": format!(
                    "{owner} is the sole recent contributor to `{path}`; adding a second reviewer reduces knowledge-loss risk"
                ),
            });
            // Prefer concrete reviewer names; fall back to a generic pair-review
            // note only on low-commit files (< 5 commits).
            if !suggested.is_empty() {
                let list = suggested
                    .iter()
                    .map(|s| format!("@{s}"))
                    .collect::<Vec<_>>()
                    .join(", ");
                low_bus_action["note"] =
                    serde_json::Value::String(format!("Candidate reviewers: {list}"));
            } else if commits < 5 {
                low_bus_action["note"] = serde_json::Value::String(
                    "Single recent contributor on a low-commit file. Consider a pair review for major changes."
                        .to_string(),
                );
            }
            actions.push(low_bus_action);
        }
        // Files with no declared owner get a CODEOWNERS suggestion.
        if ownership
            .get("unowned")
            .and_then(serde_json::Value::as_bool)
            == Some(true)
        {
            actions.push(serde_json::json!({
                "type": "unowned-hotspot",
                "auto_fixable": false,
                "description": format!("Add a CODEOWNERS entry for `{path}`"),
                "note": "Frequently-changed files without declared owners create review bottlenecks",
                "suggested_pattern": suggest_codeowners_pattern(path),
                "heuristic": "directory-deepest",
            }));
        }
        // Ownership drift: the declared/original owner no longer matches
        // recent activity; include the reported reason in the description.
        if ownership.get("drift").and_then(serde_json::Value::as_bool) == Some(true) {
            let reason = ownership
                .get("drift_reason")
                .and_then(serde_json::Value::as_str)
                .unwrap_or("ownership has shifted from the original author");
            actions.push(serde_json::json!({
                "type": "ownership-drift",
                "auto_fixable": false,
                "description": format!("Update CODEOWNERS for `{path}`: {reason}"),
                "note": "Drift suggests the declared or original owner is no longer the right reviewer",
            }));
        }
    }
    serde_json::Value::Array(actions)
}
/// Suggests a CODEOWNERS pattern for `path` using the deepest containing
/// directory ("directory-deepest" heuristic).
///
/// Backslashes are normalized to forward slashes and the result always
/// starts with `/`. A bare filename (no directory component) yields the
/// file path itself; otherwise the containing directory with a trailing `/`.
fn suggest_codeowners_pattern(path: &str) -> String {
    let normalized = path.replace('\\', "/");
    let trimmed = normalized.trim_start_matches('/');
    // Split off the final component (the filename); what remains is the
    // deepest directory. `rsplit_once` avoids the Vec + join round-trip.
    match trimmed.rsplit_once('/') {
        Some((dir, _file)) => format!("/{dir}/"),
        None => format!("/{trimmed}"),
    }
}
/// Builds the `actions` array for a refactoring target: the recommended
/// refactoring itself, plus — when the target carries `evidence` — an inline
/// suppression for the underlying complexity finding.
fn build_refactoring_target_actions(item: &serde_json::Value) -> serde_json::Value {
    let recommendation = match item.get("recommendation").and_then(serde_json::Value::as_str) {
        Some(text) => text,
        None => "Apply the recommended refactoring",
    };
    let category = match item.get("category").and_then(serde_json::Value::as_str) {
        Some(text) => text,
        None => "refactoring",
    };
    let mut actions = vec![serde_json::json!({
        "type": "apply-refactoring",
        "auto_fixable": false,
        "description": recommendation,
        "category": category,
    })];
    if item.get("evidence").is_some() {
        actions.push(serde_json::json!({
            "type": "suppress-line",
            "auto_fixable": false,
            "description": "Suppress the underlying complexity finding",
            "comment": "// fallow-ignore-next-line complexity",
        }));
    }
    serde_json::Value::Array(actions)
}
/// Builds the two-action array for an untested runtime file: add coverage,
/// or suppress coverage-gap reporting for the file.
fn build_untested_file_actions(item: &serde_json::Value) -> serde_json::Value {
    let path = item
        .get("path")
        .and_then(serde_json::Value::as_str)
        .unwrap_or("file");
    let mut actions = Vec::with_capacity(2);
    actions.push(serde_json::json!({
        "type": "add-tests",
        "auto_fixable": false,
        "description": format!("Add test coverage for `{path}`"),
        "note": "No test dependency path reaches this runtime file",
    }));
    actions.push(serde_json::json!({
        "type": "suppress-file",
        "auto_fixable": false,
        "description": format!("Suppress coverage gap reporting for `{path}`"),
        "comment": "// fallow-ignore-file coverage-gaps",
    }));
    serde_json::Value::Array(actions)
}
/// Builds the two-action array for an untested export: import-and-test it,
/// or suppress coverage-gap reporting for its file.
fn build_untested_export_actions(item: &serde_json::Value) -> serde_json::Value {
    let path = item
        .get("path")
        .and_then(serde_json::Value::as_str)
        .unwrap_or("file");
    let export_name = item
        .get("export_name")
        .and_then(serde_json::Value::as_str)
        .unwrap_or("export");
    let mut actions = Vec::with_capacity(2);
    actions.push(serde_json::json!({
        "type": "add-test-import",
        "auto_fixable": false,
        "description": format!("Import and test `{export_name}` from `{path}`"),
        "note": "This export is runtime-reachable but no test-reachable module references it",
    }));
    actions.push(serde_json::json!({
        "type": "suppress-file",
        "auto_fixable": false,
        "description": format!("Suppress coverage gap reporting for `{path}`"),
        "comment": "// fallow-ignore-file coverage-gaps",
    }));
    serde_json::Value::Array(actions)
}
#[allow(
clippy::redundant_pub_crate,
reason = "pub(crate) needed — used by audit.rs via re-export, but not part of public API"
)]
pub(crate) fn inject_dupes_actions(output: &mut serde_json::Value) {
let Some(map) = output.as_object_mut() else {
return;
};
if let Some(families) = map.get_mut("clone_families").and_then(|v| v.as_array_mut()) {
for item in families {
let actions = build_clone_family_actions(item);
if let serde_json::Value::Object(obj) = item {
obj.insert("actions".to_string(), actions);
}
}
}
if let Some(groups) = map.get_mut("clone_groups").and_then(|v| v.as_array_mut()) {
for item in groups {
let actions = build_clone_group_actions(item);
if let serde_json::Value::Object(obj) = item {
obj.insert("actions".to_string(), actions);
}
}
}
}
/// Builds the `actions` array for a clone family: an extract-shared action
/// sized from the family's groups and lines, one apply-suggestion action
/// per suggestion description, and an inline suppression.
fn build_clone_family_actions(item: &serde_json::Value) -> serde_json::Value {
    let group_count = item
        .get("groups")
        .and_then(serde_json::Value::as_array)
        .map_or(0, Vec::len);
    let total_lines = item
        .get("total_duplicated_lines")
        .and_then(serde_json::Value::as_u64)
        .unwrap_or(0);
    let plural = if group_count == 1 { "" } else { "s" };
    let mut actions = vec![serde_json::json!({
        "type": "extract-shared",
        "auto_fixable": false,
        "description": format!(
            "Extract {group_count} duplicated code block{plural} ({total_lines} lines) into a shared module"
        ),
        "note": "These clone groups share the same files, indicating a structural relationship — refactor together",
    })];
    let suggestion_texts = item
        .get("suggestions")
        .and_then(serde_json::Value::as_array)
        .into_iter()
        .flatten()
        .filter_map(|s| s.get("description").and_then(serde_json::Value::as_str));
    for desc in suggestion_texts {
        actions.push(serde_json::json!({
            "type": "apply-suggestion",
            "auto_fixable": false,
            "description": desc,
        }));
    }
    actions.push(serde_json::json!({
        "type": "suppress-line",
        "auto_fixable": false,
        "description": "Suppress with an inline comment above the duplicated code",
        "comment": "// fallow-ignore-next-line code-duplication",
    }));
    serde_json::Value::Array(actions)
}
/// Builds the two-action array for a single clone group: extract the
/// duplicated code, or suppress the finding inline.
fn build_clone_group_actions(item: &serde_json::Value) -> serde_json::Value {
    let instance_count = item
        .get("instances")
        .and_then(serde_json::Value::as_array)
        .map_or(0, Vec::len);
    let line_count = item
        .get("line_count")
        .and_then(serde_json::Value::as_u64)
        .unwrap_or(0);
    let plural = if instance_count == 1 { "" } else { "s" };
    let extract = serde_json::json!({
        "type": "extract-shared",
        "auto_fixable": false,
        "description": format!(
            "Extract duplicated code ({line_count} lines, {instance_count} instance{plural}) into a shared function"
        ),
    });
    let suppress = serde_json::json!({
        "type": "suppress-line",
        "auto_fixable": false,
        "description": "Suppress with an inline comment above the duplicated code",
        "comment": "// fallow-ignore-next-line code-duplication",
    });
    serde_json::Value::Array(vec![extract, suppress])
}
/// Attaches explain metadata under the `_meta` key; no-op when `output` is
/// not a JSON object.
fn insert_meta(output: &mut serde_json::Value, meta: serde_json::Value) {
    if let Some(map) = output.as_object_mut() {
        map.insert("_meta".to_string(), meta);
    }
}
/// Builds the health-report JSON: envelope metadata, root-relative paths,
/// injected per-item actions, and — when `explain` is set — `_meta`.
///
/// # Errors
///
/// Returns any `serde_json` error from serializing `report`.
pub fn build_health_json(
    report: &crate::health_types::HealthReport,
    root: &Path,
    elapsed: Duration,
    explain: bool,
) -> Result<serde_json::Value, serde_json::Error> {
    let mut output = build_json_envelope(serde_json::to_value(report)?, elapsed);
    let prefix = format!("{}/", root.display());
    strip_root_prefix(&mut output, &prefix);
    inject_health_actions(&mut output);
    if explain {
        insert_meta(&mut output, explain::health_meta());
    }
    Ok(output)
}
/// Emits the health report as JSON; exit code 2 when serialization fails.
pub(super) fn print_health_json(
    report: &crate::health_types::HealthReport,
    root: &Path,
    elapsed: Duration,
    explain: bool,
) -> ExitCode {
    build_health_json(report, root, elapsed, explain).map_or_else(
        |e| {
            eprintln!("Error: failed to serialize health report: {e}");
            ExitCode::from(2)
        },
        |output| emit_json(&output, "JSON"),
    )
}
/// Emits the duplication report as JSON: envelope metadata, root-relative
/// paths, injected clone actions, optional `_meta`. Exit code 2 when
/// serialization fails.
pub(super) fn print_duplication_json(
    report: &DuplicationReport,
    root: &Path,
    elapsed: Duration,
    explain: bool,
) -> ExitCode {
    let report_value = match serde_json::to_value(report) {
        Err(e) => {
            eprintln!("Error: failed to serialize duplication report: {e}");
            return ExitCode::from(2);
        }
        Ok(value) => value,
    };
    let mut output = build_json_envelope(report_value, elapsed);
    let prefix = format!("{}/", root.display());
    strip_root_prefix(&mut output, &prefix);
    inject_dupes_actions(&mut output);
    if explain {
        insert_meta(&mut output, explain::dupes_meta());
    }
    emit_json(&output, "JSON")
}
/// Pretty-prints any serializable trace value to stdout; terminates the
/// process with exit code 2 when serialization fails.
pub(super) fn print_trace_json<T: serde::Serialize>(value: &T) {
    let json = match serde_json::to_string_pretty(value) {
        Err(e) => {
            eprintln!("Error: failed to serialize trace output: {e}");
            #[expect(
                clippy::exit,
                reason = "fatal serialization error requires immediate exit"
            )]
            std::process::exit(2);
        }
        Ok(rendered) => rendered,
    };
    println!("{json}");
}
#[cfg(test)]
mod tests {
use super::*;
use crate::health_types::{
ProductionCoverageAction, ProductionCoverageConfidence, ProductionCoverageEvidence,
ProductionCoverageFinding, ProductionCoverageHotPath, ProductionCoverageMessage,
ProductionCoverageReport, ProductionCoverageReportVerdict, ProductionCoverageSummary,
ProductionCoverageVerdict, ProductionCoverageWatermark,
};
use crate::report::test_helpers::sample_results;
use fallow_core::extract::MemberKind;
use fallow_core::results::*;
use std::path::PathBuf;
use std::time::Duration;
#[test]
fn json_output_has_metadata_fields() {
let root = PathBuf::from("/project");
let results = AnalysisResults::default();
let elapsed = Duration::from_millis(123);
let output = build_json(&results, &root, elapsed).expect("should serialize");
assert_eq!(output["schema_version"], 4);
assert!(output["version"].is_string());
assert_eq!(output["elapsed_ms"], 123);
assert_eq!(output["total_issues"], 0);
}
#[test]
fn json_output_includes_issue_arrays() {
let root = PathBuf::from("/project");
let results = sample_results(&root);
let elapsed = Duration::from_millis(50);
let output = build_json(&results, &root, elapsed).expect("should serialize");
assert_eq!(output["unused_files"].as_array().unwrap().len(), 1);
assert_eq!(output["unused_exports"].as_array().unwrap().len(), 1);
assert_eq!(output["unused_types"].as_array().unwrap().len(), 1);
assert_eq!(output["unused_dependencies"].as_array().unwrap().len(), 1);
assert_eq!(
output["unused_dev_dependencies"].as_array().unwrap().len(),
1
);
assert_eq!(output["unused_enum_members"].as_array().unwrap().len(), 1);
assert_eq!(output["unused_class_members"].as_array().unwrap().len(), 1);
assert_eq!(output["unresolved_imports"].as_array().unwrap().len(), 1);
assert_eq!(output["unlisted_dependencies"].as_array().unwrap().len(), 1);
assert_eq!(output["duplicate_exports"].as_array().unwrap().len(), 1);
assert_eq!(
output["type_only_dependencies"].as_array().unwrap().len(),
1
);
assert_eq!(output["circular_dependencies"].as_array().unwrap().len(), 1);
}
#[test]
fn health_json_includes_production_coverage_with_relative_paths_and_actions() {
let root = PathBuf::from("/project");
let report = crate::health_types::HealthReport {
production_coverage: Some(ProductionCoverageReport {
verdict: ProductionCoverageReportVerdict::ColdCodeDetected,
summary: ProductionCoverageSummary {
functions_tracked: 3,
functions_hit: 1,
functions_unhit: 1,
functions_untracked: 1,
coverage_percent: 33.3,
trace_count: 2_847_291,
period_days: 30,
deployments_seen: 14,
},
findings: vec![ProductionCoverageFinding {
id: "fallow:prod:deadbeef".to_owned(),
path: root.join("src/cold.ts"),
function: "coldPath".to_owned(),
line: 12,
verdict: ProductionCoverageVerdict::ReviewRequired,
invocations: Some(0),
confidence: ProductionCoverageConfidence::Medium,
evidence: ProductionCoverageEvidence {
static_status: "used".to_owned(),
test_coverage: "not_covered".to_owned(),
v8_tracking: "tracked".to_owned(),
untracked_reason: None,
observation_days: 30,
deployments_observed: 14,
},
actions: vec![ProductionCoverageAction {
kind: "review-deletion".to_owned(),
description: "Tracked in production coverage with zero invocations."
.to_owned(),
auto_fixable: false,
}],
}],
hot_paths: vec![ProductionCoverageHotPath {
id: "fallow:hot:cafebabe".to_owned(),
path: root.join("src/hot.ts"),
function: "hotPath".to_owned(),
line: 3,
invocations: 250,
percentile: 99,
actions: vec![],
}],
watermark: Some(ProductionCoverageWatermark::LicenseExpiredGrace),
warnings: vec![ProductionCoverageMessage {
code: "partial-merge".to_owned(),
message: "Merged coverage omitted one chunk.".to_owned(),
}],
}),
..Default::default()
};
let report_value = serde_json::to_value(&report).expect("should serialize health report");
let mut output = build_json_envelope(report_value, Duration::from_millis(7));
strip_root_prefix(&mut output, "/project/");
inject_health_actions(&mut output);
assert_eq!(
output["production_coverage"]["verdict"],
serde_json::Value::String("cold-code-detected".to_owned())
);
assert_eq!(
output["production_coverage"]["summary"]["functions_tracked"],
serde_json::Value::from(3)
);
assert_eq!(
output["production_coverage"]["summary"]["coverage_percent"],
serde_json::Value::from(33.3)
);
let finding = &output["production_coverage"]["findings"][0];
assert_eq!(finding["path"], "src/cold.ts");
assert_eq!(finding["verdict"], "review_required");
assert_eq!(finding["id"], "fallow:prod:deadbeef");
assert_eq!(finding["actions"][0]["type"], "review-deletion");
let hot_path = &output["production_coverage"]["hot_paths"][0];
assert_eq!(hot_path["path"], "src/hot.ts");
assert_eq!(hot_path["function"], "hotPath");
assert_eq!(hot_path["percentile"], 99);
assert_eq!(
output["production_coverage"]["watermark"],
serde_json::Value::String("license-expired-grace".to_owned())
);
assert_eq!(
output["production_coverage"]["warnings"][0]["code"],
serde_json::Value::String("partial-merge".to_owned())
);
}
#[test]
fn json_metadata_fields_appear_first() {
let root = PathBuf::from("/project");
let results = AnalysisResults::default();
let elapsed = Duration::from_millis(0);
let output = build_json(&results, &root, elapsed).expect("should serialize");
let keys: Vec<&String> = output.as_object().unwrap().keys().collect();
assert_eq!(keys[0], "schema_version");
assert_eq!(keys[1], "version");
assert_eq!(keys[2], "elapsed_ms");
assert_eq!(keys[3], "total_issues");
}
#[test]
fn json_total_issues_matches_results() {
let root = PathBuf::from("/project");
let results = sample_results(&root);
let total = results.total_issues();
let elapsed = Duration::from_millis(0);
let output = build_json(&results, &root, elapsed).expect("should serialize");
assert_eq!(output["total_issues"], total);
}
// Every serialized field of an unused export must survive into the JSON.
#[test]
fn json_unused_export_contains_expected_fields() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_exports = vec![UnusedExport {
        path: project.join("src/utils.ts"),
        export_name: "helperFn".to_string(),
        is_type_only: false,
        line: 10,
        col: 4,
        span_start: 120,
        is_re_export: false,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let export = &output["unused_exports"][0];
    assert_eq!(export["export_name"], "helperFn");
    assert_eq!(export["line"], 10);
    assert_eq!(export["col"], 4);
    assert_eq!(export["is_type_only"], false);
    assert_eq!(export["span_start"], 120);
    assert_eq!(export["is_re_export"], false);
}
// Pretty-printing and re-parsing must yield the exact same value tree.
#[test]
fn json_serializes_to_valid_json() {
    let project = PathBuf::from("/project");
    let output = build_json(&sample_results(&project), &project, Duration::from_millis(42))
        .expect("should serialize");
    let pretty = serde_json::to_string_pretty(&output).expect("should stringify");
    let reparsed: serde_json::Value =
        serde_json::from_str(&pretty).expect("JSON output should be valid JSON");
    assert_eq!(reparsed, output);
}
// Empty results still emit every issue array, each empty, with a zero total.
#[test]
fn json_empty_results_produce_valid_structure() {
    let project = PathBuf::from("/project");
    let output = build_json(&AnalysisResults::default(), &project, Duration::ZERO)
        .expect("should serialize");
    assert_eq!(output["total_issues"], 0);
    let empty_arrays = [
        "unused_files",
        "unused_exports",
        "unused_types",
        "unused_dependencies",
        "unused_dev_dependencies",
        "unused_enum_members",
        "unused_class_members",
        "unresolved_imports",
        "unlisted_dependencies",
        "duplicate_exports",
        "type_only_dependencies",
        "circular_dependencies",
    ];
    for key in empty_arrays {
        assert_eq!(output[key].as_array().unwrap().len(), 0);
    }
}
// The compact string form of an empty report must parse back cleanly.
#[test]
fn json_empty_results_round_trips_through_string() {
    let project = PathBuf::from("/project");
    let output = build_json(&AnalysisResults::default(), &project, Duration::ZERO)
        .expect("should serialize");
    let serialized = serde_json::to_string(&output).expect("should stringify");
    let reparsed: serde_json::Value =
        serde_json::from_str(&serialized).expect("should parse back");
    assert_eq!(reparsed["total_issues"], 0);
}
// Paths under the root are emitted relative to it.
#[test]
fn json_paths_are_relative_to_root() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_files = vec![UnusedFile {
        path: project.join("src/deep/nested/file.ts"),
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let path = output["unused_files"][0]["path"].as_str().unwrap();
    assert_eq!(path, "src/deep/nested/file.ts");
    assert!(!path.starts_with("/project"));
}
// Root stripping reaches paths nested inside import-site records.
#[test]
fn json_strips_root_from_nested_locations() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unlisted_dependencies = vec![UnlistedDependency {
        package_name: "chalk".to_string(),
        imported_from: vec![ImportSite {
            path: project.join("src/cli.ts"),
            line: 2,
            col: 0,
        }],
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let site = &output["unlisted_dependencies"][0]["imported_from"][0];
    assert_eq!(site["path"].as_str().unwrap(), "src/cli.ts");
}
// Root stripping reaches every location of a duplicate export.
#[test]
fn json_strips_root_from_duplicate_export_locations() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.duplicate_exports = vec![DuplicateExport {
        export_name: "Config".to_string(),
        locations: vec![
            DuplicateLocation {
                path: project.join("src/config.ts"),
                line: 15,
                col: 0,
            },
            DuplicateLocation {
                path: project.join("src/types.ts"),
                line: 30,
                col: 0,
            },
        ],
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let locations = &output["duplicate_exports"][0]["locations"];
    assert_eq!(locations[0]["path"].as_str().unwrap(), "src/config.ts");
    assert_eq!(locations[1]["path"].as_str().unwrap(), "src/types.ts");
}
// Root stripping reaches the file list inside a cycle record.
#[test]
fn json_strips_root_from_circular_dependency_files() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.circular_dependencies = vec![CircularDependency {
        files: vec![project.join("src/a.ts"), project.join("src/b.ts")],
        length: 2,
        line: 1,
        col: 0,
        is_cross_package: false,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let files = output["circular_dependencies"][0]["files"]
        .as_array()
        .unwrap();
    assert_eq!(files[0].as_str().unwrap(), "src/a.ts");
    assert_eq!(files[1].as_str().unwrap(), "src/b.ts");
}
// Paths outside the project root are left untouched.
#[test]
fn json_path_outside_root_not_stripped() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_files = vec![UnusedFile {
        path: PathBuf::from("/other/project/src/file.ts"),
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let path = output["unused_files"][0]["path"].as_str().unwrap();
    assert!(path.contains("/other/project/"));
}
// An unused file entry carries its (relative) path.
#[test]
fn json_unused_file_contains_path() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_files = vec![UnusedFile {
        path: project.join("src/orphan.ts"),
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    assert_eq!(output["unused_files"][0]["path"], "src/orphan.ts");
}
// Unused types serialize with name, type-only flag, line and path.
#[test]
fn json_unused_type_contains_expected_fields() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_types = vec![UnusedExport {
        path: project.join("src/types.ts"),
        export_name: "OldInterface".to_string(),
        is_type_only: true,
        line: 20,
        col: 0,
        span_start: 300,
        is_re_export: false,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let unused_type = &output["unused_types"][0];
    assert_eq!(unused_type["export_name"], "OldInterface");
    assert_eq!(unused_type["is_type_only"], true);
    assert_eq!(unused_type["line"], 20);
    assert_eq!(unused_type["path"], "src/types.ts");
}
// Unused production dependencies keep package name and manifest line.
#[test]
fn json_unused_dependency_contains_expected_fields() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_dependencies = vec![UnusedDependency {
        package_name: "axios".to_string(),
        location: DependencyLocation::Dependencies,
        path: project.join("package.json"),
        line: 10,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let dependency = &output["unused_dependencies"][0];
    assert_eq!(dependency["package_name"], "axios");
    assert_eq!(dependency["line"], 10);
}
// Dev dependencies land in their own array.
#[test]
fn json_unused_dev_dependency_contains_expected_fields() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_dev_dependencies = vec![UnusedDependency {
        package_name: "vitest".to_string(),
        location: DependencyLocation::DevDependencies,
        path: project.join("package.json"),
        line: 15,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    assert_eq!(output["unused_dev_dependencies"][0]["package_name"], "vitest");
}
// Optional dependencies are serialized and counted toward the total.
#[test]
fn json_unused_optional_dependency_contains_expected_fields() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_optional_dependencies = vec![UnusedDependency {
        package_name: "fsevents".to_string(),
        location: DependencyLocation::OptionalDependencies,
        path: project.join("package.json"),
        line: 12,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    assert_eq!(
        output["unused_optional_dependencies"][0]["package_name"],
        "fsevents"
    );
    assert_eq!(output["total_issues"], 1);
}
// Enum members serialize with parent, member, line and path.
#[test]
fn json_unused_enum_member_contains_expected_fields() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_enum_members = vec![UnusedMember {
        path: project.join("src/enums.ts"),
        parent_name: "Color".to_string(),
        member_name: "Purple".to_string(),
        kind: MemberKind::EnumMember,
        line: 5,
        col: 2,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let member = &output["unused_enum_members"][0];
    assert_eq!(member["parent_name"], "Color");
    assert_eq!(member["member_name"], "Purple");
    assert_eq!(member["line"], 5);
    assert_eq!(member["path"], "src/enums.ts");
}
// Class members serialize with parent, member and line.
#[test]
fn json_unused_class_member_contains_expected_fields() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_class_members = vec![UnusedMember {
        path: project.join("src/api.ts"),
        parent_name: "ApiClient".to_string(),
        member_name: "deprecatedFetch".to_string(),
        kind: MemberKind::ClassMethod,
        line: 100,
        col: 4,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let member = &output["unused_class_members"][0];
    assert_eq!(member["parent_name"], "ApiClient");
    assert_eq!(member["member_name"], "deprecatedFetch");
    assert_eq!(member["line"], 100);
}
// Unresolved imports keep specifier, line and path.
#[test]
fn json_unresolved_import_contains_expected_fields() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unresolved_imports = vec![UnresolvedImport {
        path: project.join("src/app.ts"),
        specifier: "@acme/missing-pkg".to_string(),
        line: 7,
        col: 0,
        specifier_col: 0,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let import = &output["unresolved_imports"][0];
    assert_eq!(import["specifier"], "@acme/missing-pkg");
    assert_eq!(import["line"], 7);
    assert_eq!(import["path"], "src/app.ts");
}
// Unlisted dependencies list every import site.
#[test]
fn json_unlisted_dependency_contains_import_sites() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unlisted_dependencies = vec![UnlistedDependency {
        package_name: "dotenv".to_string(),
        imported_from: vec![
            ImportSite {
                path: project.join("src/config.ts"),
                line: 1,
                col: 0,
            },
            ImportSite {
                path: project.join("src/server.ts"),
                line: 3,
                col: 0,
            },
        ],
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let dependency = &output["unlisted_dependencies"][0];
    assert_eq!(dependency["package_name"], "dotenv");
    let sites = dependency["imported_from"].as_array().unwrap();
    assert_eq!(sites.len(), 2);
    assert_eq!(sites[0]["path"], "src/config.ts");
    assert_eq!(sites[1]["path"], "src/server.ts");
}
// Duplicate exports carry every conflicting location.
#[test]
fn json_duplicate_export_contains_locations() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.duplicate_exports = vec![DuplicateExport {
        export_name: "Button".to_string(),
        locations: vec![
            DuplicateLocation {
                path: project.join("src/ui.ts"),
                line: 10,
                col: 0,
            },
            DuplicateLocation {
                path: project.join("src/components.ts"),
                line: 25,
                col: 0,
            },
        ],
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let duplicate = &output["duplicate_exports"][0];
    assert_eq!(duplicate["export_name"], "Button");
    let locations = duplicate["locations"].as_array().unwrap();
    assert_eq!(locations.len(), 2);
    assert_eq!(locations[0]["line"], 10);
    assert_eq!(locations[1]["line"], 25);
}
// Type-only dependencies keep package name and manifest line.
#[test]
fn json_type_only_dependency_contains_expected_fields() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.type_only_dependencies = vec![TypeOnlyDependency {
        package_name: "zod".to_string(),
        path: project.join("package.json"),
        line: 8,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let dependency = &output["type_only_dependencies"][0];
    assert_eq!(dependency["package_name"], "zod");
    assert_eq!(dependency["line"], 8);
}
// A cycle record keeps its length, line and full file list.
#[test]
fn json_circular_dependency_contains_expected_fields() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.circular_dependencies = vec![CircularDependency {
        files: vec![
            project.join("src/a.ts"),
            project.join("src/b.ts"),
            project.join("src/c.ts"),
        ],
        length: 3,
        line: 5,
        col: 0,
        is_cross_package: false,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    let cycle = &output["circular_dependencies"][0];
    assert_eq!(cycle["length"], 3);
    assert_eq!(cycle["line"], 5);
    assert_eq!(cycle["files"].as_array().unwrap().len(), 3);
}
// The re-export flag round-trips through serialization.
#[test]
fn json_re_export_flagged_correctly() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_exports = vec![UnusedExport {
        path: project.join("src/index.ts"),
        export_name: "reExported".to_string(),
        is_type_only: false,
        line: 1,
        col: 0,
        span_start: 0,
        is_re_export: true,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    assert_eq!(output["unused_exports"][0]["is_re_export"], true);
}
// The emitted schema version equals the constant, currently 4.
#[test]
fn json_schema_version_is_4() {
    let project = PathBuf::from("/project");
    let output = build_json(&AnalysisResults::default(), &project, Duration::ZERO)
        .expect("should serialize");
    assert_eq!(output["schema_version"], SCHEMA_VERSION);
    assert_eq!(output["schema_version"], 4);
}
// The version field mirrors the crate version baked in at compile time.
#[test]
fn json_version_matches_cargo_pkg_version() {
    let project = PathBuf::from("/project");
    let output = build_json(&AnalysisResults::default(), &project, Duration::ZERO)
        .expect("should serialize");
    assert_eq!(output["version"], env!("CARGO_PKG_VERSION"));
}
// A zero duration produces elapsed_ms == 0.
#[test]
fn json_elapsed_ms_zero_duration() {
    let project = PathBuf::from("/project");
    let output = build_json(&AnalysisResults::default(), &project, Duration::ZERO)
        .expect("should serialize");
    assert_eq!(output["elapsed_ms"], 0);
}
// A multi-minute run reports its full millisecond count.
#[test]
fn json_elapsed_ms_large_duration() {
    let root = PathBuf::from("/project");
    let results = AnalysisResults::default();
    // `Duration::from_mins` is a nightly-only API (`duration_constructors`
    // feature family); use the stable seconds constructor for the same
    // two-minute span so the test builds on stable toolchains.
    let elapsed = Duration::from_secs(2 * 60);
    let output = build_json(&results, &root, elapsed).expect("should serialize");
    assert_eq!(output["elapsed_ms"], 120_000);
}
// Sub-millisecond durations truncate to zero, never round up.
#[test]
fn json_elapsed_ms_sub_millisecond_truncated() {
    let project = PathBuf::from("/project");
    let output = build_json(&AnalysisResults::default(), &project, Duration::from_micros(500))
        .expect("should serialize");
    assert_eq!(output["elapsed_ms"], 0);
}
// Several unused files each count toward the total.
#[test]
fn json_multiple_unused_files() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_files = ["src/a.ts", "src/b.ts", "src/c.ts"]
        .iter()
        .map(|relative| UnusedFile {
            path: project.join(relative),
        })
        .collect();
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    assert_eq!(output["unused_files"].as_array().unwrap().len(), 3);
    assert_eq!(output["total_issues"], 3);
}
// A bare string loses the root prefix.
#[test]
fn strip_root_prefix_on_string_value() {
    let mut value = serde_json::Value::String("/project/src/file.ts".to_owned());
    strip_root_prefix(&mut value, "/project/");
    assert_eq!(value, "src/file.ts");
}
// Strings under a different root are untouched.
#[test]
fn strip_root_prefix_leaves_non_matching_string() {
    let mut value = serde_json::Value::String("/other/src/file.ts".to_owned());
    strip_root_prefix(&mut value, "/project/");
    assert_eq!(value, "/other/src/file.ts");
}
// Array elements are stripped individually.
#[test]
fn strip_root_prefix_recurses_into_arrays() {
    let mut value = serde_json::json!(["/project/a.ts", "/project/b.ts", "/other/c.ts"]);
    strip_root_prefix(&mut value, "/project/");
    for (index, expected) in ["a.ts", "b.ts", "/other/c.ts"].iter().enumerate() {
        assert_eq!(value[index], *expected);
    }
}
// Stripping descends through nested objects.
#[test]
fn strip_root_prefix_recurses_into_nested_objects() {
    let mut value = serde_json::json!({
        "outer": {
            "path": "/project/src/nested.ts"
        }
    });
    strip_root_prefix(&mut value, "/project/");
    assert_eq!(value["outer"]["path"], "src/nested.ts");
}
// Non-string scalars pass through unchanged.
#[test]
fn strip_root_prefix_leaves_numbers_and_booleans() {
    let mut value = serde_json::json!({
        "line": 42,
        "is_type_only": false,
        "path": "/project/src/file.ts"
    });
    strip_root_prefix(&mut value, "/project/");
    assert_eq!(value["line"], 42);
    assert_eq!(value["is_type_only"], false);
    assert_eq!(value["path"], "src/file.ts");
}
// Backslash separators are normalized before stripping.
#[test]
fn strip_root_prefix_normalizes_windows_separators() {
    let mut value = serde_json::json!(r"/project\src\file.ts");
    strip_root_prefix(&mut value, "/project/");
    assert_eq!(value, "src/file.ts");
}
// A string equal to the root alone strips down to empty.
#[test]
fn strip_root_prefix_handles_empty_string_after_strip() {
    let mut value = serde_json::Value::String("/project/".to_owned());
    strip_root_prefix(&mut value, "/project/");
    assert_eq!(value, "");
}
// Arrays of objects nested several levels deep are all reached.
#[test]
fn strip_root_prefix_deeply_nested_array_of_objects() {
    let mut value = serde_json::json!({
        "groups": [{
            "instances": [{
                "file": "/project/src/a.ts"
            }, {
                "file": "/project/src/b.ts"
            }]
        }]
    });
    strip_root_prefix(&mut value, "/project/");
    let instances = &value["groups"][0]["instances"];
    assert_eq!(instances[0]["file"], "src/a.ts");
    assert_eq!(instances[1]["file"], "src/b.ts");
}
// The full sample fixture reports a matching total.
#[test]
fn json_full_sample_results_total_issues_correct() {
    let project = PathBuf::from("/project");
    let analysis = sample_results(&project);
    let output = build_json(&analysis, &project, Duration::from_millis(100))
        .expect("should serialize");
    assert_eq!(output["total_issues"], analysis.total_issues());
}
// No absolute project paths may leak into the serialized string.
#[test]
fn json_full_sample_no_absolute_paths_in_output() {
    let project = PathBuf::from("/project");
    let output = build_json(&sample_results(&project), &project, Duration::ZERO)
        .expect("should serialize");
    let serialized = serde_json::to_string(&output).expect("should stringify");
    assert!(!serialized.contains("/project/src/"));
    assert!(!serialized.contains("/project/package.json"));
}
// Building twice from identical inputs yields identical values.
#[test]
fn json_output_is_deterministic() {
    let project = PathBuf::from("/project");
    let analysis = sample_results(&project);
    let elapsed = Duration::from_millis(50);
    let first = build_json(&analysis, &project, elapsed).expect("first build");
    let second = build_json(&analysis, &project, elapsed).expect("second build");
    assert_eq!(first, second);
}
// Result fields never overwrite the metadata keys.
#[test]
fn json_results_fields_do_not_shadow_metadata() {
    let project = PathBuf::from("/project");
    let output = build_json(&AnalysisResults::default(), &project, Duration::from_millis(99))
        .expect("should serialize");
    assert_eq!(output["schema_version"], 4);
    assert_eq!(output["elapsed_ms"], 99);
}
// Every known issue-type key is present as an array, even when empty.
#[test]
fn json_all_issue_type_arrays_present_in_empty_results() {
    let project = PathBuf::from("/project");
    let output = build_json(&AnalysisResults::default(), &project, Duration::ZERO)
        .expect("should serialize");
    let expected_arrays = [
        "unused_files",
        "unused_exports",
        "unused_types",
        "unused_dependencies",
        "unused_dev_dependencies",
        "unused_optional_dependencies",
        "unused_enum_members",
        "unused_class_members",
        "unresolved_imports",
        "unlisted_dependencies",
        "duplicate_exports",
        "type_only_dependencies",
        "test_only_dependencies",
        "circular_dependencies",
    ];
    for key in &expected_arrays {
        assert!(
            output[key].is_array(),
            "expected '{key}' to be an array in JSON output"
        );
    }
}
// insert_meta attaches the payload under "_meta" on objects.
#[test]
fn insert_meta_adds_key_to_object() {
    let meta = serde_json::json!({ "docs": "https://example.com" });
    let mut output = serde_json::json!({ "foo": 1 });
    insert_meta(&mut output, meta.clone());
    assert_eq!(output["_meta"], meta);
}
// Non-object values are left alone.
#[test]
fn insert_meta_noop_on_non_object() {
    let mut output = serde_json::json!([1, 2, 3]);
    insert_meta(&mut output, serde_json::json!({ "docs": "https://example.com" }));
    assert!(output.is_array());
}
// A pre-existing "_meta" entry is replaced wholesale.
#[test]
fn insert_meta_overwrites_existing_meta() {
    let replacement = serde_json::json!({ "new": true });
    let mut output = serde_json::json!({ "_meta": "old" });
    insert_meta(&mut output, replacement.clone());
    assert_eq!(output["_meta"], replacement);
}
// The envelope wraps a report with schema/version/elapsed metadata.
#[test]
fn build_json_envelope_has_metadata_fields() {
    let report = serde_json::json!({ "findings": [] });
    let output = build_json_envelope(report, Duration::from_millis(42));
    assert_eq!(output["schema_version"], 4);
    assert!(output["version"].is_string());
    assert_eq!(output["elapsed_ms"], 42);
    assert!(output["findings"].is_array());
}
// Envelope metadata keys come before the report's own keys.
#[test]
fn build_json_envelope_metadata_appears_first() {
    let report = serde_json::json!({ "data": "value" });
    let output = build_json_envelope(report, Duration::from_millis(10));
    let mut keys = output.as_object().unwrap().keys();
    assert_eq!(keys.next().unwrap(), "schema_version");
    assert_eq!(keys.next().unwrap(), "version");
    assert_eq!(keys.next().unwrap(), "elapsed_ms");
}
// A non-object report yields metadata only.
#[test]
fn build_json_envelope_non_object_report() {
    let output = build_json_envelope(serde_json::json!("not an object"), Duration::ZERO);
    let object = output.as_object().unwrap();
    assert_eq!(object.len(), 3);
    for key in ["schema_version", "version", "elapsed_ms"] {
        assert!(object.contains_key(key));
    }
}
// Null values survive stripping unchanged.
#[test]
fn strip_root_prefix_null_unchanged() {
    let mut value = serde_json::Value::Null;
    strip_root_prefix(&mut value, "/project/");
    assert!(value.is_null());
}
// An empty string stays empty.
#[test]
fn strip_root_prefix_empty_string() {
    let mut value = serde_json::Value::String(String::new());
    strip_root_prefix(&mut value, "/project/");
    assert_eq!(value, "");
}
// Stripping handles a mixture of scalars, arrays and nested objects.
#[test]
fn strip_root_prefix_mixed_types() {
    let mut value = serde_json::json!({
        "path": "/project/src/file.ts",
        "line": 42,
        "flag": true,
        "nested": {
            "items": ["/project/a.ts", 99, null, "/project/b.ts"],
            "deep": { "path": "/project/c.ts" }
        }
    });
    strip_root_prefix(&mut value, "/project/");
    assert_eq!(value["path"], "src/file.ts");
    assert_eq!(value["line"], 42);
    assert_eq!(value["flag"], true);
    let items = &value["nested"]["items"];
    assert_eq!(items[0], "a.ts");
    assert_eq!(items[1], 99);
    assert!(items[2].is_null());
    assert_eq!(items[3], "b.ts");
    assert_eq!(value["nested"]["deep"]["path"], "c.ts");
}
// check_meta plugs into the report under "_meta" with docs and rules.
#[test]
fn json_check_meta_integrates_correctly() {
    let project = PathBuf::from("/project");
    let mut output = build_json(&AnalysisResults::default(), &project, Duration::ZERO)
        .expect("should serialize");
    insert_meta(&mut output, crate::explain::check_meta());
    assert!(output["_meta"]["docs"].is_string());
    assert!(output["_meta"]["rules"].is_object());
}
// Member kinds serialize as strings for both enum and class members.
#[test]
fn json_unused_member_kind_serialized() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_enum_members = vec![UnusedMember {
        path: project.join("src/enums.ts"),
        parent_name: "Color".to_string(),
        member_name: "Red".to_string(),
        kind: MemberKind::EnumMember,
        line: 3,
        col: 2,
    }];
    analysis.unused_class_members = vec![UnusedMember {
        path: project.join("src/class.ts"),
        parent_name: "Foo".to_string(),
        member_name: "bar".to_string(),
        kind: MemberKind::ClassMethod,
        line: 10,
        col: 4,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).expect("should serialize");
    assert!(output["unused_enum_members"][0]["kind"].is_string());
    assert!(output["unused_class_members"][0]["kind"].is_string());
}
// Unused exports get a removable fix plus a line-suppression action.
#[test]
fn json_unused_export_has_actions() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_exports = vec![UnusedExport {
        path: project.join("src/utils.ts"),
        export_name: "helperFn".to_string(),
        is_type_only: false,
        line: 10,
        col: 4,
        span_start: 120,
        is_re_export: false,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).unwrap();
    let actions = output["unused_exports"][0]["actions"].as_array().unwrap();
    assert_eq!(actions.len(), 2);
    let remove = &actions[0];
    assert_eq!(remove["type"], "remove-export");
    assert_eq!(remove["auto_fixable"], true);
    assert!(remove.get("note").is_none());
    let suppress = &actions[1];
    assert_eq!(suppress["type"], "suppress-line");
    assert_eq!(suppress["comment"], "// fallow-ignore-next-line unused-export");
}
// Unused files get a delete action (with note) plus a file suppression.
#[test]
fn json_unused_file_has_file_suppress_and_note() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_files = vec![UnusedFile {
        path: project.join("src/dead.ts"),
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).unwrap();
    let actions = output["unused_files"][0]["actions"].as_array().unwrap();
    let delete = &actions[0];
    assert_eq!(delete["type"], "delete-file");
    assert_eq!(delete["auto_fixable"], false);
    assert!(delete["note"].is_string());
    let suppress = &actions[1];
    assert_eq!(suppress["type"], "suppress-file");
    assert_eq!(suppress["comment"], "// fallow-ignore-file unused-file");
}
// Unused dependencies get a removal plus a config-entry suppression.
#[test]
fn json_unused_dependency_has_config_suppress_with_package_name() {
    let project = PathBuf::from("/project");
    let mut analysis = AnalysisResults::default();
    analysis.unused_dependencies = vec![UnusedDependency {
        package_name: "lodash".to_string(),
        location: DependencyLocation::Dependencies,
        path: project.join("package.json"),
        line: 5,
    }];
    let output = build_json(&analysis, &project, Duration::ZERO).unwrap();
    let actions = output["unused_dependencies"][0]["actions"]
        .as_array()
        .unwrap();
    assert_eq!(actions[0]["type"], "remove-dependency");
    assert_eq!(actions[0]["auto_fixable"], true);
    let config = &actions[1];
    assert_eq!(config["type"], "add-to-config");
    assert_eq!(config["config_key"], "ignoreDependencies");
    assert_eq!(config["value"], "lodash");
}
// Empty issue arrays stay empty after action injection.
#[test]
fn json_empty_results_have_no_actions_in_empty_arrays() {
    let project = PathBuf::from("/project");
    let output = build_json(&AnalysisResults::default(), &project, Duration::ZERO).unwrap();
    for key in ["unused_exports", "unused_files"] {
        assert!(output[key].as_array().unwrap().is_empty());
    }
}
// Every populated issue array carries a non-empty actions list.
#[test]
fn json_all_issue_types_have_actions() {
    let project = PathBuf::from("/project");
    let output = build_json(&sample_results(&project), &project, Duration::ZERO).unwrap();
    let issue_keys = [
        "unused_files",
        "unused_exports",
        "unused_types",
        "unused_dependencies",
        "unused_dev_dependencies",
        "unused_optional_dependencies",
        "unused_enum_members",
        "unused_class_members",
        "unresolved_imports",
        "unlisted_dependencies",
        "duplicate_exports",
        "type_only_dependencies",
        "test_only_dependencies",
        "circular_dependencies",
    ];
    for key in &issue_keys {
        let entries = output[key].as_array().unwrap();
        if let Some(first) = entries.first() {
            let actions = first["actions"].as_array();
            assert!(
                actions.is_some() && !actions.unwrap().is_empty(),
                "missing actions for {key}"
            );
        }
    }
}
// Complexity findings get a refactor action naming the function plus a suppression.
#[test]
fn health_finding_has_actions() {
    let mut output = serde_json::json!({
        "findings": [{
            "path": "src/utils.ts",
            "name": "processData",
            "line": 10,
            "col": 0,
            "cyclomatic": 25,
            "cognitive": 30,
            "line_count": 150,
            "exceeded": "both"
        }]
    });
    inject_health_actions(&mut output);
    let actions = output["findings"][0]["actions"].as_array().unwrap();
    assert_eq!(actions.len(), 2);
    let refactor = &actions[0];
    assert_eq!(refactor["type"], "refactor-function");
    assert_eq!(refactor["auto_fixable"], false);
    let description = refactor["description"].as_str().unwrap();
    assert!(description.contains("processData"));
    let suppress = &actions[1];
    assert_eq!(suppress["type"], "suppress-line");
    assert_eq!(suppress["comment"], "// fallow-ignore-next-line complexity");
}
// Refactoring targets echo their recommendation and category.
#[test]
fn refactoring_target_has_actions() {
    let mut output = serde_json::json!({
        "targets": [{
            "path": "src/big-module.ts",
            "priority": 85.0,
            "efficiency": 42.5,
            "recommendation": "Split module: 12 exports, 4 unused",
            "category": "split_high_impact",
            "effort": "medium",
            "confidence": "high",
            "evidence": { "unused_exports": 4 }
        }]
    });
    inject_health_actions(&mut output);
    let actions = output["targets"][0]["actions"].as_array().unwrap();
    assert_eq!(actions.len(), 2);
    let apply = &actions[0];
    assert_eq!(apply["type"], "apply-refactoring");
    assert_eq!(apply["description"], "Split module: 12 exports, 4 unused");
    assert_eq!(apply["category"], "split_high_impact");
    assert_eq!(actions[1]["type"], "suppress-line");
}
// Without evidence there is nothing to suppress — only the refactoring action.
#[test]
fn refactoring_target_without_evidence_has_no_suppress() {
    let mut output = serde_json::json!({
        "targets": [{
            "path": "src/simple.ts",
            "priority": 30.0,
            "efficiency": 15.0,
            "recommendation": "Consider extracting helper functions",
            "category": "extract_complex_functions",
            "effort": "small",
            "confidence": "medium"
        }]
    });
    inject_health_actions(&mut output);
    let actions = output["targets"][0]["actions"].as_array().unwrap();
    assert_eq!(actions.len(), 1);
    assert_eq!(actions[0]["type"], "apply-refactoring");
}
// Empty findings and targets stay empty after injection.
#[test]
fn health_empty_findings_no_actions() {
    let mut output = serde_json::json!({
        "findings": [],
        "targets": []
    });
    inject_health_actions(&mut output);
    for key in ["findings", "targets"] {
        assert!(output[key].as_array().unwrap().is_empty());
    }
}
// Hotspots get a refactor-file action naming the path plus an add-tests action.
#[test]
fn hotspot_has_actions() {
    let mut output = serde_json::json!({
        "hotspots": [{
            "path": "src/utils.ts",
            "complexity_score": 45.0,
            "churn_score": 12,
            "hotspot_score": 540.0
        }]
    });
    inject_health_actions(&mut output);
    let actions = output["hotspots"][0]["actions"].as_array().unwrap();
    assert_eq!(actions.len(), 2);
    assert_eq!(actions[0]["type"], "refactor-file");
    let description = actions[0]["description"].as_str().unwrap();
    assert!(description.contains("src/utils.ts"));
    assert_eq!(actions[1]["type"], "add-tests");
}
// A bus factor of one triggers a low-bus-factor action naming the owner.
#[test]
fn hotspot_low_bus_factor_emits_action() {
    let mut output = serde_json::json!({
        "hotspots": [{
            "path": "src/api.ts",
            "ownership": {
                "bus_factor": 1,
                "contributor_count": 1,
                "top_contributor": {"identifier": "alice@x", "share": 1.0, "stale_days": 5, "commits": 30},
                "unowned": null,
                "drift": false,
            }
        }]
    });
    inject_health_actions(&mut output);
    let actions = output["hotspots"][0]["actions"].as_array().unwrap();
    let bus = actions.iter().find(|action| action["type"] == "low-bus-factor");
    assert!(bus.is_some(), "low-bus-factor action should be present");
    let description = bus.unwrap()["description"].as_str().unwrap();
    assert!(description.contains("alice@x"));
}
// Unowned hotspots suggest a CODEOWNERS pattern for the deepest directory.
#[test]
fn hotspot_unowned_emits_action_with_pattern() {
    let mut output = serde_json::json!({
        "hotspots": [{
            "path": "src/api/users.ts",
            "ownership": {
                "bus_factor": 2,
                "contributor_count": 4,
                "top_contributor": {"identifier": "alice@x", "share": 0.5, "stale_days": 5, "commits": 10},
                "unowned": true,
                "drift": false,
            }
        }]
    });
    inject_health_actions(&mut output);
    let actions = output["hotspots"][0]["actions"].as_array().unwrap();
    let unowned = actions
        .iter()
        .find(|action| action["type"] == "unowned-hotspot")
        .expect("unowned-hotspot action should be present");
    assert_eq!(unowned["suggested_pattern"], "/src/api/");
    assert_eq!(unowned["heuristic"], "directory-deepest");
}
// With "unowned": null (no CODEOWNERS file), no unowned action is emitted.
#[test]
fn hotspot_unowned_skipped_when_codeowners_missing() {
    let mut output = serde_json::json!({
        "hotspots": [{
            "path": "src/api.ts",
            "ownership": {
                "bus_factor": 2,
                "contributor_count": 4,
                "top_contributor": {"identifier": "alice@x", "share": 0.5, "stale_days": 5, "commits": 10},
                "unowned": null,
                "drift": false,
            }
        }]
    });
    inject_health_actions(&mut output);
    let actions = output["hotspots"][0]["actions"].as_array().unwrap();
    let has_unowned = actions.iter().any(|action| action["type"] == "unowned-hotspot");
    assert!(
        !has_unowned,
        "unowned action must not fire when CODEOWNERS file is absent"
    );
}
// Ownership drift yields an action quoting the original author.
#[test]
fn hotspot_drift_emits_action() {
    let mut output = serde_json::json!({
        "hotspots": [{
            "path": "src/old.ts",
            "ownership": {
                "bus_factor": 1,
                "contributor_count": 2,
                "top_contributor": {"identifier": "bob@x", "share": 0.9, "stale_days": 1, "commits": 18},
                "unowned": null,
                "drift": true,
                "drift_reason": "original author alice@x has 5% share",
            }
        }]
    });
    inject_health_actions(&mut output);
    let actions = output["hotspots"][0]["actions"].as_array().unwrap();
    let drift = actions
        .iter()
        .find(|action| action["type"] == "ownership-drift")
        .expect("ownership-drift action should be present");
    assert!(drift["description"].as_str().unwrap().contains("alice@x"));
}
// The suggested pattern points at the file's deepest directory.
#[test]
fn codeowners_pattern_uses_deepest_directory() {
    let pattern = suggest_codeowners_pattern("src/api/users/handlers.ts");
    assert_eq!(pattern, "/src/api/users/");
}
// A root-level file maps to itself.
#[test]
fn codeowners_pattern_for_root_file() {
    assert_eq!(suggest_codeowners_pattern("README.md"), "/README.md");
}
// Windows-style separators are normalized into the pattern.
#[test]
fn codeowners_pattern_normalizes_backslashes() {
    let pattern = suggest_codeowners_pattern("src\\api\\users.ts");
    assert_eq!(pattern, "/src/api/");
}
// A single directory level becomes that directory.
#[test]
fn codeowners_pattern_two_level_path() {
    assert_eq!(suggest_codeowners_pattern("src/foo.ts"), "/src/");
}
// The complexity suppression declares where the comment should go.
#[test]
fn health_finding_suppress_has_placement() {
    let mut output = serde_json::json!({
        "findings": [{
            "path": "src/utils.ts",
            "name": "processData",
            "line": 10,
            "col": 0,
            "cyclomatic": 25,
            "cognitive": 30,
            "line_count": 150,
            "exceeded": "both"
        }]
    });
    inject_health_actions(&mut output);
    let suppress = &output["findings"][0]["actions"][1];
    assert_eq!(suppress["placement"], "above-function-declaration");
}
#[test]
fn clone_family_has_actions() {
    // A clone family with one suggestion gets three actions in order:
    // extract-shared, apply-suggestion, and suppress-line.
    let mut payload = serde_json::json!({
        "clone_families": [{
            "files": ["src/a.ts", "src/b.ts"],
            "groups": [
                { "instances": [{"file": "src/a.ts"}, {"file": "src/b.ts"}], "token_count": 100, "line_count": 20 }
            ],
            "total_duplicated_lines": 20,
            "total_duplicated_tokens": 100,
            "suggestions": [
                { "kind": "ExtractFunction", "description": "Extract shared validation logic", "estimated_savings": 15 }
            ]
        }]
    });
    inject_dupes_actions(&mut payload);
    let actions = payload["clone_families"][0]["actions"].as_array().unwrap();
    assert_eq!(actions.len(), 3);

    // First action: extraction proposal, never auto-fixable, mentions the
    // duplicated line count.
    assert_eq!(actions[0]["type"], "extract-shared");
    assert_eq!(actions[0]["auto_fixable"], false);
    let extract_desc = actions[0]["description"].as_str().unwrap();
    assert!(extract_desc.contains("20 lines"));

    // Second action: carries the suggestion's own description.
    assert_eq!(actions[1]["type"], "apply-suggestion");
    let suggestion_desc = actions[1]["description"].as_str().unwrap();
    assert!(suggestion_desc.contains("validation logic"));

    // Third action: suppression comment for the duplication check.
    assert_eq!(actions[2]["type"], "suppress-line");
    assert_eq!(
        actions[2]["comment"],
        "// fallow-ignore-next-line code-duplication"
    );
}
#[test]
fn clone_group_has_actions() {
    // A standalone clone group (no family/suggestions) gets exactly two
    // actions: extract-shared and suppress-line.
    let mut payload = serde_json::json!({
        "clone_groups": [{
            "instances": [
                {"file": "src/a.ts", "start_line": 1, "end_line": 10},
                {"file": "src/b.ts", "start_line": 5, "end_line": 14}
            ],
            "token_count": 50,
            "line_count": 10
        }]
    });
    inject_dupes_actions(&mut payload);
    let actions = payload["clone_groups"][0]["actions"].as_array().unwrap();
    assert_eq!(actions.len(), 2);

    // The extraction action summarizes both size and instance count.
    assert_eq!(actions[0]["type"], "extract-shared");
    let extract_desc = actions[0]["description"].as_str().unwrap();
    assert!(extract_desc.contains("10 lines"));
    assert!(extract_desc.contains("2 instances"));

    assert_eq!(actions[1]["type"], "suppress-line");
}
#[test]
fn dupes_empty_results_no_actions() {
    // Injecting into an empty report is a no-op: both arrays stay empty.
    let mut payload = serde_json::json!({
        "clone_families": [],
        "clone_groups": []
    });
    inject_dupes_actions(&mut payload);
    for key in ["clone_families", "clone_groups"] {
        assert!(payload[key].as_array().unwrap().is_empty());
    }
}
}