use anyhow::{Context, Result, bail};
use cargo_crap::{
complexity, coverage,
delta::{compute_delta, load_baseline},
merge::{MissingCoveragePolicy, merge},
report::{Format, crappy_count, render, render_delta, render_delta_summary, render_summary},
score::DEFAULT_THRESHOLD,
};
use clap::{Parser, ValueEnum};
use globset::{GlobBuilder, GlobSet, GlobSetBuilder};
use indicatif::{ProgressBar, ProgressStyle};
use std::fs::File;
use std::io::{self, BufWriter, Write};
use std::path::PathBuf;
use std::time::Duration;
// Command-line arguments for `cargo-crap` (also invocable as `cargo crap`).
//
// NOTE(review): comments in this struct are deliberately `//`, not `///` —
// clap derives the --help text from field doc comments, so adding `///`
// here would change the binary's runtime help output.
#[derive(Parser, Debug)]
#[command(
    name = "cargo-crap",
    about = "Compute the CRAP (Change Risk Anti-Patterns) metric for Rust projects.",
    long_about = None,
    version
)]
struct Cli {
    // LCOV coverage report to merge with the complexity data; without it,
    // coverage falls back to the missing-coverage policy.
    #[arg(long, value_name = "FILE")]
    lcov: Option<PathBuf>,
    // Root directory to analyze (not used when --workspace is set).
    #[arg(long, value_name = "DIR", default_value = ".")]
    path: PathBuf,
    // Analyze every package root reported by `cargo metadata` instead of --path.
    #[arg(long)]
    workspace: bool,
    // Glob patterns of paths to skip; extends (does not replace) config-file excludes.
    #[arg(long, value_name = "GLOB")]
    exclude: Vec<String>,
    // CRAP score above which a function counts as "crappy" (falls back to
    // the config file, then DEFAULT_THRESHOLD).
    #[arg(long)]
    threshold: Option<f64>,
    // Hide entries whose CRAP score is below this value.
    #[arg(long, value_name = "SCORE")]
    min: Option<f64>,
    // Show at most N entries (report is truncated after filtering).
    #[arg(long, value_name = "N")]
    top: Option<usize>,
    // How to score functions absent from the coverage report.
    #[arg(long, value_enum)]
    missing: Option<MissingPolicy>,
    // Output format; see FormatArg for the choices.
    #[arg(long, value_enum, default_value_t = FormatArg::Human)]
    format: FormatArg,
    // Render an aggregate summary instead of the full report.
    #[arg(long)]
    summary: bool,
    // Exit with status 1 if any function exceeds the threshold.
    #[arg(long)]
    fail_above: bool,
    // Glob patterns of function names to exempt from the report.
    #[arg(long, value_name = "GLOB")]
    allow: Vec<String>,
    // Earlier report to diff against (switches output to delta mode).
    #[arg(long, value_name = "FILE")]
    baseline: Option<PathBuf>,
    // Exit with status 1 on any regression vs. the baseline (requires --baseline).
    #[arg(long)]
    fail_regression: bool,
    // Write the report to FILE instead of stdout.
    #[arg(long, value_name = "FILE")]
    output: Option<PathBuf>,
}
// CLI-facing mirror of `merge::MissingCoveragePolicy`: how to treat functions
// that have no entry in the coverage report. `//` comments on purpose —
// clap renders `///` doc comments in --help, which would alter runtime output.
// Exact scoring semantics live in the library's merge module; the names
// presumably mean worst-case / best-case / omit — confirm there.
#[derive(ValueEnum, Clone, Copy, Debug)]
enum MissingPolicy {
    // Maps to MissingCoveragePolicy::Pessimistic (also the default in main).
    Pessimistic,
    // Maps to MissingCoveragePolicy::Optimistic.
    Optimistic,
    // Maps to MissingCoveragePolicy::Skip.
    Skip,
}
impl From<MissingPolicy> for MissingCoveragePolicy {
fn from(p: MissingPolicy) -> Self {
match p {
MissingPolicy::Pessimistic => Self::Pessimistic,
MissingPolicy::Optimistic => Self::Optimistic,
MissingPolicy::Skip => Self::Skip,
}
}
}
// CLI-facing mirror of `report::Format` selecting the output style.
// `//` comments on purpose — clap renders `///` doc comments in --help,
// which would change the binary's runtime output.
#[derive(ValueEnum, Clone, Copy, Debug)]
enum FormatArg {
    // Maps to Format::Human (default, see the --format arg on Cli).
    Human,
    // Maps to Format::Json.
    Json,
    // Maps to Format::GitHub (note the casing difference vs. the CLI value).
    Github,
    // Maps to Format::Markdown.
    Markdown,
}
impl From<FormatArg> for Format {
fn from(f: FormatArg) -> Self {
match f {
FormatArg::Human => Self::Human,
FormatArg::Json => Self::Json,
FormatArg::Github => Self::GitHub,
FormatArg::Markdown => Self::Markdown,
}
}
}
/// Normalize argv for clap: when invoked as `cargo crap`, cargo inserts
/// the literal subcommand name "crap" at index 1 — drop it so the same
/// parser handles both `cargo-crap …` and `cargo crap …`.
fn strip_cargo_subcommand(mut args: Vec<String>) -> Vec<String> {
    if let Some(first) = args.get(1) {
        if first == "crap" {
            args.remove(1);
        }
    }
    args
}
/// Compile the --allow glob patterns into a single `GlobSet` so each
/// function name needs only one match test.
///
/// # Errors
/// Fails if any pattern is not valid glob syntax, or if the set itself
/// cannot be built.
fn build_allow_set(patterns: &[String]) -> Result<GlobSet> {
    let mut set = GlobSetBuilder::new();
    for pattern in patterns {
        let compiled = GlobBuilder::new(pattern)
            .build()
            .with_context(|| format!("invalid allow pattern: {pattern:?}"))?;
        set.add(compiled);
    }
    set.build().context("building allow glob set")
}
/// Gather per-function complexity data.
///
/// In workspace mode every package root reported by `cargo metadata` is
/// analyzed and the results concatenated; otherwise only `path` is scanned.
/// `excludes` glob patterns are forwarded to the analyzer in both modes.
fn collect_complexity(
    workspace: bool,
    path: &std::path::Path,
    excludes: &[String],
) -> Result<Vec<complexity::FunctionComplexity>> {
    // Single-directory mode: analyze --path and return early.
    if !workspace {
        return complexity::analyze_tree(path, excludes)
            .with_context(|| format!("analyzing {}", path.display()));
    }
    let mut all = Vec::new();
    for root in workspace_roots()? {
        let fns = complexity::analyze_tree(&root, excludes)
            .with_context(|| format!("analyzing {}", root.display()))?;
        all.extend(fns);
    }
    Ok(all)
}
/// Prune the merged CRAP entries in place, in order:
/// 1. drop functions matched by an --allow glob (explicitly tolerated),
/// 2. drop entries scoring below --min,
/// 3. keep only the first --top entries.
///
/// # Errors
/// Fails only if an allow pattern is invalid glob syntax.
fn apply_filters(
    entries: &mut Vec<cargo_crap::merge::CrapEntry>,
    allow_patterns: &[String],
    min: Option<f64>,
    top: Option<usize>,
) -> Result<()> {
    match allow_patterns {
        [] => {}
        patterns => {
            let allow = build_allow_set(patterns)?;
            entries.retain(|entry| !allow.is_match(&entry.function));
        }
    }
    if let Some(floor) = min {
        entries.retain(|entry| entry.crap >= floor);
    }
    if let Some(limit) = top {
        entries.truncate(limit);
    }
    Ok(())
}
/// Parse the LCOV report when --lcov was given; otherwise return an empty
/// map, leaving every function to the missing-coverage policy.
fn load_coverage(
    lcov: Option<&PathBuf>
) -> Result<std::collections::HashMap<std::path::PathBuf, cargo_crap::coverage::FileCoverage>> {
    let Some(path) = lcov else {
        return Ok(Default::default());
    };
    coverage::parse_lcov(path).with_context(|| format!("parsing LCOV file {}", path.display()))
}
fn open_output(path: Option<&PathBuf>) -> Result<Box<dyn Write>> {
Ok(match path {
Some(p) => {
Box::new(BufWriter::new(File::create(p).with_context(|| {
format!("creating output file {}", p.display())
})?))
},
None => Box::new(io::stdout()),
})
}
/// Start an indeterminate progress spinner with the given message,
/// ticking every 80ms. The trailing empty tick string is the glyph shown
/// once the spinner finishes.
fn spinner(msg: &'static str) -> ProgressBar {
    let style = ProgressStyle::with_template("{spinner:.cyan} {msg}")
        .unwrap()
        .tick_strings(&["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏", ""]);
    let bar = ProgressBar::new_spinner();
    bar.set_style(style);
    bar.set_message(msg);
    bar.enable_steady_tick(Duration::from_millis(80));
    bar
}
/// Discover the directory of every package in the current workspace by
/// shelling out to `cargo metadata` (--no-deps keeps the JSON small).
///
/// # Errors
/// Fails if cargo cannot be run, exits non-zero, emits unparsable JSON,
/// lacks a `packages` array, or reports zero packages.
fn workspace_roots() -> Result<Vec<PathBuf>> {
    let output = std::process::Command::new("cargo")
        .args(["metadata", "--no-deps", "--format-version", "1"])
        .output()
        .context("running `cargo metadata`")?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        bail!("`cargo metadata` failed: {stderr}");
    }
    let meta: serde_json::Value =
        serde_json::from_slice(&output.stdout).context("parsing `cargo metadata` output")?;
    let packages = meta["packages"]
        .as_array()
        .context("`cargo metadata` output missing `packages`")?;
    let mut roots = Vec::with_capacity(packages.len());
    for pkg in packages {
        // Each package's root is the directory holding its Cargo.toml.
        if let Some(manifest) = pkg["manifest_path"].as_str() {
            if let Some(dir) = PathBuf::from(manifest).parent() {
                roots.push(dir.to_path_buf());
            }
        }
    }
    if roots.is_empty() {
        bail!("`cargo metadata` returned no packages");
    }
    Ok(roots)
}
/// Reject argument combinations clap cannot express on its own.
///
/// # Errors
/// - --path must exist unless --workspace discovers roots itself.
/// - --fail-regression is meaningless without a --baseline to diff against.
fn validate_args(cli: &Cli) -> Result<()> {
    if !(cli.workspace || cli.path.exists()) {
        bail!("path does not exist: {}", cli.path.display());
    }
    if cli.fail_regression && cli.baseline.is_none() {
        bail!("--fail-regression requires --baseline");
    }
    Ok(())
}
/// Render the report to `out` and return the pair
/// `(any entry above threshold, any regression vs. baseline)`.
///
/// With a baseline, output switches to delta mode (summary or full);
/// without one, the regression flag is always false. `summary` picks the
/// aggregate renderer over the per-function one in both modes.
fn do_render(
    entries: &[cargo_crap::merge::CrapEntry],
    baseline: Option<&PathBuf>,
    threshold: f64,
    format: Format,
    summary: bool,
    out: &mut dyn Write,
) -> Result<(bool, bool)> {
    // Computed the same way in both modes, so hoist it.
    let has_crappy = crappy_count(entries, threshold) > 0;
    let Some(baseline_path) = baseline else {
        if summary {
            render_summary(entries, threshold, out)?;
        } else {
            render(entries, threshold, format, out)?;
        }
        return Ok((has_crappy, false));
    };
    let baseline_data = load_baseline(baseline_path)?;
    let report = compute_delta(entries, &baseline_data);
    let has_regression = report.regression_count() > 0;
    if summary {
        render_delta_summary(&report, out)?;
    } else {
        render_delta(&report, threshold, format, out)?;
    }
    Ok((has_crappy, has_regression))
}
fn main() -> Result<()> {
let cli = Cli::parse_from(strip_cargo_subcommand(std::env::args().collect()));
validate_args(&cli)?;
let cwd = std::env::current_dir().unwrap_or_else(|_| cli.path.clone());
let config = cargo_crap::config::load(&cwd)?;
let threshold = cli
.threshold
.or(config.threshold)
.unwrap_or(DEFAULT_THRESHOLD);
let missing_policy: MissingCoveragePolicy = cli
.missing
.map(Into::into)
.or(config.missing)
.unwrap_or(MissingCoveragePolicy::Pessimistic);
let fail_above = cli.fail_above || config.fail_above.unwrap_or(false);
let fail_regression = cli.fail_regression || config.fail_regression.unwrap_or(false);
let mut effective_exclude = config.exclude;
effective_exclude.extend(cli.exclude);
let mut effective_allow = config.allow;
effective_allow.extend(cli.allow);
let pb = spinner("Analyzing source files…");
let fns = collect_complexity(cli.workspace, &cli.path, &effective_exclude)?;
pb.set_message("Parsing coverage report…");
let coverage = load_coverage(cli.lcov.as_ref())?;
pb.finish_and_clear();
let mut entries = merge(fns, coverage, missing_policy);
apply_filters(
&mut entries,
&effective_allow,
cli.min.or(config.min),
cli.top.or(config.top),
)?;
let mut out_box = open_output(cli.output.as_ref())?;
let (has_crappy, has_regression) = do_render(
&entries,
cli.baseline.as_ref(),
threshold,
cli.format.into(),
cli.summary,
out_box.as_mut(),
)?;
if (fail_above && has_crappy) || (fail_regression && has_regression) {
std::process::exit(1);
}
Ok(())
}