#![allow(clippy::multiple_crate_versions)]
use std::fmt::Write as FmtWrite;
use std::io::IsTerminal;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use clap::{Args, Parser, Subcommand};
use lettre::{
message::{header::ContentType, MultiPart, SinglePart},
transport::smtp::authentication::Credentials,
AsyncSmtpTransport, AsyncTransport, Message, Tokio1Executor,
};
use tracing_subscriber::EnvFilter;
use sloc_config::{AppConfig, BlankInBlockCommentPolicy, ContinuationLinePolicy, MixedLinePolicy};
use sloc_core::{
analyze, check_against_baseline, compute_delta, read_json, resolve_baselines_path, write_json,
AnalysisRun, BaselineEntry, BaselineStore, ScanComparison,
};
use sloc_git::{clone_or_fetch, create_worktree, destroy_worktree, get_sha};
use sloc_report::{
render_html, write_csv, write_diff_csv, write_html, write_pdf_from_html, write_xlsx,
};
/// Decide whether ANSI colors should be emitted on stdout.
///
/// Colors are on only when stdout is a TTY, `NO_COLOR` is unset
/// (https://no-color.org convention), and `TERM` is not "dumb"
/// (an unreadable/unset `TERM` still allows color).
fn color_enabled() -> bool {
    if !std::io::stdout().is_terminal() {
        return false;
    }
    if std::env::var_os("NO_COLOR").is_some() {
        return false;
    }
    std::env::var("TERM").map_or(true, |term| term != "dumb")
}
/// Wrap `$val` in an ANSI SGR escape sequence (`\x1b[<code>m…\x1b[0m`) when
/// `$enabled` is true; otherwise just stringify the value unchanged.
/// `$val` may be anything implementing `Display`.
macro_rules! paint {
    ($enabled:expr, $code:expr, $val:expr) => {
        match $enabled {
            true => format!("\x1b[{}m{}\x1b[0m", $code, $val),
            false => $val.to_string(),
        }
    };
}
// Top-level CLI parser. Plain `//` comments are used deliberately throughout
// the clap structs: `///` doc comments would be picked up by clap as
// user-visible help text and change the CLI output.
#[derive(Debug, Parser)]
#[command(name = "oxide-sloc", version)]
#[command(about = "Cross-platform source line analysis tool")]
#[command(
    long_about = "Cross-platform source line analysis tool.\n\nRun without arguments to start the web UI on http://127.0.0.1:4317."
)]
struct Cli {
    // Optional: when absent, `main` falls back to `Commands::Serve` with
    // default arguments (local web UI).
    #[command(subcommand)]
    command: Option<Commands>,
}
// All subcommands. The largest payloads (AnalyzeArgs, SendArgs) are boxed so
// the enum stays small (the size of an enum is that of its biggest variant).
#[derive(Debug, Subcommand)]
enum Commands {
    Analyze(Box<AnalyzeArgs>),
    Report(ReportArgs),
    Diff(DiffArgs),
    Serve(ServeArgs),
    Init(InitArgs),
    Validate(ValidateArgs),
    Send(Box<SendArgs>),
    GitScan(GitScanArgs),
    GitCompare(GitCompareArgs),
    Watch(WatchArgs),
    // Explicit kebab-case name (derive default would be "pr-comment" anyway,
    // but the attribute pins it).
    #[command(name = "pr-comment")]
    PrComment(PrCommentArgs),
}
// CLI flags for `oxide-sloc analyze`. (`//` comments, not `///`, so clap's
// help text is unchanged.)
#[allow(clippy::struct_excessive_bools)]
#[derive(Debug, Args)]
struct AnalyzeArgs {
    // Paths to scan; overrides discovery.root_paths from config when non-empty.
    #[arg(value_name = "PATH")]
    paths: Vec<PathBuf>,
    // Optional config file; defaults handled by load_base_config.
    #[arg(long)]
    config: Option<PathBuf>,
    // Output artifacts — each written only when its flag is given.
    #[arg(long, short = 'j', value_name = "PATH")]
    json_out: Option<PathBuf>,
    #[arg(long, short = 'H', value_name = "PATH")]
    html_out: Option<PathBuf>,
    #[arg(long, value_name = "PATH")]
    pdf_out: Option<PathBuf>,
    #[arg(long, short = 'c', value_name = "PATH")]
    csv_out: Option<PathBuf>,
    #[arg(long, short = 'x', value_name = "PATH")]
    xlsx_out: Option<PathBuf>,
    // Open the HTML report in a browser after writing (HTML output only).
    #[arg(long)]
    open: bool,
    #[arg(long, short = 'q')]
    quiet: bool,
    // CI gates — these cause non-zero process exits (see check_exit_conditions).
    #[arg(long)]
    fail_on_warnings: bool,
    #[arg(long, value_name = "N")]
    fail_below: Option<u64>,
    // Counting-policy overrides; config values are used when absent.
    #[arg(long)]
    mixed_line_policy: Option<MixedLinePolicy>,
    #[arg(long)]
    python_docstrings_as_code: bool,
    #[arg(long)]
    continuation_line_policy: Option<ContinuationLinePolicy>,
    #[arg(long)]
    blank_in_block_comment_policy: Option<BlankInBlockCommentPolicy>,
    #[arg(long)]
    no_count_compiler_directives: bool,
    // Discovery overrides.
    #[arg(long)]
    no_ignore_files: bool,
    #[arg(long)]
    follow_symlinks: bool,
    #[arg(long, value_name = "PATTERN")]
    include_glob: Vec<String>,
    #[arg(long, value_name = "PATTERN")]
    exclude_glob: Vec<String>,
    #[arg(long, value_name = "LANG")]
    enabled_language: Vec<String>,
    #[arg(long, value_name = "TITLE")]
    report_title: Option<String>,
    // Console output shape.
    #[arg(long)]
    per_file: bool,
    #[arg(long)]
    plain: bool,
    #[arg(long)]
    submodule_breakdown: bool,
    // Named config profile to apply on top of the base configuration.
    #[arg(long, value_name = "NAME")]
    profile: Option<String>,
    #[arg(long)]
    fail_on_budget: bool,
    // Baseline bookkeeping: snapshot totals under a name, or fail when the
    // current run grew past a saved baseline (optionally within a % tolerance).
    #[arg(long, value_name = "NAME")]
    set_baseline: Option<String>,
    #[arg(long, value_name = "NAME")]
    fail_above_baseline: Option<String>,
    #[arg(long, value_name = "PCT")]
    max_delta_pct: Option<f64>,
    #[arg(long, value_name = "FILE")]
    coverage_file: Option<PathBuf>,
}
// CLI flags for `oxide-sloc report`: re-render a saved analysis JSON into
// other formats. (`//` comments so clap help text is unchanged.)
#[derive(Debug, Args)]
struct ReportArgs {
    // Previously saved analysis JSON (e.g. from `analyze --json-out`).
    #[arg(value_name = "RESULT_JSON")]
    input: PathBuf,
    #[arg(long, short = 'H', value_name = "PATH")]
    html_out: Option<PathBuf>,
    #[arg(long, value_name = "PATH")]
    pdf_out: Option<PathBuf>,
    #[arg(long, short = 'c', value_name = "PATH")]
    csv_out: Option<PathBuf>,
    #[arg(long, short = 'x', value_name = "PATH")]
    xlsx_out: Option<PathBuf>,
    // Open the HTML report in a browser after writing.
    #[arg(long)]
    open: bool,
}
// CLI flags for `oxide-sloc diff`: compare two saved analysis JSON files.
#[derive(Debug, Args)]
struct DiffArgs {
    #[arg(value_name = "BASELINE_JSON")]
    baseline: PathBuf,
    #[arg(value_name = "CURRENT_JSON")]
    current: PathBuf,
    // Optional diff artifacts.
    #[arg(long, short = 'j', value_name = "PATH")]
    json_out: Option<PathBuf>,
    #[arg(long, short = 'c', value_name = "PATH")]
    csv_out: Option<PathBuf>,
    #[arg(long, short = 'x', value_name = "PATH")]
    xlsx_out: Option<PathBuf>,
    // Machine-readable key=value console output instead of the colored table.
    #[arg(long)]
    plain: bool,
    #[arg(long, short = 'q')]
    quiet: bool,
}
// CLI flags for `oxide-sloc serve` (the embedded web UI).
#[derive(Debug, Args)]
struct ServeArgs {
    #[arg(long)]
    config: Option<PathBuf>,
    // Explicit bind address; see run_serve for precedence vs SLOC_BIND/config.
    #[arg(long, value_name = "ADDR")]
    bind: Option<String>,
    // Server mode: may widen a loopback-only default bind to 0.0.0.0.
    #[arg(long)]
    server: bool,
}
// CLI flags for `oxide-sloc init`: write a starter config file.
#[derive(Debug, Args)]
struct InitArgs {
    #[arg(value_name = "PATH", default_value = ".oxide-sloc.toml")]
    output: PathBuf,
    // Required to overwrite an existing file.
    #[arg(long)]
    force: bool,
}
// CLI flags for `oxide-sloc validate`: static config checking.
#[derive(Debug, Args)]
struct ValidateArgs {
    // Defaults to ./.oxide-sloc.toml inside run_validate when absent.
    #[arg(long, value_name = "PATH")]
    config: Option<PathBuf>,
}
// CLI flags for `oxide-sloc send`: deliver a saved report via SMTP, generic
// webhooks, Microsoft Teams, and/or Confluence. Secrets can come from env
// vars (preferred over flags, which show up in process listings).
#[derive(Debug, Args)]
struct SendArgs {
    #[arg(value_name = "RESULT_JSON")]
    input: PathBuf,
    // --- SMTP delivery ---
    #[arg(long, value_name = "EMAIL,...")]
    smtp_to: Vec<String>,
    #[arg(long, value_name = "EMAIL")]
    smtp_from: Option<String>,
    #[arg(long, value_name = "HOST", env = "SLOC_SMTP_HOST")]
    smtp_host: Option<String>,
    // 587 = standard SMTP submission port (STARTTLS).
    #[arg(long, value_name = "PORT", default_value = "587")]
    smtp_port: u16,
    #[arg(long, value_name = "USER", env = "SLOC_SMTP_USER")]
    smtp_user: Option<String>,
    #[arg(long, value_name = "PASS", env = "SLOC_SMTP_PASS")]
    smtp_pass: Option<String>,
    // --- Generic webhooks (full run JSON POSTed) ---
    #[arg(long, value_name = "URL")]
    webhook_url: Vec<String>,
    #[arg(long, value_name = "TOKEN", env = "SLOC_WEBHOOK_TOKEN")]
    webhook_token: Option<String>,
    // --- Microsoft Teams adaptive cards ---
    #[arg(long, value_name = "URL")]
    notify_teams: Vec<String>,
    // Optional "View Full Report" link on the Teams card.
    #[arg(long, value_name = "URL")]
    report_url: Option<String>,
    // --- Confluence page publishing ---
    #[arg(long, value_name = "URL", env = "SLOC_CONFLUENCE_URL")]
    confluence_url: Option<String>,
    #[arg(long, value_name = "USER", env = "SLOC_CONFLUENCE_USER")]
    confluence_username: Option<String>,
    #[arg(long, value_name = "TOKEN", env = "SLOC_CONFLUENCE_TOKEN")]
    confluence_token: Option<String>,
    #[arg(long, value_name = "KEY", env = "SLOC_CONFLUENCE_SPACE")]
    confluence_space: Option<String>,
    #[arg(long, value_name = "ID")]
    confluence_parent_id: Option<String>,
    #[arg(long, value_name = "TITLE")]
    confluence_page_title: Option<String>,
    #[arg(long, value_name = "URL")]
    confluence_report_url: Option<String>,
}
// Hosting provider for `pr-comment` (selects the API shape to talk to).
#[derive(Debug, Clone, clap::ValueEnum)]
enum VcsProvider {
    Github,
    Gitlab,
}
// CLI flags for `oxide-sloc pr-comment`: post analysis results as a comment
// on a pull/merge request. repo/pr_number/token are required (flag or env).
#[derive(Debug, Args)]
struct PrCommentArgs {
    #[arg(value_name = "CURRENT_JSON")]
    current: PathBuf,
    // Optional baseline for delta reporting in the comment.
    #[arg(long, value_name = "BASELINE_JSON")]
    baseline: Option<PathBuf>,
    #[arg(long, value_enum, default_value = "github")]
    provider: VcsProvider,
    // Override for self-hosted instances.
    #[arg(long, value_name = "URL", env = "SLOC_VCS_API_URL")]
    api_url: Option<String>,
    #[arg(long, value_name = "REPO", env = "SLOC_VCS_REPO")]
    repo: String,
    #[arg(long, value_name = "NUMBER", env = "SLOC_PR_NUMBER")]
    pr_number: u64,
    #[arg(long, value_name = "TOKEN", env = "SLOC_VCS_TOKEN")]
    token: String,
    #[arg(long, value_name = "URL")]
    report_url: Option<String>,
}
// CLI flags for `oxide-sloc git-scan`: clone/fetch a repo and analyze one ref.
#[derive(Debug, Args)]
struct GitScanArgs {
    #[arg(value_name = "REPO")]
    repo: String,
    #[arg(long, default_value = "HEAD", value_name = "REF")]
    git_ref: String,
    // Where to keep clones between runs.
    #[arg(long, value_name = "DIR")]
    clones_dir: Option<PathBuf>,
    #[arg(long, short = 'j', value_name = "PATH")]
    json_out: Option<PathBuf>,
    #[arg(long, short = 'H', value_name = "PATH")]
    html_out: Option<PathBuf>,
    #[arg(long, short = 'c', value_name = "PATH")]
    csv_out: Option<PathBuf>,
    #[arg(long)]
    plain: bool,
    #[arg(long, short = 'q')]
    quiet: bool,
}
// CLI flags for `oxide-sloc git-compare`: analyze two refs of one repo and
// diff the results.
#[derive(Debug, Args)]
struct GitCompareArgs {
    #[arg(value_name = "REPO")]
    repo: String,
    #[arg(value_name = "BASELINE_REF")]
    baseline_ref: String,
    #[arg(value_name = "CURRENT_REF")]
    current_ref: String,
    #[arg(long, value_name = "DIR")]
    clones_dir: Option<PathBuf>,
    #[arg(long, short = 'j', value_name = "PATH")]
    json_out: Option<PathBuf>,
    #[arg(long, short = 'c', value_name = "PATH")]
    csv_out: Option<PathBuf>,
    #[arg(long)]
    plain: bool,
    #[arg(long, short = 'q')]
    quiet: bool,
}
// CLI flags for `oxide-sloc watch`: poll a branch and re-analyze on change.
#[derive(Debug, Args)]
struct WatchArgs {
    #[arg(value_name = "REPO")]
    repo: String,
    #[arg(long, default_value = "main", value_name = "BRANCH")]
    branch: String,
    // Polling interval in seconds.
    #[arg(long, default_value = "300", value_name = "SECS")]
    interval: u64,
    #[arg(long, value_name = "DIR")]
    clones_dir: Option<PathBuf>,
    #[arg(long, value_name = "DIR")]
    output_dir: Option<PathBuf>,
    #[arg(long, short = 'q')]
    quiet: bool,
}
/// Entry point: set up logging, parse the CLI, and dispatch to the
/// subcommand handler. No subcommand means "start the local web UI".
#[tokio::main]
async fn main() -> Result<()> {
    // Logging: honour RUST_LOG (env filter) when set; otherwise default to
    // `warn` while silencing the chatty headless_chrome transport module.
    tracing_subscriber::fmt()
        .with_env_filter(
            EnvFilter::try_from_default_env().unwrap_or_else(|_| {
                EnvFilter::new("warn,headless_chrome::browser::transport=error")
            }),
        )
        .init();
    let cli = Cli::parse();
    // Default subcommand: Serve with all-default args (local, non-server mode).
    match cli.command.unwrap_or(Commands::Serve(ServeArgs {
        config: None,
        bind: None,
        server: false,
    })) {
        // Boxed variants are unboxed here so handlers take plain structs.
        Commands::Analyze(args) => run_analyze(*args).await,
        Commands::Report(args) => run_report(&args),
        Commands::Diff(args) => run_diff(&args),
        Commands::Serve(args) => run_serve(args).await,
        Commands::Init(args) => run_init(&args),
        Commands::Validate(args) => run_validate(&args),
        Commands::Send(args) => run_send(*args).await,
        Commands::GitScan(args) => run_git_scan(args).await,
        Commands::GitCompare(args) => run_git_compare(args),
        Commands::Watch(args) => run_watch(args).await,
        Commands::PrComment(args) => run_pr_comment(args).await,
    }
}
/// Report a written artifact path on stderr, unless `--quiet` suppressed it.
/// stderr is used so stdout stays clean for machine-readable output.
fn log_written(path: &Path, quiet: bool) {
    if quiet {
        return;
    }
    eprintln!("wrote {}", path.display());
}
/// Write every output artifact requested on the `analyze` command line,
/// logging each written path to stderr unless quiet.
fn write_outputs(run: &AnalysisRun, args: &AnalyzeArgs, quiet: bool) -> Result<()> {
    if let Some(path) = &args.json_out {
        write_json(run, path)?;
        log_written(path, quiet);
    }
    if let Some(path) = &args.html_out {
        write_html(run, path)?;
        log_written(path, quiet);
        // --open applies only to the HTML report.
        if args.open {
            open_path(path);
        }
    }
    if let Some(path) = &args.pdf_out {
        // PDF rendering needs an HTML file: reuse --html-out when given,
        // otherwise render a sibling .html next to the PDF target.
        let html_for_pdf = ensure_html_for_pdf(run, args.html_out.as_deref(), path)?;
        write_pdf_from_html(&html_for_pdf, path)?;
        log_written(path, quiet);
    }
    if let Some(path) = &args.csv_out {
        write_csv(run, path)?;
        log_written(path, quiet);
    }
    if let Some(path) = &args.xlsx_out {
        write_xlsx(run, path)?;
        log_written(path, quiet);
    }
    Ok(())
}
/// Apply CI gating flags. Exits the process directly instead of returning an
/// error: 2 = warnings present, 3 = code lines below threshold, 4 = budget
/// exceeded (via check_budget).
fn check_exit_conditions(
    run: &AnalysisRun,
    fail_on_warnings: bool,
    fail_below: Option<u64>,
    fail_on_budget: bool,
) {
    if fail_on_warnings && !run.warnings.is_empty() {
        eprintln!(
            "error: {} warning(s) found — failing due to --fail-on-warnings",
            run.warnings.len()
        );
        std::process::exit(2);
    }
    if let Some(threshold) = fail_below {
        if run.summary_totals.code_lines < threshold {
            eprintln!(
                "error: code lines ({}) below threshold {} (--fail-below)",
                run.summary_totals.code_lines, threshold
            );
            std::process::exit(3);
        }
    }
    // Budget gating only applies when the config actually defines a budget.
    if fail_on_budget {
        if let Some(budget) = &run.effective_configuration.analysis.budget {
            check_budget(run, budget);
        }
    }
}
/// Enforce `[analysis.budget]` limits, exiting with code 4 if any limit is
/// exceeded. `total_max == 0` means "unlimited" (see the init template);
/// per-language keys are lowercase language display names. All violations
/// are reported before exiting.
#[allow(clippy::useless_let_if_seq)]
fn check_budget(run: &AnalysisRun, budget: &sloc_config::BudgetConfig) {
    let mut violated = false;
    if budget.total_max > 0 && run.summary_totals.code_lines > budget.total_max {
        eprintln!(
            "error: budget exceeded — total code lines {} > limit {} (--fail-on-budget)",
            run.summary_totals.code_lines, budget.total_max
        );
        violated = true;
    }
    for lang_row in &run.totals_by_language {
        // Budget keys are stored lowercase; normalize before lookup.
        let key = lang_row.language.display_name().to_ascii_lowercase();
        if let Some(&limit) = budget.per_language.get(&key) {
            if lang_row.code_lines > limit {
                eprintln!(
                    "error: budget exceeded — {} code lines {} > limit {} (--fail-on-budget)",
                    lang_row.language.display_name(),
                    lang_row.code_lines,
                    limit
                );
                violated = true;
            }
        }
    }
    if violated {
        std::process::exit(4);
    }
}
/// `oxide-sloc analyze` — run the analysis, print a summary, write the
/// requested artifacts, then apply baseline bookkeeping and CI gates.
async fn run_analyze(args: AnalyzeArgs) -> Result<()> {
    let config = resolve_analyze_config(&args)?;
    let quiet = args.quiet;
    // Analysis is blocking work; keep it off the async runtime's core threads.
    // First `?` is the join error, second is the analysis result.
    let run = tokio::task::spawn_blocking(move || analyze(&config, "analyze", None))
        .await
        .context("analysis task failed to join")??;
    if !quiet {
        print_summary(&run, args.per_file, args.plain);
    }
    write_outputs(&run, &args, quiet)?;
    // --set-baseline: snapshot this run's totals under a name so later runs
    // can gate against it with --fail-above-baseline.
    if let Some(name) = &args.set_baseline {
        let baselines_path = resolve_baselines_path();
        let mut store = BaselineStore::load(&baselines_path);
        store.set(BaselineEntry {
            name: name.clone(),
            saved_at: chrono::Utc::now(),
            run_id: run.tool.run_id.clone(),
            // Only the headline totals are snapshotted; remaining snapshot
            // fields take their defaults.
            summary: sloc_core::ScanSummarySnapshot {
                files_analyzed: run.summary_totals.files_analyzed,
                files_skipped: run.summary_totals.files_skipped,
                code_lines: run.summary_totals.code_lines,
                comment_lines: run.summary_totals.comment_lines,
                blank_lines: run.summary_totals.blank_lines,
                total_physical_lines: run.summary_totals.total_physical_lines,
                ..Default::default()
            },
            json_path: args.json_out.clone(),
        });
        store.save(&baselines_path)?;
        if !quiet {
            eprintln!("baseline '{}' saved → {}", name, baselines_path.display());
        }
    }
    // These gates may call process::exit (codes 2/3/4) and therefore run
    // before the baseline gate below (code 5).
    check_exit_conditions(
        &run,
        args.fail_on_warnings,
        args.fail_below,
        args.fail_on_budget,
    );
    if let Some(baseline_name) = &args.fail_above_baseline {
        let baselines_path = resolve_baselines_path();
        let store = BaselineStore::load(&baselines_path);
        let result = check_against_baseline(
            &store,
            baseline_name,
            run.summary_totals.code_lines,
            args.max_delta_pct,
        )?;
        result.print_summary();
        if result.exceeded {
            std::process::exit(5);
        }
    }
    Ok(())
}
/// `oxide-sloc report` — re-render a previously saved analysis JSON into
/// HTML/PDF/CSV/XLSX. At least one output flag is required.
fn run_report(args: &ReportArgs) -> Result<()> {
    let run = read_json(&args.input)?;
    if args.html_out.is_none()
        && args.pdf_out.is_none()
        && args.csv_out.is_none()
        && args.xlsx_out.is_none()
    {
        anyhow::bail!("provide at least one of --html-out, --pdf-out, --csv-out, --xlsx-out");
    }
    if let Some(path) = &args.html_out {
        write_html(&run, path)?;
        eprintln!("wrote {}", path.display());
        if args.open {
            open_path(path);
        }
    }
    if let Some(path) = &args.pdf_out {
        // Reuse the --html-out file when present; otherwise render a sibling
        // .html next to the PDF target.
        let html_for_pdf = ensure_html_for_pdf(&run, args.html_out.as_deref(), path)?;
        write_pdf_from_html(&html_for_pdf, path)?;
        eprintln!("wrote {}", path.display());
    }
    if let Some(path) = &args.csv_out {
        write_csv(&run, path)?;
        eprintln!("wrote {}", path.display());
    }
    if let Some(path) = &args.xlsx_out {
        write_xlsx(&run, path)?;
        eprintln!("wrote {}", path.display());
    }
    Ok(())
}
/// `oxide-sloc diff` — compare two previously saved analysis JSON files and
/// report summary and per-file deltas, optionally writing them as
/// JSON/CSV/XLSX.
///
/// Fix: the second argument to `compute_delta` had been corrupted into the
/// non-ASCII sequence `¤t` (an HTML-entity mangling of `&current`), which is
/// not valid Rust; restored to `&current`.
fn run_diff(args: &DiffArgs) -> Result<()> {
    let baseline = read_json(&args.baseline)
        .with_context(|| format!("failed to read baseline: {}", args.baseline.display()))?;
    let current = read_json(&args.current)
        .with_context(|| format!("failed to read current: {}", args.current.display()))?;
    let comparison = compute_delta(&baseline, &current);
    if !args.quiet {
        print_diff_summary(&comparison, args.plain);
    }
    if let Some(path) = &args.json_out {
        let json = serde_json::to_string_pretty(&comparison)
            .context("failed to serialize diff to JSON")?;
        std::fs::write(path, json)
            .with_context(|| format!("failed to write {}", path.display()))?;
        eprintln!("wrote {}", path.display());
    }
    if let Some(path) = &args.csv_out {
        write_diff_csv(&comparison, path)?;
        eprintln!("wrote {}", path.display());
    }
    if let Some(path) = &args.xlsx_out {
        write_diff_xlsx(&comparison, path)?;
        eprintln!("wrote {}", path.display());
    }
    Ok(())
}
/// `oxide-sloc serve` — start the embedded web UI.
///
/// Bind-address precedence (lowest to highest): config file → --server
/// default widening → SLOC_BIND env var → --bind flag.
async fn run_serve(args: ServeArgs) -> Result<()> {
    let mut config = load_base_config(args.config.as_deref())?;
    // An empty SLOC_BIND is treated the same as unset.
    let bind_env = std::env::var("SLOC_BIND").ok().filter(|s| !s.is_empty());
    if args.server {
        config.web.server_mode = true;
        // In server mode, widen a loopback-only default bind to all
        // interfaces — but only when no explicit bind was requested anywhere.
        if args.bind.is_none()
            && bind_env.is_none()
            && config.web.bind_address.starts_with("127.0.0.1")
        {
            config.web.bind_address = "0.0.0.0:4317".into();
        }
    }
    if let Some(bind) = bind_env {
        config.web.bind_address = bind;
    }
    if let Some(bind) = args.bind {
        config.web.bind_address = bind;
    }
    // Optional scan-root allowlist from the environment.
    // NOTE(review): the ':' separator assumes Unix-style paths — a Windows
    // drive letter ("C:\...") would be split incorrectly; confirm intent.
    if let Ok(roots_env) = std::env::var("SLOC_ALLOWED_ROOTS") {
        let roots: Vec<std::path::PathBuf> = roots_env
            .split(':')
            .filter(|s| !s.is_empty())
            .map(std::path::PathBuf::from)
            .collect();
        if !roots.is_empty() {
            config.discovery.allowed_scan_roots = roots;
        }
    }
    sloc_web::serve(config).await
}
/// `oxide-sloc init` — write a fully commented starter configuration file.
/// Refuses to overwrite an existing file unless --force is given.
fn run_init(args: &InitArgs) -> Result<()> {
    if args.output.exists() && !args.force {
        anyhow::bail!(
            "{} already exists; use --force to overwrite",
            args.output.display()
        );
    }
    // Template: every setting present but commented out, so the generated
    // file is a no-op until the user uncomments lines.
    let content = r##"# oxide-sloc configuration
# Generated by `oxide-sloc init`. Uncomment and adjust as needed.
# Full reference: https://github.com/oxide-sloc/oxide-sloc
[discovery]
# root_paths = ["."]
# include_globs = []
# exclude_globs = []
# excluded_directories = [".git", "node_modules", "target", "vendor"]
# honor_ignore_files = true
# ignore_hidden_files = true
# follow_symlinks = false
# max_file_size_bytes = 2097152 # 2 MB
# submodule_breakdown = false
[analysis]
# enabled_languages = [] # empty = all 41 supported languages
# mixed_line_policy = "code-only" # code-only | code-and-comment | comment-only | separate-mixed-category
# python_docstrings_as_comments = true
# generated_file_detection = true
# minified_file_detection = true
# vendor_directory_detection = true
# include_lockfiles = false
#
# IEEE 1045-1992 counting parameters:
# continuation_line_policy = "each-physical-line" # each-physical-line | collapse-to-logical
# blank_in_block_comment_policy = "count-as-comment" # count-as-comment | count-as-blank
# count_compiler_directives = true # false = exclude #include/#define from code SLOC (C/C++/ObjC)
# Override extension → language mappings (e.g. treat .h as C++)
# [analysis.extension_overrides]
# "h" = "cpp"
# SLOC budget thresholds — fail CI with --fail-on-budget if exceeded.
# [analysis.budget]
# total_max = 100000 # maximum code lines across all languages (0 = unlimited)
# rust = 60000 # per-language ceiling; key = lowercase display name
# typescript = 30000
[reporting]
# report_title = "OxideSLOC Report"
# theme = "auto" # auto | light | dark
# include_summary_charts = true
# include_skipped_files_section = true
# include_warnings_section = true
#
# Team branding (all optional):
# company_name = "Acme Corp" # replaces "OxideSLOC" in the report header
# logo_path = "assets/logo.png" # PNG or SVG to embed in place of the default logo
# accent_color = "#3b82f6" # primary accent colour (#RGB or #RRGGBB)
[web]
# bind_address = "127.0.0.1:4317"
# server_mode = false
# ── Named profiles ───────────────────────────────────────────────────────────
# Use `oxide-sloc analyze --profile frontend` to apply a profile.
# Each profile overrides its entire config section when selected.
#
# [profile.frontend]
# [profile.frontend.discovery]
# root_paths = ["frontend"]
# exclude_globs = ["**/node_modules/**", "**/dist/**"]
# [profile.frontend.analysis]
# enabled_languages = ["TypeScript", "JavaScript", "CSS", "HTML"]
# [profile.frontend.reporting]
# report_title = "Frontend SLOC Report"
#
# [profile.backend]
# [profile.backend.discovery]
# root_paths = ["backend", "shared"]
# [profile.backend.analysis]
# enabled_languages = ["Rust", "Python", "SQL"]
"##;
    // Create parent directories only when the target path actually has a
    // non-empty parent component (parent() yields Some("") for bare names).
    if let Some(parent) = args.output.parent() {
        if !parent.as_os_str().is_empty() {
            std::fs::create_dir_all(parent)
                .with_context(|| format!("failed to create directory {}", parent.display()))?;
        }
    }
    std::fs::write(&args.output, content)
        .with_context(|| format!("failed to write {}", args.output.display()))?;
    eprintln!("created {}", args.output.display());
    Ok(())
}
/// `oxide-sloc validate` — statically check a config file, collecting every
/// problem (missing paths, bad globs, bad colors, empty profile names)
/// before reporting, rather than stopping at the first error.
fn run_validate(
    args: &ValidateArgs,
) -> Result<()> {
    // Default to ./.oxide-sloc.toml when --config is not given.
    let config_path = args
        .config
        .as_deref()
        .unwrap_or_else(|| std::path::Path::new(".oxide-sloc.toml"));
    if !config_path.exists() {
        anyhow::bail!(
            "config file not found: {} (use `oxide-sloc init` to create one)",
            config_path.display()
        );
    }
    let config = sloc_config::AppConfig::load_from_file(config_path)
        .with_context(|| format!("failed to load {}", config_path.display()))?;
    // Accumulate all findings; reported together at the end.
    let mut errors: Vec<String> = Vec::new();
    // Path-existence checks.
    for path in &config.discovery.root_paths {
        if !path.exists() {
            errors.push(format!(
                "discovery.root_paths: '{}' does not exist",
                path.display()
            ));
        }
    }
    for path in &config.discovery.allowed_scan_roots {
        if !path.exists() {
            errors.push(format!(
                "discovery.allowed_scan_roots: '{}' does not exist",
                path.display()
            ));
        }
    }
    // Glob syntax checks (globset is the matcher used at scan time).
    for pattern in &config.discovery.include_globs {
        if globset::Glob::new(pattern).is_err() {
            errors.push(format!(
                "discovery.include_globs: invalid glob pattern '{pattern}'"
            ));
        }
    }
    for pattern in &config.discovery.exclude_globs {
        if globset::Glob::new(pattern).is_err() {
            errors.push(format!(
                "discovery.exclude_globs: invalid glob pattern '{pattern}'"
            ));
        }
    }
    // Branding checks.
    if let Some(logo) = &config.reporting.logo_path {
        if !logo.exists() {
            errors.push(format!(
                "reporting.logo_path: '{}' does not exist",
                logo.display()
            ));
        }
    }
    if let Some(color) = &config.reporting.accent_color {
        if sloc_config::validate_hex_color(color).is_err() {
            errors.push(format!(
                "reporting.accent_color: '{color}' is not a valid hex colour (use #RGB or #RRGGBB)"
            ));
        }
    }
    for name in config.profiles.keys() {
        if name.trim().is_empty() {
            errors.push("profiles: profile name must not be empty".into());
        }
    }
    if errors.is_empty() {
        println!("config valid: {}", config_path.display());
        let profile_count = config.profiles.len();
        if profile_count > 0 {
            println!(
                " {} profile(s): {}",
                profile_count,
                config
                    .profiles
                    .keys()
                    .cloned()
                    .collect::<Vec<_>>()
                    .join(", ")
            );
        }
        Ok(())
    } else {
        for e in &errors {
            eprintln!("error: {e}");
        }
        anyhow::bail!("{} validation error(s) found", errors.len())
    }
}
/// `oxide-sloc send` — deliver a saved report over any combination of SMTP,
/// generic webhooks, Teams cards, and Confluence. Deliveries run
/// sequentially; the first failure aborts the remainder.
async fn run_send(args: SendArgs) -> Result<()> {
    // Require at least one delivery channel up front.
    if args.smtp_to.is_empty()
        && args.webhook_url.is_empty()
        && args.notify_teams.is_empty()
        && args.confluence_url.is_none()
    {
        anyhow::bail!(
            "provide at least one of --smtp-to, --webhook-url, --notify-teams, or --confluence-url"
        );
    }
    // Warn (but proceed) when the SMTP password was passed as a flag rather
    // than via the environment — flags are visible in process listings.
    if args.smtp_pass.is_some() && std::env::var("SLOC_SMTP_PASS").is_err() {
        eprintln!(
            "WARNING: --smtp-pass exposes credentials in process listings. \
             Use the SLOC_SMTP_PASS environment variable instead."
        );
    }
    let run = read_json(&args.input)?;
    if !args.smtp_to.is_empty() {
        send_smtp(&args, &run).await?;
    }
    for url in &args.webhook_url {
        send_webhook(url, args.webhook_token.as_deref(), &run).await?;
    }
    for url in &args.notify_teams {
        send_teams_card(url, &run, args.report_url.as_deref()).await?;
    }
    if args.confluence_url.is_some() {
        send_confluence(&args, &run).await?;
    }
    println!("send: all deliveries completed");
    Ok(())
}
/// Email the rendered HTML report to every `--smtp-to` recipient.
///
/// Requires `--smtp-host`/`SLOC_SMTP_HOST` and `--smtp-from`. Improvement:
/// the TLS parameters and SMTP transport are loop-invariant, so they are now
/// built once before the recipient loop instead of being reconstructed for
/// every recipient.
async fn send_smtp(args: &SendArgs, run: &AnalysisRun) -> Result<()> {
    let host = args.smtp_host.as_deref().ok_or_else(|| {
        anyhow::anyhow!("--smtp-host (or SLOC_SMTP_HOST) is required for SMTP delivery")
    })?;
    let from = args
        .smtp_from
        .as_deref()
        .ok_or_else(|| anyhow::anyhow!("--smtp-from is required for SMTP delivery"))?;
    // Message bodies are shared by all recipients: plain-text fallback plus
    // the full HTML report as the rich multipart alternative.
    let html_body = render_html(run)?;
    let plain_body = format!(
        "oxide-sloc report: {} files analyzed, {} code lines\n\nSee attached HTML for the full report.",
        run.summary_totals.files_analyzed, run.summary_totals.code_lines,
    );
    // Build the transport once (hoisted out of the per-recipient loop).
    // Certificate validation stays enabled.
    let tls_params = lettre::transport::smtp::client::TlsParameters::builder(host.to_string())
        .dangerous_accept_invalid_certs(false)
        .build()
        .with_context(|| format!("failed to build TLS parameters for {host}"))?;
    let mut builder = AsyncSmtpTransport::<Tokio1Executor>::builder_dangerous(host)
        .tls(lettre::transport::smtp::client::Tls::Required(tls_params))
        .port(args.smtp_port);
    // Credentials are optional; both user and pass must be present to be used.
    if let (Some(user), Some(pass)) = (args.smtp_user.as_deref(), args.smtp_pass.as_deref()) {
        builder = builder.credentials(Credentials::new(user.to_owned(), pass.to_owned()));
    }
    let transport = builder.build();
    for recipient in &args.smtp_to {
        let msg = Message::builder()
            .from(
                from.parse()
                    .with_context(|| format!("invalid from address: {from}"))?,
            )
            .to(recipient
                .parse()
                .with_context(|| format!("invalid recipient address: {recipient}"))?)
            .subject(format!(
                "oxide-sloc report — {}",
                run.effective_configuration.reporting.report_title
            ))
            .multipart(
                MultiPart::alternative()
                    .singlepart(
                        SinglePart::builder()
                            .header(ContentType::TEXT_PLAIN)
                            .body(plain_body.clone()),
                    )
                    .singlepart(
                        SinglePart::builder()
                            .header(ContentType::TEXT_HTML)
                            .body(html_body.clone()),
                    ),
            )
            .context("failed to build email message")?;
        transport
            .send(msg)
            .await
            .with_context(|| format!("SMTP delivery to {recipient} failed"))?;
        println!("send: emailed {recipient}");
    }
    Ok(())
}
/// Hostnames that must never receive webhook posts: cloud instance-metadata
/// endpoints commonly targeted via SSRF. Compared verbatim against the URL
/// host in `validate_webhook_url`.
const BLOCKED_WEBHOOK_HOSTS: &[&str] = &[
    "169.254.169.254",
    "metadata.google.internal",
    "metadata.internal",
    "instance-data",
];
/// True when `ip` belongs to a range webhooks must not target: loopback,
/// RFC-1918 private, link-local, broadcast, or unspecified for IPv4;
/// loopback, unspecified, multicast, link-local (fe80::/10), or unique-local
/// (fc00::/7) for IPv6.
const fn is_ip_blocked(ip: std::net::IpAddr) -> bool {
    match ip {
        std::net::IpAddr::V4(addr) => {
            addr.is_unspecified()
                || addr.is_loopback()
                || addr.is_link_local()
                || addr.is_private()
                || addr.is_broadcast()
        }
        std::net::IpAddr::V6(addr) => {
            let first_segment = addr.segments()[0];
            addr.is_loopback()
                || addr.is_unspecified()
                || addr.is_multicast()
                // fe80::/10 link-local
                || (first_segment & 0xffc0) == 0xfe80
                // fc00::/7 unique-local
                || (first_segment & 0xfe00) == 0xfc00
        }
    }
}
fn validate_webhook_url(raw: &str) -> Result<()> {
let parsed = reqwest::Url::parse(raw).with_context(|| format!("invalid webhook URL: {raw}"))?;
if parsed.scheme() != "https" {
anyhow::bail!(
"webhook URL must use HTTPS (got scheme \"{}\")",
parsed.scheme()
);
}
let host = parsed
.host_str()
.ok_or_else(|| anyhow::anyhow!("webhook URL has no host"))?;
if BLOCKED_WEBHOOK_HOSTS.contains(&host) || host.to_ascii_lowercase().ends_with(".local") {
anyhow::bail!("webhook URL host is blocked: {host}");
}
if let Ok(ip) = host.parse::<std::net::IpAddr>() {
if is_ip_blocked(ip) {
anyhow::bail!("webhook URL resolves to a blocked IP address: {ip}");
}
}
Ok(())
}
/// POST the full analysis run as JSON to a generic webhook endpoint.
/// An optional bearer token is attached as an `Authorization` header.
/// Non-2xx responses are treated as errors.
async fn send_webhook(url: &str, token: Option<&str>, run: &AnalysisRun) -> Result<()> {
    validate_webhook_url(url)?;
    let mut request = reqwest::Client::new().post(url).json(run);
    if let Some(bearer) = token {
        request = request.header("Authorization", format!("Bearer {bearer}"));
    }
    let response = request
        .send()
        .await
        .with_context(|| format!("webhook POST to {url} failed"))?;
    let status = response.status();
    if !status.is_success() {
        anyhow::bail!("webhook {url} returned HTTP {status}");
    }
    println!("send: posted to {url}");
    Ok(())
}
/// Render `n` with comma thousands separators, e.g. 1234567 → "1,234,567".
fn fmt_thousands(n: u64) -> String {
    let digits = n.to_string();
    let total = digits.len();
    // Worst case adds one comma per three digits.
    let mut grouped = String::with_capacity(total + total / 3);
    for (idx, ch) in digits.chars().enumerate() {
        // A comma goes before every digit (except the first) that starts a
        // trailing group of three.
        if idx > 0 && (total - idx) % 3 == 0 {
            grouped.push(',');
        }
        grouped.push(ch);
    }
    grouped
}
/// Post an Adaptive Card summary of the run to a Microsoft Teams webhook:
/// a title, a FactSet of headline totals, the top five languages, and an
/// optional "View Full Report" button.
async fn send_teams_card(url: &str, run: &AnalysisRun, report_url: Option<&str>) -> Result<()> {
    validate_webhook_url(url)?;
    let totals = &run.summary_totals;
    let title = &run.effective_configuration.reporting.report_title;
    // Top-5 languages as a markdown bullet list; " \n" is a soft line break
    // in Teams markdown.
    let lang_lines: String = run
        .totals_by_language
        .iter()
        .take(5)
        .map(|l| {
            format!(
                "- **{}**: {} code lines ({} files)",
                l.language.display_name(),
                fmt_thousands(l.code_lines),
                l.files
            )
        })
        .collect::<Vec<_>>()
        .join(" \n");
    let facts = serde_json::json!([
        { "title": "Files Analyzed", "value": totals.files_analyzed.to_string() },
        { "title": "Code Lines", "value": fmt_thousands(totals.code_lines) },
        { "title": "Comment Lines", "value": fmt_thousands(totals.comment_lines) },
        { "title": "Blank Lines", "value": fmt_thousands(totals.blank_lines) },
        { "title": "Total Physical", "value": fmt_thousands(totals.total_physical_lines) },
    ]);
    let mut body_items = vec![
        serde_json::json!({
            "type": "TextBlock",
            "text": title,
            "weight": "Bolder",
            "size": "Medium"
        }),
        serde_json::json!({
            "type": "FactSet",
            "facts": facts
        }),
    ];
    if !lang_lines.is_empty() {
        body_items.push(serde_json::json!({
            "type": "TextBlock",
            "text": "**Top Languages**",
            "weight": "Bolder",
            "spacing": "Medium"
        }));
        body_items.push(serde_json::json!({
            "type": "TextBlock",
            "text": lang_lines,
            "wrap": true
        }));
    }
    // Optional action button linking to the hosted report.
    let mut actions: Vec<serde_json::Value> = Vec::new();
    if let Some(link) = report_url {
        actions.push(serde_json::json!({
            "type": "Action.OpenUrl",
            "title": "View Full Report",
            "url": link
        }));
    }
    // Teams expects the card wrapped in a message/attachments envelope.
    let card = serde_json::json!({
        "type": "message",
        "attachments": [{
            "contentType": "application/vnd.microsoft.card.adaptive",
            "content": {
                "$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
                "type": "AdaptiveCard",
                "version": "1.4",
                "body": body_items,
                "actions": actions
            }
        }]
    });
    let client = reqwest::Client::new();
    let resp = client
        .post(url)
        .json(&card)
        .send()
        .await
        .with_context(|| format!("Teams webhook POST to {url} failed"))?;
    if !resp.status().is_success() {
        anyhow::bail!("Teams webhook {url} returned HTTP {}", resp.status());
    }
    println!("send: posted Teams card to {url}");
    Ok(())
}
/// Load the application configuration from `config_path`, or fall back to
/// built-in defaults when no path was supplied.
fn load_base_config(config_path: Option<&Path>) -> Result<AppConfig> {
    match config_path {
        Some(path) => AppConfig::load_from_file(path),
        None => Ok(AppConfig::default()),
    }
}
/// Build the effective AppConfig for `analyze`:
/// config file → CLI overrides → optional named profile → validation.
fn resolve_analyze_config(args: &AnalyzeArgs) -> Result<AppConfig> {
    let mut config = load_base_config(args.config.as_deref())?;
    apply_discovery_cli_args(&mut config, args);
    apply_analysis_cli_args(&mut config, args);
    if let Some(title) = &args.report_title {
        config.reporting.report_title.clone_from(title);
    }
    // Profiles are applied after the CLI overrides above; per the init
    // template, a profile overrides its entire config section when selected.
    if let Some(profile) = &args.profile {
        config.apply_profile(profile)?;
    }
    config.validate()?;
    // Without at least one root there is nothing to scan.
    if config.discovery.root_paths.is_empty() {
        anyhow::bail!("provide at least one PATH or configure discovery.root_paths");
    }
    Ok(config)
}
/// Overlay discovery-related CLI flags onto the loaded configuration.
/// List-valued flags replace the config values only when non-empty; boolean
/// flags are one-way switches (present = override, absent = keep config).
fn apply_discovery_cli_args(config: &mut AppConfig, args: &AnalyzeArgs) {
    let discovery = &mut config.discovery;
    if !args.paths.is_empty() {
        discovery.root_paths.clone_from(&args.paths);
    }
    if !args.include_glob.is_empty() {
        discovery.include_globs.clone_from(&args.include_glob);
    }
    if !args.exclude_glob.is_empty() {
        discovery.exclude_globs.clone_from(&args.exclude_glob);
    }
    // --no-ignore-files can only disable; --follow-symlinks and
    // --submodule-breakdown can only enable.
    discovery.honor_ignore_files &= !args.no_ignore_files;
    discovery.follow_symlinks |= args.follow_symlinks;
    discovery.submodule_breakdown |= args.submodule_breakdown;
}
/// Overlay analysis-related CLI flags onto the loaded configuration; flags
/// that were not supplied leave the config-file values untouched.
fn apply_analysis_cli_args(config: &mut AppConfig, args: &AnalyzeArgs) {
    let analysis = &mut config.analysis;
    if !args.enabled_language.is_empty() {
        analysis.enabled_languages.clone_from(&args.enabled_language);
    }
    if let Some(policy) = args.mixed_line_policy {
        analysis.mixed_line_policy = policy;
    }
    // The flag is phrased as the inverse of the config field.
    if args.python_docstrings_as_code {
        analysis.python_docstrings_as_comments = false;
    }
    if let Some(policy) = args.continuation_line_policy {
        analysis.continuation_line_policy = policy;
    }
    if let Some(policy) = args.blank_in_block_comment_policy {
        analysis.blank_in_block_comment_policy = policy;
    }
    if args.no_count_compiler_directives {
        analysis.count_compiler_directives = false;
    }
    if let Some(coverage) = &args.coverage_file {
        analysis.coverage_file = Some(coverage.clone());
    }
}
/// Return the path of the HTML file to feed into the PDF renderer: the
/// explicit --html-out path when one was given, otherwise a freshly rendered
/// sibling `.html` next to the PDF target.
fn ensure_html_for_pdf(
    run: &AnalysisRun,
    html_out: Option<&Path>,
    pdf_out: &Path,
) -> Result<PathBuf> {
    match html_out {
        Some(existing) => Ok(existing.to_path_buf()),
        None => {
            let derived = pdf_out.with_extension("html");
            write_html(run, &derived)?;
            Ok(derived)
        }
    }
}
/// Emit the run totals as machine-readable `key=value` lines on stdout
/// (the `--plain` output format).
fn print_plain_summary(run: &AnalysisRun) {
    let t = &run.summary_totals;
    println!("files_analyzed={}", t.files_analyzed);
    println!("files_skipped={}", t.files_skipped);
    println!("physical_lines={}", t.total_physical_lines);
    println!("code_lines={}", t.code_lines);
    println!("comment_lines={}", t.comment_lines);
    println!("blank_lines={}", t.blank_lines);
    println!("mixed_lines_separate={}", t.mixed_lines_separate);
}
/// Print the headline totals block. Labels carry manual space padding
/// *inside* the painted string, so ANSI escapes do not disturb alignment
/// here. The mixed-lines row only appears when the count is non-zero.
fn print_totals_header(run: &AnalysisRun, col: bool) {
    println!("{}", paint!(col, "1", "SLOC Analysis Complete"));
    println!(
        " {} {}",
        paint!(col, "36", "Files analyzed :"),
        paint!(col, "32", run.summary_totals.files_analyzed)
    );
    println!(
        " {} {}",
        paint!(col, "36", "Files skipped :"),
        run.summary_totals.files_skipped
    );
    println!(
        " {} {}",
        paint!(col, "36", "Physical lines :"),
        run.summary_totals.total_physical_lines
    );
    println!(
        " {} {}",
        paint!(col, "36", "Code lines :"),
        // Bold green — the headline number.
        paint!(col, "32;1", run.summary_totals.code_lines)
    );
    println!(
        " {} {}",
        paint!(col, "36", "Comment lines :"),
        run.summary_totals.comment_lines
    );
    println!(
        " {} {}",
        paint!(col, "36", "Blank lines :"),
        run.summary_totals.blank_lines
    );
    if run.summary_totals.mixed_lines_separate > 0 {
        println!(
            " {} {}",
            paint!(col, "36", "Mixed separate :"),
            run.summary_totals.mixed_lines_separate
        );
    }
}
/// Print the per-language totals table; no-op when there are no languages.
///
/// Fix: the column headers were previously painted *before* width padding,
/// so with colors enabled the `{:<14}`/`{:>6}` widths counted the invisible
/// ANSI escape bytes and the header did not line up with the data rows. The
/// header is now padded as plain text first, then the dim style is applied
/// to the whole padded row (identical bytes when colors are off).
fn print_language_table(run: &AnalysisRun, col: bool) {
    if run.totals_by_language.is_empty() {
        return;
    }
    println!();
    println!("{}", paint!(col, "1", "By Language"));
    let header = format!(
        "{:<14} {:>6} {:>8} {:>9} {:>7} {:>8}",
        "Language", "Files", "Code", "Comments", "Blank", "Total"
    );
    println!(" {}", paint!(col, "2", header));
    for lang in &run.totals_by_language {
        println!(
            " {:<14} {:>6} {:>8} {:>9} {:>7} {:>8}",
            lang.language.display_name(),
            lang.files,
            lang.code_lines,
            lang.comment_lines,
            lang.blank_lines,
            lang.total_physical_lines,
        );
    }
}
/// Prints one detail line per analyzed file (skipped when there are none).
/// Paths are prefixed with their submodule tag, if any, and truncated to fit.
fn print_per_file_table(run: &AnalysisRun, col: bool) {
    if run.per_file_records.is_empty() {
        return;
    }
    println!();
    println!("{}", paint!(col, "1", "Per-File Detail"));
    for record in &run.per_file_records {
        // "[submodule] " prefix, or nothing for top-level files.
        let tag = match record.submodule.as_deref() {
            Some(name) => format!("[{name}] "),
            None => String::new(),
        };
        let lang_name = match record.language {
            Some(l) => l.display_name().to_string(),
            None => "-".into(),
        };
        println!(
            " {:<50} {:<14} code={:<6} comment={:<6} blank={:<6}",
            truncate(&format!("{tag}{}", record.relative_path), 50),
            lang_name,
            record.effective_counts.code_lines,
            record.effective_counts.comment_lines,
            record.effective_counts.blank_lines,
        );
    }
}
/// Prints per-submodule rollups (skipped when the run has no submodules).
fn print_submodule_table(run: &AnalysisRun, col: bool) {
    if run.submodule_summaries.is_empty() {
        return;
    }
    println!();
    println!("{}", paint!(col, "1", "By Submodule"));
    for summary in &run.submodule_summaries {
        println!(
            " {:<30} files={:<4} code={:<6} comment={:<6} blank={:<6}",
            truncate(&summary.name, 30),
            summary.files_analyzed,
            summary.code_lines,
            summary.comment_lines,
            summary.blank_lines,
        );
    }
}
/// Top-level console summary: plain key=value output when `plain` is set,
/// otherwise the colored table stack (totals, languages, optional per-file
/// detail, submodules, and a warnings count when any were recorded).
fn print_summary(run: &AnalysisRun, per_file: bool, plain: bool) {
    if plain {
        return print_plain_summary(run);
    }
    let colors = color_enabled();
    print_totals_header(run, colors);
    print_language_table(run, colors);
    if per_file {
        print_per_file_table(run, colors);
    }
    print_submodule_table(run, colors);
    let warning_count = run.warnings.len();
    if warning_count > 0 {
        println!();
        println!(" {} {}", paint!(colors, "33", "Warnings:"), warning_count);
    }
}
/// Formats a signed delta for console output: positive values get a leading
/// `+` and green, negatives red, and zero is rendered dim.
fn fmt_delta(col: bool, v: i64) -> String {
    if v > 0 {
        paint!(col, "32", format!("+{v}"))
    } else if v < 0 {
        paint!(col, "31", v)
    } else {
        paint!(col, "2", "0")
    }
}
/// Prints a scan comparison: `key=value` lines in plain mode, otherwise a
/// colored summary plus the first 20 changed files (A/D/M markers).
fn print_diff_summary(cmp: &ScanComparison, plain: bool) {
    let summary = &cmp.summary;
    if plain {
        println!("baseline_run_id={}", summary.baseline_run_id);
        println!("current_run_id={}", summary.current_run_id);
        println!("files_added={}", cmp.files_added);
        println!("files_removed={}", cmp.files_removed);
        println!("files_modified={}", cmp.files_modified);
        println!("files_unchanged={}", cmp.files_unchanged);
        println!("code_lines_delta={}", summary.code_lines_delta);
        println!("comment_lines_delta={}", summary.comment_lines_delta);
        println!("blank_lines_delta={}", summary.blank_lines_delta);
        println!("total_lines_delta={}", summary.total_lines_delta);
        return;
    }
    let col = color_enabled();
    println!("{}", paint!(col, "1", "SLOC Diff"));
    println!(" Baseline : {}", summary.baseline_run_id);
    println!(" Current : {}", summary.current_run_id);
    println!();
    println!(
        " Files added={} removed={} modified={} unchanged={}",
        paint!(col, "32", cmp.files_added),
        paint!(col, "31", cmp.files_removed),
        paint!(col, "33", cmp.files_modified),
        paint!(col, "2", cmp.files_unchanged),
    );
    println!(" Code Δ : {}", fmt_delta(col, summary.code_lines_delta));
    println!(" Comment Δ: {}", fmt_delta(col, summary.comment_lines_delta));
    println!(" Blank Δ : {}", fmt_delta(col, summary.blank_lines_delta));
    println!(" Total Δ : {}", fmt_delta(col, summary.total_lines_delta));
    // Stream the changed files directly; print the section header lazily so
    // it only appears when at least one file actually changed.
    let mut header_printed = false;
    for delta in cmp
        .file_deltas
        .iter()
        .filter(|f| f.status != sloc_core::FileChangeStatus::Unchanged)
        .take(20)
    {
        if !header_printed {
            println!();
            println!("{}", paint!(col, "1", "Changed Files (top 20)"));
            header_printed = true;
        }
        let marker = match delta.status {
            sloc_core::FileChangeStatus::Added => paint!(col, "32", "A"),
            sloc_core::FileChangeStatus::Removed => paint!(col, "31", "D"),
            sloc_core::FileChangeStatus::Modified => paint!(col, "33", "M"),
            sloc_core::FileChangeStatus::Unchanged => paint!(col, "2", " "),
        };
        println!(
            " {} {:<50} code {}",
            marker,
            truncate(&delta.relative_path, 50),
            fmt_delta(col, delta.code_delta),
        );
    }
}
/// Shortens `input` to at most `width` characters, replacing the tail with a
/// single `…` when it is cut.
///
/// Fix: operates on `char` boundaries. The previous version compared byte
/// length and sliced with `&input[..keep]`, which panics when `keep` lands
/// inside a multi-byte UTF-8 character (non-ASCII file paths or submodule
/// names) and also over-truncated strings whose byte length exceeded their
/// character count. Width is counted in chars, not terminal display columns.
fn truncate(input: &str, width: usize) -> String {
    if input.chars().count() <= width {
        return input.to_string();
    }
    // Reserve one slot for the ellipsis (width 0 degenerates to just "…").
    let keep = width.saturating_sub(1);
    let mut out: String = input.chars().take(keep).collect();
    out.push('…');
    out
}
/// Best-effort open of `path` with the platform's default handler:
/// `cmd /c start` on Windows, `open` on macOS, `xdg-open` elsewhere.
/// Spawn failures are deliberately ignored — opening a report in the
/// browser/viewer is a convenience, never a hard requirement.
fn open_path(path: &Path) {
    #[cfg(target_os = "windows")]
    {
        let path_str = path.to_string_lossy();
        // The empty "" after `start` is the window-title argument; without
        // it, a quoted path would be consumed as the title instead.
        let _ = std::process::Command::new("cmd")
            .args(["/c", "start", "", path_str.as_ref()])
            .spawn();
    }
    #[cfg(target_os = "macos")]
    {
        let _ = std::process::Command::new("open").arg(path).spawn();
    }
    #[cfg(not(any(target_os = "windows", target_os = "macos")))]
    {
        let _ = std::process::Command::new("xdg-open").arg(path).spawn();
    }
}
/// Writes `cmp` as an XLSX workbook at `path`; thin local alias that simply
/// forwards to [`sloc_report::write_diff_xlsx`].
fn write_diff_xlsx(cmp: &ScanComparison, path: &Path) -> Result<()> {
    sloc_report::write_diff_xlsx(cmp, path)
}
/// Builds the `Authorization` header value for Confluence requests.
///
/// A non-empty username selects HTTP Basic auth (`base64(user:token)`),
/// which Confluence Cloud API tokens require; otherwise the token is sent
/// as a Bearer credential (personal access tokens on Server/Data Center).
fn build_confluence_auth(username: Option<&str>, token: &str) -> String {
    use base64::Engine as _;
    if let Some(user) = username.filter(|u| !u.is_empty()) {
        let encoded = base64::engine::general_purpose::STANDARD.encode(format!("{user}:{token}"));
        format!("Basic {encoded}")
    } else {
        format!("Bearer {token}")
    }
}
/// Creates or updates a page on Confluence **Cloud** via the v2 REST API.
///
/// Flow: (1) resolve the `space` key to its numeric id, (2) look the page up
/// by title inside that space, (3) PUT a new version if it exists, otherwise
/// POST a fresh page (optionally under `parent_id` — parents only apply on
/// create, never on update).
///
/// # Errors
/// Fails on network/deserialization errors, an unknown space key, or a
/// non-success HTTP status from the update/create call.
async fn confluence_upsert_cloud(
    client: &reqwest::Client,
    base_url: &str,
    auth: &str,
    space: &str,
    page_title: &str,
    body_html: &str,
    parent_id: Option<&str>,
) -> Result<()> {
    // v2 endpoints address spaces by id, so the key must be resolved first.
    let space_resp: serde_json::Value = client
        .get(format!(
            "{base_url}/wiki/api/v2/spaces?keys={space}&limit=1"
        ))
        .header("Authorization", auth)
        .header("Accept", "application/json")
        .send()
        .await
        .context("Confluence space lookup")?
        .json()
        .await
        .context("Confluence space response")?;
    let space_id = space_resp["results"][0]["id"]
        .as_str()
        .ok_or_else(|| anyhow::anyhow!("Confluence space '{space}' not found"))?
        .to_owned();
    let enc_title = percent_encode(page_title);
    // NOTE(review): `expand=version` is classic-API syntax; the v2 pages
    // endpoint seems to be relied on to include `version` anyway — confirm
    // against the v2 API docs.
    let find_resp: serde_json::Value = client
        .get(format!(
            "{base_url}/wiki/api/v2/pages?spaceId={space_id}&title={enc_title}&limit=1&expand=version"
        ))
        .header("Authorization", auth)
        .header("Accept", "application/json")
        .send()
        .await?
        .json()
        .await?;
    // A missing page (or missing version number) falls through to create.
    let existing_id = find_resp["results"][0]["id"].as_str().map(str::to_owned);
    let existing_ver = find_resp["results"][0]["version"]["number"]
        .as_u64()
        .map(|v| u32::try_from(v).unwrap_or(u32::MAX));
    if let (Some(page_id), Some(ver)) = (existing_id, existing_ver) {
        // Updates must carry the next version number or Confluence rejects them.
        let payload = serde_json::json!({
            "version": { "number": ver + 1 },
            "title": page_title,
            "body": { "representation": "storage", "value": body_html }
        });
        let resp = client
            .put(format!("{base_url}/wiki/api/v2/pages/{page_id}"))
            .header("Authorization", auth)
            .header("Content-Type", "application/json")
            .json(&payload)
            .send()
            .await?;
        if !resp.status().is_success() {
            anyhow::bail!("Confluence update failed (HTTP {})", resp.status());
        }
        println!("send: updated Confluence page '{page_title}' (id: {page_id})");
    } else {
        let mut payload = serde_json::json!({
            "spaceId": space_id,
            "title": page_title,
            "body": { "representation": "storage", "value": body_html }
        });
        if let Some(pid) = parent_id {
            payload["parentId"] = serde_json::Value::String(pid.to_owned());
        }
        let resp = client
            .post(format!("{base_url}/wiki/api/v2/pages"))
            .header("Authorization", auth)
            .header("Content-Type", "application/json")
            .json(&payload)
            .send()
            .await?;
        if !resp.status().is_success() {
            let status = resp.status();
            let body = resp.text().await.unwrap_or_default();
            anyhow::bail!("Confluence create failed (HTTP {status}): {body}");
        }
        let created: serde_json::Value = resp.json().await?;
        let new_id = created["id"].as_str().unwrap_or("?");
        println!("send: created Confluence page '{page_title}' (id: {new_id})");
    }
    Ok(())
}
/// Creates or updates a page on Confluence **Server / Data Center** via the
/// classic `/rest/api/content` API (spaces are addressed by key here, so no
/// id-resolution step is needed, unlike the Cloud path).
///
/// Looks the page up by space key + title; PUTs a bumped version when it
/// exists, otherwise POSTs a new page (optionally under `parent_id` via the
/// `ancestors` field — only honoured on create).
///
/// # Errors
/// Fails on network/deserialization errors or a non-success HTTP status
/// from the update/create call.
async fn confluence_upsert_server(
    client: &reqwest::Client,
    base_url: &str,
    auth: &str,
    space: &str,
    page_title: &str,
    body_html: &str,
    parent_id: Option<&str>,
) -> Result<()> {
    let enc_title = percent_encode(page_title);
    let find_resp: serde_json::Value = client
        .get(format!(
            "{base_url}/rest/api/content?spaceKey={space}&title={enc_title}&type=page&expand=version&limit=1"
        ))
        .header("Authorization", auth)
        .header("Accept", "application/json")
        .send()
        .await?
        .json()
        .await?;
    // A missing page (or missing version number) falls through to create.
    let existing_id = find_resp["results"][0]["id"].as_str().map(str::to_owned);
    let existing_ver = find_resp["results"][0]["version"]["number"]
        .as_u64()
        .map(|v| u32::try_from(v).unwrap_or(u32::MAX));
    if let (Some(page_id), Some(ver)) = (existing_id, existing_ver) {
        // Updates must carry the next version number or Confluence rejects them.
        let payload = serde_json::json!({
            "version": { "number": ver + 1 },
            "type": "page",
            "title": page_title,
            "space": { "key": space },
            "body": { "storage": { "value": body_html, "representation": "storage" } }
        });
        let resp = client
            .put(format!("{base_url}/rest/api/content/{page_id}"))
            .header("Authorization", auth)
            .header("Content-Type", "application/json")
            .json(&payload)
            .send()
            .await?;
        if !resp.status().is_success() {
            anyhow::bail!("Confluence update failed (HTTP {})", resp.status());
        }
        println!("send: updated Confluence page '{page_title}' (id: {page_id})");
    } else {
        let mut payload = serde_json::json!({
            "type": "page",
            "space": { "key": space },
            "title": page_title,
            "body": { "storage": { "value": body_html, "representation": "storage" } }
        });
        if let Some(pid) = parent_id {
            payload["ancestors"] = serde_json::json!([{ "id": pid }]);
        }
        let resp = client
            .post(format!("{base_url}/rest/api/content"))
            .header("Authorization", auth)
            .header("Content-Type", "application/json")
            .json(&payload)
            .send()
            .await?;
        if !resp.status().is_success() {
            let status = resp.status();
            let body = resp.text().await.unwrap_or_default();
            anyhow::bail!("Confluence create failed (HTTP {status}): {body}");
        }
        let created: serde_json::Value = resp.json().await?;
        let new_id = created["id"].as_str().unwrap_or("?");
        println!("send: created Confluence page '{page_title}' (id: {new_id})");
    }
    Ok(())
}
/// `send` target for Confluence: renders the run as storage-format HTML and
/// upserts it as a page, auto-detecting Cloud (`*.atlassian.net`) versus
/// Server/Data Center and dispatching to the matching API flavor.
///
/// # Errors
/// Fails when a required `--confluence-*` argument is missing or the upsert
/// itself fails.
async fn send_confluence(args: &SendArgs, run: &AnalysisRun) -> Result<()> {
    let base_url = args
        .confluence_url
        .as_deref()
        .ok_or_else(|| anyhow::anyhow!("--confluence-url is required"))?
        .trim_end_matches('/');
    let token = args
        .confluence_token
        .as_deref()
        .ok_or_else(|| anyhow::anyhow!("--confluence-token (or SLOC_CONFLUENCE_TOKEN) is required"))?;
    let space = args
        .confluence_space
        .as_deref()
        .ok_or_else(|| anyhow::anyhow!("--confluence-space (or SLOC_CONFLUENCE_SPACE) is required"))?;
    // Page title falls back to the configured report title.
    let page_title = args
        .confluence_page_title
        .as_deref()
        .unwrap_or(&run.effective_configuration.reporting.report_title);
    // A Confluence-specific report link wins over the generic one.
    let report_url = args
        .confluence_report_url
        .as_deref()
        .or(args.report_url.as_deref());
    let body_html = sloc_report::render_confluence_storage(run, report_url);
    let auth = build_confluence_auth(args.confluence_username.as_deref(), token);
    let client = reqwest::Client::new();
    let parent_id = args.confluence_parent_id.as_deref();
    let is_cloud = base_url.to_lowercase().contains(".atlassian.net");
    if is_cloud {
        confluence_upsert_cloud(&client, base_url, &auth, space, page_title, &body_html, parent_id)
            .await
    } else {
        confluence_upsert_server(&client, base_url, &auth, space, page_title, &body_html, parent_id)
            .await
    }
}
/// Percent-encodes `s` for use in a URL query component.
///
/// RFC 3986 unreserved characters (ALPHA / DIGIT / `-` `_` `.` `~`) pass
/// through, a space becomes `+` (form-style query encoding), and every other
/// byte is emitted as `%XX`. Multi-byte UTF-8 characters are therefore
/// encoded byte by byte, yielding standard UTF-8 percent-encoding.
fn percent_encode(s: &str) -> String {
    s.bytes().fold(String::with_capacity(s.len()), |mut acc, byte| {
        match byte {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                acc.push(char::from(byte));
            }
            b' ' => acc.push('+'),
            other => {
                acc.push('%');
                write!(acc, "{other:02X}").expect("write to String is infallible");
            }
        }
        acc
    })
}
/// `pr-comment` subcommand: loads the current run (and optional baseline),
/// builds a Markdown comment body, and posts it to the configured provider.
///
/// # Errors
/// Fails when either JSON file cannot be read or the provider API call fails.
async fn run_pr_comment(args: PrCommentArgs) -> Result<()> {
    let current = read_json(&args.current)
        .with_context(|| format!("failed to read current: {}", args.current.display()))?;
    // A baseline is optional; without one, the comment omits the delta section.
    let comparison = match &args.baseline {
        Some(baseline_path) => {
            let baseline = read_json(baseline_path)
                .with_context(|| format!("failed to read baseline: {}", baseline_path.display()))?;
            Some(compute_delta(&baseline, &current))
        }
        None => None,
    };
    let body = build_pr_comment_body(&current, comparison.as_ref(), args.report_url.as_deref());
    match args.provider {
        VcsProvider::Github => post_github_comment(&args, &body).await?,
        VcsProvider::Gitlab => post_gitlab_comment(&args, &body).await?,
    }
    println!("pr-comment: posted comment to PR #{}", args.pr_number);
    Ok(())
}
fn build_pr_comment_body(
run: &AnalysisRun,
comparison: Option<&ScanComparison>,
report_url: Option<&str>,
) -> String {
let totals = &run.summary_totals;
let mut out = String::new();
out.push_str("## SLOC Report\n\n");
out.push_str("| Metric | Value |\n");
out.push_str("|--------|-------|\n");
writeln!(out, "| Files analyzed | {} |", totals.files_analyzed).expect("infallible");
writeln!(out, "| Code lines | {} |", fmt_thousands(totals.code_lines)).expect("infallible");
writeln!(
out,
"| Comment lines | {} |",
fmt_thousands(totals.comment_lines)
)
.expect("infallible");
writeln!(
out,
"| Blank lines | {} |",
fmt_thousands(totals.blank_lines)
)
.expect("infallible");
if let Some(cmp) = comparison {
let s = &cmp.summary;
let sign = |v: i64| {
if v >= 0 {
format!("+{v}")
} else {
v.to_string()
}
};
out.push_str("\n### Changes vs. Target Branch\n\n");
out.push_str("| | Delta |\n");
out.push_str("|--|-------|\n");
writeln!(out, "| Code Δ | {} |", sign(s.code_lines_delta)).expect("infallible");
writeln!(out, "| Comment Δ | {} |", sign(s.comment_lines_delta)).expect("infallible");
writeln!(out, "| Blank Δ | {} |", sign(s.blank_lines_delta)).expect("infallible");
writeln!(
out,
"| Files | +{} added / -{} removed / ~{} modified |",
cmp.files_added, cmp.files_removed, cmp.files_modified
)
.expect("infallible");
}
if !run.totals_by_language.is_empty() {
out.push_str("\n<details><summary>Language breakdown</summary>\n\n");
out.push_str("| Language | Files | Code | Comments | Blank |\n");
out.push_str("|----------|-------|------|----------|-------|\n");
for l in run.totals_by_language.iter().take(10) {
writeln!(
out,
"| {} | {} | {} | {} | {} |",
l.language.display_name(),
l.files,
l.code_lines,
l.comment_lines,
l.blank_lines,
)
.expect("infallible");
}
out.push_str("\n</details>\n");
}
if let Some(url) = report_url {
writeln!(out, "\n[View full report]({url})").expect("infallible");
}
out.push_str("\n*Generated by [oxide-sloc](https://github.com/oxide-sloc/oxide-sloc)*\n");
out
}
/// Posts `body` as an issue comment on a GitHub pull request (PR comments
/// use the issues endpoint). `args.api_url` overrides the default public
/// API host for GitHub Enterprise.
///
/// # Errors
/// Fails when the URL is rejected by `validate_webhook_url`, the request
/// cannot be sent, or GitHub returns a non-success status.
async fn post_github_comment(args: &PrCommentArgs, body: &str) -> Result<()> {
    let base = args
        .api_url
        .as_deref()
        .unwrap_or("https://api.github.com")
        .trim_end_matches('/');
    let url = format!(
        "{base}/repos/{}/issues/{}/comments",
        args.repo, args.pr_number
    );
    validate_webhook_url(&url)?;
    let response = reqwest::Client::new()
        .post(&url)
        .header("Authorization", format!("Bearer {}", args.token))
        .header("Accept", "application/vnd.github+json")
        .header("X-GitHub-Api-Version", "2022-11-28")
        .header("User-Agent", "oxide-sloc")
        .json(&serde_json::json!({ "body": body }))
        .send()
        .await
        .with_context(|| format!("GitHub API POST to {url} failed"))?;
    if response.status().is_success() {
        return Ok(());
    }
    let status = response.status();
    let text = response.text().await.unwrap_or_default();
    anyhow::bail!("GitHub API returned HTTP {status}: {text}")
}
/// Posts `body` as a note (comment) on a GitLab merge request.
/// `args.api_url` overrides the default host for self-managed instances;
/// authentication uses the `PRIVATE-TOKEN` header.
///
/// The project is addressed by its URL-encoded full path; only `/` needs
/// escaping, so the previous per-character `map`+`collect` loop (which
/// allocated a `String` per char) is replaced with a single `str::replace`.
///
/// # Errors
/// Fails when the URL is rejected by `validate_webhook_url`, the request
/// cannot be sent, or GitLab returns a non-success status.
async fn post_gitlab_comment(args: &PrCommentArgs, body: &str) -> Result<()> {
    let base = args
        .api_url
        .as_deref()
        .unwrap_or("https://gitlab.com")
        .trim_end_matches('/');
    let encoded_repo = args.repo.replace('/', "%2F");
    let url = format!(
        "{base}/api/v4/projects/{encoded_repo}/merge_requests/{}/notes",
        args.pr_number
    );
    validate_webhook_url(&url)?;
    let payload = serde_json::json!({ "body": body });
    let client = reqwest::Client::new();
    let resp = client
        .post(&url)
        .header("PRIVATE-TOKEN", &args.token)
        .header("Content-Type", "application/json")
        .json(&payload)
        .send()
        .await
        .with_context(|| format!("GitLab API POST to {url} failed"))?;
    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        anyhow::bail!("GitLab API returned HTTP {status}: {text}");
    }
    Ok(())
}
/// `git-scan` subcommand: clone/fetch a repository, check the requested ref
/// out into a throwaway worktree, analyze it, print a summary, and write any
/// requested JSON/HTML/CSV outputs.
///
/// # Errors
/// Returns an error if the git operations, the blocking analysis task, or
/// any output write fails.
async fn run_git_scan(args: GitScanArgs) -> Result<()> {
    let clones_dir = resolve_clones_dir(args.clones_dir.as_deref());
    let quiet = args.quiet;
    if !quiet {
        eprintln!("Cloning / fetching {}…", args.repo);
    }
    let dest = git_clone_path(&args.repo, &clones_dir);
    clone_or_fetch(&args.repo, &dest)?;
    // Unique worktree name so concurrent scans of the same clone don't clash.
    let wt_path = clones_dir.join(format!("wt-cli-{}", uuid_simple()));
    create_worktree(&dest, &args.git_ref, &wt_path)?;
    let config = build_git_scan_config(&wt_path);
    if !quiet {
        eprintln!("Scanning {} at {}…", args.repo, args.git_ref);
    }
    // File-system-heavy analysis runs on the blocking pool, not the reactor.
    let run_result = tokio::task::spawn_blocking(move || analyze(&config, "git-scan", None)).await;
    // Tear the worktree down BEFORE propagating any error so it isn't leaked.
    let _ = destroy_worktree(&dest, &wt_path);
    let run = run_result.context("analysis task failed")??;
    if !quiet {
        print_summary(&run, false, args.plain);
    }
    write_git_scan_outputs(
        &run,
        args.json_out.as_deref(),
        args.html_out.as_deref(),
        args.csv_out.as_deref(),
        quiet,
    )?;
    Ok(())
}
/// Builds a default [`AppConfig`] whose discovery root is `path` (the
/// temporary worktree to scan).
fn build_git_scan_config(path: &Path) -> AppConfig {
    let mut cfg = AppConfig::default();
    cfg.discovery.root_paths = vec![path.to_owned()];
    cfg
}
/// Writes the optional `git-scan` outputs — JSON, HTML, and CSV are each
/// independent and only produced when a destination was given. Each written
/// path is logged unless `quiet` is set.
///
/// # Errors
/// Propagates the first write failure.
fn write_git_scan_outputs(
    run: &AnalysisRun,
    json_out: Option<&Path>,
    html_out: Option<&Path>,
    csv_out: Option<&Path>,
    quiet: bool,
) -> Result<()> {
    if let Some(dest) = json_out {
        write_json(run, dest)?;
        log_written(dest, quiet);
    }
    if let Some(dest) = html_out {
        write_html(run, dest)?;
        log_written(dest, quiet);
    }
    if let Some(dest) = csv_out {
        write_csv(run, dest)?;
        log_written(dest, quiet);
    }
    Ok(())
}
/// `git-compare` subcommand: scans two refs of the same repository in
/// isolated worktrees, diffs the runs, prints the diff summary, and writes
/// any requested outputs.
///
/// # Errors
/// Fails if cloning, either scan, or an output write fails.
#[allow(clippy::needless_pass_by_value)]
fn run_git_compare(args: GitCompareArgs) -> Result<()> {
    let clones_dir = resolve_clones_dir(args.clones_dir.as_deref());
    let quiet = args.quiet;
    let repo_dir = git_clone_path(&args.repo, &clones_dir);
    clone_or_fetch(&args.repo, &repo_dir)?;
    let baseline_run = scan_at_ref(&repo_dir, &args.baseline_ref, &clones_dir, quiet)?;
    let current_run = scan_at_ref(&repo_dir, &args.current_ref, &clones_dir, quiet)?;
    let comparison = compute_delta(&baseline_run, &current_run);
    if !quiet {
        print_diff_summary(&comparison, args.plain);
    }
    write_compare_outputs(
        &comparison,
        args.json_out.as_deref(),
        args.csv_out.as_deref(),
        quiet,
    )?;
    Ok(())
}
/// Checks `ref_name` out into a temporary worktree under `clones_dir`, scans
/// it, and removes the worktree again before returning.
///
/// Fix: the worktree is now destroyed even when `analyze` fails. The previous
/// `analyze(...)?` returned early on error and leaked the worktree, unlike
/// `run_git_scan`, which already tore it down before propagating.
///
/// # Errors
/// Fails if the worktree cannot be created or the analysis itself errors.
fn scan_at_ref(dest: &Path, ref_name: &str, clones_dir: &Path, quiet: bool) -> Result<AnalysisRun> {
    let wt = clones_dir.join(format!("wt-cli-{}", uuid_simple()));
    create_worktree(dest, ref_name, &wt)?;
    if !quiet {
        eprintln!("Scanning ref {ref_name}…");
    }
    let config = build_git_scan_config(&wt);
    // Capture the result first so cleanup runs on both success and failure.
    let result = analyze(&config, "git-compare", None);
    let _ = destroy_worktree(dest, &wt);
    result.map_err(Into::into)
}
/// Writes the optional `git-compare` outputs: pretty-printed JSON and/or a
/// diff CSV, logging each written path unless `quiet` is set.
///
/// # Errors
/// Propagates serialization or write failures.
fn write_compare_outputs(
    cmp: &ScanComparison,
    json_out: Option<&Path>,
    csv_out: Option<&Path>,
    quiet: bool,
) -> Result<()> {
    if let Some(dest) = json_out {
        std::fs::write(dest, serde_json::to_string_pretty(cmp)?)?;
        log_written(dest, quiet);
    }
    if let Some(dest) = csv_out {
        write_diff_csv(cmp, dest)?;
        log_written(dest, quiet);
    }
    Ok(())
}
/// `watch` subcommand: polls `origin/<branch>` every `interval` seconds
/// (clamped to a 60s minimum) and re-scans whenever the tip SHA changes.
/// Runs until the process is interrupted; SHA-resolution failures are logged
/// and skipped.
///
/// # Errors
/// Returns an error only when a clone/fetch fails.
/// NOTE(review): the in-loop `clone_or_fetch(...)?` also ends the watch on a
/// transient fetch failure — confirm whether that should log-and-continue
/// like the `get_sha` error path instead.
async fn run_watch(args: WatchArgs) -> Result<()> {
    let clones_dir = resolve_clones_dir(args.clones_dir.as_deref());
    let quiet = args.quiet;
    // Floor the polling interval so the remote is never hammered.
    let interval = args.interval.max(60);
    if !quiet {
        eprintln!(
            "Watching {} ({}) — polling every {}s. Ctrl-C to stop.",
            args.repo, args.branch, interval
        );
    }
    let dest = git_clone_path(&args.repo, &clones_dir);
    clone_or_fetch(&args.repo, &dest)?;
    // Seed with the current tip so only NEW commits trigger a scan.
    let mut last_sha = get_sha(&dest, &format!("origin/{}", args.branch)).unwrap_or_default();
    loop {
        tokio::time::sleep(std::time::Duration::from_secs(interval)).await;
        clone_or_fetch(&args.repo, &dest)?;
        let sha = match get_sha(&dest, &format!("origin/{}", args.branch)) {
            Ok(s) => s,
            Err(e) => {
                eprintln!("[watch] resolve failed: {e}");
                continue;
            }
        };
        if sha == last_sha {
            continue;
        }
        last_sha.clone_from(&sha);
        if !quiet {
            eprintln!("[watch] new commit {sha} — scanning…");
        }
        run_watch_scan(&dest, &sha, &clones_dir, args.output_dir.as_deref(), quiet);
    }
}
/// Performs one watch-mode scan of commit `sha` in a fresh worktree.
/// Every failure is logged to stderr and swallowed so the polling loop in
/// `run_watch` always continues; the worktree is removed afterwards.
fn run_watch_scan(
    dest: &Path,
    sha: &str,
    clones_dir: &Path,
    output_dir: Option<&Path>,
    quiet: bool,
) {
    let wt = clones_dir.join(format!("wt-watch-{}", uuid_simple()));
    if let Err(e) = create_worktree(dest, sha, &wt) {
        eprintln!("[watch] worktree error: {e}");
        return;
    }
    let config = build_git_scan_config(&wt);
    match analyze(&config, "watch", None) {
        Err(e) => eprintln!("[watch] scan error: {e:#}"),
        Ok(run) => {
            if !quiet {
                print_summary(&run, false, false);
            }
            write_watch_output(&run, output_dir, sha, quiet);
        }
    }
    let _ = destroy_worktree(dest, &wt);
}
/// Writes the scan result as `<short-sha>.json` inside `output_dir` (no-op
/// when no output directory was configured). Write failures are logged, not
/// propagated, to keep the watch loop alive.
fn write_watch_output(run: &AnalysisRun, output_dir: Option<&Path>, sha: &str, quiet: bool) {
    if let Some(dir) = output_dir {
        // File is named after the first (up to) 8 chars of the commit SHA.
        let short = &sha[..sha.len().min(8)];
        let path = dir.join(format!("{short}.json"));
        match write_json(run, &path) {
            Err(e) => eprintln!("[watch] write failed: {e}"),
            Ok(_) => log_written(&path, quiet),
        }
    }
}
/// Resolves the directory that holds git clones, in priority order:
/// explicit override, the `SLOC_GIT_CLONES_DIR` environment variable, then
/// `<temp>/sloc-git-clones`.
fn resolve_clones_dir(override_path: Option<&Path>) -> PathBuf {
    if let Some(p) = override_path {
        return p.to_path_buf();
    }
    match std::env::var("SLOC_GIT_CLONES_DIR") {
        Ok(dir) => PathBuf::from(dir),
        Err(_) => std::env::temp_dir().join("sloc-git-clones"),
    }
}
/// Maps a repository URL to a stable on-disk clone directory: every char
/// that is not alphanumeric or `-` `_` `.` becomes `_`, and the name is
/// capped at 80 characters so the same URL always yields the same path.
fn git_clone_path(repo_url: &str, clones_dir: &Path) -> PathBuf {
    let mut safe = String::new();
    for c in repo_url.chars().take(80) {
        let keep = c.is_alphanumeric() || matches!(c, '-' | '_' | '.');
        safe.push(if keep { c } else { '_' });
    }
    clones_dir.join(safe)
}
/// Returns a fresh random UUID v4 rendered without hyphens; used to create
/// unique names for disposable git worktree directories.
fn uuid_simple() -> String {
    uuid::Uuid::new_v4().simple().to_string()
}