#![forbid(unsafe_code)]
use std::path::{Path, PathBuf};
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
use std::time::{SystemTime, UNIX_EPOCH};
use anyhow::Result;
#[cfg(feature = "analysis")]
use tokmd_analysis as analysis;
#[cfg(feature = "analysis")]
use tokmd_analysis_types::{AnalysisArgsMeta, AnalysisSource};
pub mod context_git;
pub mod context_policy;
pub mod error;
pub mod ffi;
pub mod settings;
pub use tokmd_scan::InMemoryFile;
pub use tokmd_types as types;
use settings::{DiffSettings, ExportSettings, LangSettings, ModuleSettings, ScanSettings};
use tokmd_format::scan_args;
use tokmd_settings::ScanOptions;
use tokmd_types::{
ChildIncludeMode, DiffReceipt, ExportArgsMeta, ExportData, ExportReceipt, FileRow,
LangArgsMeta, LangReceipt, LangReport, ModuleArgsMeta, ModuleReceipt, ModuleReport, RedactMode,
SCHEMA_VERSION, ScanStatus, ToolInfo,
};
/// Milliseconds since the Unix epoch on wasm builds, via the JS `Date` API.
/// Clamped to at least 1 so receipts never carry a zero timestamp.
#[cfg(all(target_arch = "wasm32", target_os = "unknown"))]
fn now_ms() -> u128 {
    js_sys::Date::now().max(1.0) as u128
}
/// Milliseconds since the Unix epoch on native builds; returns 0 if the
/// system clock reports a time before the epoch.
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
fn now_ms() -> u128 {
    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_millis(),
        Err(_) => 0,
    }
}
/// Scans the filesystem and produces a language-level receipt.
pub fn lang_workflow(scan: &ScanSettings, lang: &LangSettings) -> Result<LangReceipt> {
    let opts = settings_to_scan_options(scan);
    let targets = scan_paths_or_current_dir(scan);
    let languages = tokmd_scan::scan(&targets, &opts)?;
    Ok(build_lang_receipt(
        &targets,
        &opts,
        lang,
        tokmd_model::create_lang_report(&languages, lang.top, lang.files, lang.children),
    ))
}
/// Builds a language receipt from in-memory files without touching the
/// filesystem. Options are normalized first so the result is deterministic.
pub fn lang_workflow_from_inputs(
    inputs: &[InMemoryFile],
    scan_opts: &ScanOptions,
    lang: &LangSettings,
) -> Result<LangReceipt> {
    let effective_opts = deterministic_in_memory_scan_options(scan_opts);
    // Language reports do not group by module, so pass neutral module args.
    let (paths, rows) =
        collect_pure_in_memory_rows(inputs, &effective_opts, &[], 1, ChildIncludeMode::Separate)?;
    let report =
        tokmd_model::create_lang_report_from_rows(&rows, lang.top, lang.files, lang.children);
    Ok(build_lang_receipt(&paths, &effective_opts, lang, report))
}
/// Scans the filesystem and produces a module-level receipt.
pub fn module_workflow(scan: &ScanSettings, module: &ModuleSettings) -> Result<ModuleReceipt> {
    let opts = settings_to_scan_options(scan);
    let targets = scan_paths_or_current_dir(scan);
    let languages = tokmd_scan::scan(&targets, &opts)?;
    Ok(build_module_receipt(
        &targets,
        &opts,
        module,
        tokmd_model::create_module_report(
            &languages,
            &module.module_roots,
            module.module_depth,
            module.children,
            module.top,
        ),
    ))
}
/// Builds a module receipt from in-memory files without touching the
/// filesystem. Options are normalized first so the result is deterministic.
pub fn module_workflow_from_inputs(
    inputs: &[InMemoryFile],
    scan_opts: &ScanOptions,
    module: &ModuleSettings,
) -> Result<ModuleReceipt> {
    let effective_opts = deterministic_in_memory_scan_options(scan_opts);
    let (paths, rows) = collect_pure_in_memory_rows(
        inputs,
        &effective_opts,
        &module.module_roots,
        module.module_depth,
        module.children,
    )?;
    Ok(build_module_receipt(
        &paths,
        &effective_opts,
        module,
        tokmd_model::create_module_report_from_rows(
            &rows,
            &module.module_roots,
            module.module_depth,
            module.children,
            module.top,
        ),
    ))
}
pub fn export_workflow(scan: &ScanSettings, export: &ExportSettings) -> Result<ExportReceipt> {
let scan_opts = settings_to_scan_options(scan);
let paths = scan_paths_or_current_dir(scan);
let strip_prefix = export.strip_prefix.as_deref();
let languages = tokmd_scan::scan(&paths, &scan_opts)?;
let data = tokmd_model::create_export_data(
&languages,
&export.module_roots,
export.module_depth,
export.children,
strip_prefix.map(std::path::Path::new),
export.min_code,
export.max_rows,
);
Ok(build_export_receipt(&paths, &scan_opts, export, data))
}
/// Builds an export receipt from in-memory files without touching the
/// filesystem. Options are normalized first so the result is deterministic.
pub fn export_workflow_from_inputs(
    inputs: &[InMemoryFile],
    scan_opts: &ScanOptions,
    export: &ExportSettings,
) -> Result<ExportReceipt> {
    let effective_opts = deterministic_in_memory_scan_options(scan_opts);
    let (paths, raw_rows) = collect_pure_in_memory_rows(
        inputs,
        &effective_opts,
        &export.module_roots,
        export.module_depth,
        export.children,
    )?;
    // When a prefix strip is requested, rewrite paths and module keys.
    let rows = match export.strip_prefix.as_deref() {
        Some(prefix) => strip_virtual_export_prefix(
            raw_rows,
            prefix,
            &export.module_roots,
            export.module_depth,
        ),
        None => raw_rows,
    };
    let data = tokmd_model::create_export_data_from_rows(
        rows,
        &export.module_roots,
        export.module_depth,
        export.children,
        export.min_code,
        export.max_rows,
    );
    Ok(build_export_receipt(&paths, &effective_opts, export, data))
}
/// Loads two language reports (from receipt files or fresh scans) and
/// produces a diff receipt between them.
pub fn diff_workflow(settings: &DiffSettings) -> Result<DiffReceipt> {
    let from = load_lang_report(&settings.from)?;
    let to = load_lang_report(&settings.to)?;
    let rows = tokmd_format::compute_diff_rows(&from, &to);
    let totals = tokmd_format::compute_diff_totals(&rows);
    Ok(tokmd_format::create_diff_receipt(
        &settings.from,
        &settings.to,
        rows,
        totals,
    ))
}
/// Runs an export scan and then the analysis engine over its receipt.
/// The analysis root falls back to the current directory, then `"."`.
#[cfg(feature = "analysis")]
pub fn analyze_workflow(
    scan: &ScanSettings,
    analyze: &settings::AnalyzeSettings,
) -> Result<tokmd_analysis_types::AnalysisReceipt> {
    let export_receipt = export_workflow(scan, &ExportSettings::default())?;
    let root = match derive_analysis_root(scan) {
        Some(dir) => dir,
        None => std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")),
    };
    analyze_with_export_receipt(export_receipt, scan.paths.clone(), root, analyze)
}
/// Runs an `analyze` workflow over in-memory files instead of the filesystem.
///
/// Presets that do not need a real on-disk root (`receipt`, `estimate`) are
/// served from a pure, deterministic in-memory row pipeline with an empty
/// root. All other presets go through `tokmd_scan::scan_in_memory`, which
/// yields a scan with a real prefix that becomes the analysis root.
#[cfg(feature = "analysis")]
pub fn analyze_workflow_from_inputs(
    inputs: &[InMemoryFile],
    scan_opts: &ScanOptions,
    analyze: &settings::AnalyzeSettings,
) -> Result<tokmd_analysis_types::AnalysisReceipt> {
    // Analysis always runs over the default export shape.
    let export = ExportSettings::default();
    let scan_opts = deterministic_in_memory_scan_options(scan_opts);
    if supports_rootless_in_memory_analyze_preset(&analyze.preset) {
        // Rootless path: build export rows directly from the in-memory bytes.
        let (paths, rows) = collect_pure_in_memory_rows(
            inputs,
            &scan_opts,
            &export.module_roots,
            export.module_depth,
            export.children,
        )?;
        let data = tokmd_model::create_export_data_from_rows(
            rows,
            &export.module_roots,
            export.module_depth,
            export.children,
            export.min_code,
            export.max_rows,
        );
        let logical_inputs: Vec<String> = paths
            .iter()
            .map(|path| tokmd_model::normalize_path(path, None))
            .collect();
        let export_receipt = build_export_receipt(&paths, &scan_opts, &export, data);
        // An empty PathBuf signals "no filesystem root" to the analyzer.
        return analyze_with_export_receipt(
            export_receipt,
            logical_inputs,
            PathBuf::new(),
            analyze,
        );
    }
    // Fallback path: materialize the inputs through tokmd_scan so presets
    // that need a real root can run.
    let scan = tokmd_scan::scan_in_memory(inputs, &scan_opts)?;
    let data = collect_materialized_export_data(&scan, &export);
    let logical_inputs: Vec<String> = scan
        .logical_paths()
        .iter()
        .map(|path| tokmd_model::normalize_path(path, None))
        .collect();
    // The materialized scan's strip prefix doubles as the analysis root.
    let root = scan.strip_prefix().to_path_buf();
    let export_receipt = build_export_receipt(scan.logical_paths(), &scan_opts, &export, data);
    analyze_with_export_receipt(export_receipt, logical_inputs, root, analyze)
}
/// True for presets that can run in memory without a filesystem root
/// (`receipt` and `estimate`); comparison is trimmed and case-insensitive.
#[cfg(feature = "analysis")]
#[doc(hidden)]
pub fn supports_rootless_in_memory_analyze_preset(preset: &str) -> bool {
    matches!(
        preset.trim().to_ascii_lowercase().as_str(),
        "receipt" | "estimate"
    )
}
/// Runs the analysis engine over an already-built export receipt.
///
/// `inputs` are the logical input paths echoed into the receipt's source
/// metadata; `root` is the filesystem root the analysis may read from
/// (empty for rootless in-memory runs).
#[cfg(feature = "analysis")]
fn analyze_with_export_receipt(
    export_receipt: ExportReceipt,
    inputs: Vec<String>,
    root: PathBuf,
    analyze: &settings::AnalyzeSettings,
) -> Result<tokmd_analysis_types::AnalysisReceipt> {
    let request = build_analysis_request(analyze)?;
    // Provenance: record where the export data came from. Path/receipt/
    // signature fields are None because the export was produced in-process.
    let source = AnalysisSource {
        inputs,
        export_path: None,
        base_receipt_path: None,
        export_schema_version: Some(export_receipt.schema_version),
        export_generated_at_ms: Some(export_receipt.generated_at_ms),
        base_signature: None,
        module_roots: export_receipt.data.module_roots.clone(),
        module_depth: export_receipt.data.module_depth,
        children: child_include_mode_to_string(export_receipt.data.children),
    };
    let ctx = analysis::AnalysisContext {
        export: export_receipt.data,
        root,
        source,
    };
    analysis::analyze(ctx, request)
}
/// Translates user-facing `AnalyzeSettings` into the analysis engine's
/// `AnalysisRequest`, validating the preset, import granularity, and effort
/// settings along the way.
#[cfg(feature = "analysis")]
fn build_analysis_request(
    analyze: &settings::AnalyzeSettings,
) -> Result<analysis::AnalysisRequest> {
    let (preset, preset_meta) = parse_analysis_preset(&analyze.preset)?;
    let (granularity, granularity_meta) = parse_import_granularity(&analyze.granularity)?;
    let effort = parse_effort_request(analyze, &preset_meta)?;
    Ok(analysis::AnalysisRequest {
        preset,
        // Echo of the normalized arguments as metadata.
        args: AnalysisArgsMeta {
            preset: preset_meta,
            format: "json".to_string(),
            window_tokens: analyze.window,
            git: analyze.git,
            max_files: analyze.max_files,
            max_bytes: analyze.max_bytes,
            max_file_bytes: analyze.max_file_bytes,
            max_commits: analyze.max_commits,
            max_commit_files: analyze.max_commit_files,
            import_granularity: granularity_meta,
        },
        limits: analysis::AnalysisLimits {
            max_files: analyze.max_files,
            max_bytes: analyze.max_bytes,
            max_file_bytes: analyze.max_file_bytes,
            max_commits: analyze.max_commits,
            max_commit_files: analyze.max_commit_files,
        },
        window_tokens: analyze.window,
        git: analyze.git,
        import_granularity: granularity,
        // Function-level detail and near-duplicate detection are not exposed
        // through this workflow; they are pinned to fixed values here.
        detail_functions: false,
        near_dup: false,
        near_dup_threshold: 0.80,
        near_dup_max_files: 2000,
        near_dup_scope: analysis::NearDupScope::Module,
        near_dup_max_pairs: None,
        near_dup_exclude: Vec::new(),
        effort,
    })
}
/// Computes the git "cockpit" receipt for the repository containing the
/// current working directory.
///
/// Fails early if `git` is not on PATH or the cwd is not inside a repo.
/// The base ref is resolved (with fallback) before the cockpit is computed
/// between base and head; when a baseline file is supplied, trend data is
/// computed from it and attached to the receipt.
#[cfg(feature = "cockpit")]
pub fn cockpit_workflow(
    settings: &settings::CockpitSettings,
) -> Result<tokmd_types::cockpit::CockpitReceipt> {
    use tokmd_types::cockpit::CockpitReceipt;
    if !tokmd_git::git_available() {
        anyhow::bail!("git is not available on PATH");
    }
    let cwd = std::env::current_dir().context("Failed to resolve current directory")?;
    let repo_root =
        tokmd_git::repo_root(&cwd).ok_or_else(|| anyhow::anyhow!("not inside a git repository"))?;
    let range_mode = parse_cockpit_range_mode(&settings.range_mode)?;
    let resolved_base =
        tokmd_git::resolve_base_ref(&repo_root, &settings.base).ok_or_else(|| {
            anyhow::anyhow!(
                "base ref '{}' not found and no fallback resolved",
                settings.base
            )
        })?;
    let baseline_path = settings.baseline.as_deref();
    let mut receipt: CockpitReceipt = tokmd_cockpit::compute_cockpit(
        &repo_root,
        &resolved_base,
        &settings.head,
        range_mode,
        baseline_path.map(std::path::Path::new),
    )?;
    // Trend is only available when the caller provided a baseline receipt.
    if let Some(baseline_path) = baseline_path {
        receipt.trend = Some(tokmd_cockpit::load_and_compute_trend(
            std::path::Path::new(baseline_path),
            &receipt,
        )?);
    }
    Ok(receipt)
}
/// Parses a cockpit range-mode string; accepts "two-dot"/"2dot" and
/// "three-dot"/"3dot", trimmed and case-insensitively.
#[cfg(feature = "cockpit")]
fn parse_cockpit_range_mode(value: &str) -> Result<tokmd_git::GitRangeMode> {
    let normalized = value.trim().to_ascii_lowercase();
    if normalized == "two-dot" || normalized == "2dot" {
        return Ok(tokmd_git::GitRangeMode::TwoDot);
    }
    if normalized == "three-dot" || normalized == "3dot" {
        return Ok(tokmd_git::GitRangeMode::ThreeDot);
    }
    Err(error::TokmdError::invalid_field(
        "range_mode",
        "'two-dot', '2dot', 'three-dot', or '3dot'",
    )
    .into())
}
#[cfg(feature = "cockpit")]
use anyhow::Context as _;
#[cfg(feature = "analysis")]
/// Stable re-exports of the analysis rendering entry points for downstream
/// callers, so they need not depend on `tokmd_format` directly.
pub mod analysis_facade {
    pub use tokmd_format::analysis::render;
    pub use tokmd_format::analysis::RenderedOutput;
}
/// Extracts an owned copy of the caller-provided `ScanOptions` from the
/// settings bundle.
fn settings_to_scan_options(scan: &ScanSettings) -> ScanOptions {
    scan.options.clone()
}
/// Returns the configured scan paths, defaulting to `"."` when none are set.
fn scan_paths_or_current_dir(scan: &ScanSettings) -> Vec<PathBuf> {
    match scan.paths.as_slice() {
        [] => vec![PathBuf::from(".")],
        paths => paths.iter().map(PathBuf::from).collect(),
    }
}
/// Normalizes options for in-memory scans so results are deterministic:
/// no config-file influence, hidden files included, no exclusions.
fn deterministic_in_memory_scan_options(scan_opts: &ScanOptions) -> ScanOptions {
    let mut opts = scan_opts.clone();
    opts.hidden = true;
    opts.excluded.clear();
    opts.config = tokmd_types::ConfigMode::None;
    opts
}
/// Converts in-memory files into normalized paths plus per-file rows without
/// any filesystem access.
///
/// `module_roots`/`module_depth`/`children` control how each row's module
/// key is derived.
fn collect_pure_in_memory_rows(
    inputs: &[InMemoryFile],
    scan_opts: &ScanOptions,
    module_roots: &[String],
    module_depth: usize,
    children: ChildIncludeMode,
) -> Result<(Vec<PathBuf>, Vec<FileRow>)> {
    let paths = tokmd_scan::normalize_in_memory_paths(inputs)?;
    let config = tokmd_scan::config_from_scan_options(scan_opts);
    // Pair each normalized path with its source bytes; assumes
    // normalize_in_memory_paths preserves input order 1:1 — TODO confirm.
    let row_inputs: Vec<tokmd_model::InMemoryRowInput<'_>> = paths
        .iter()
        .zip(inputs)
        .map(|(path, input)| {
            tokmd_model::InMemoryRowInput::new(path.as_path(), input.bytes.as_slice())
        })
        .collect();
    let rows = tokmd_model::collect_in_memory_file_rows(
        &row_inputs,
        module_roots,
        module_depth,
        children,
        &config,
    );
    Ok((paths, rows))
}
/// Collects per-file rows from a materialized scan, stripping the scan's
/// prefix from reported paths.
#[cfg(feature = "analysis")]
fn collect_materialized_rows(
    scan: &tokmd_scan::MaterializedScan,
    module_roots: &[String],
    module_depth: usize,
    children: ChildIncludeMode,
) -> Vec<FileRow> {
    tokmd_model::collect_file_rows(
        scan.languages(),
        module_roots,
        module_depth,
        children,
        Some(scan.strip_prefix()),
    )
}
/// Rewrites each row's path with `strip_prefix` removed and recomputes its
/// module key from the stripped path.
fn strip_virtual_export_prefix(
    rows: Vec<FileRow>,
    strip_prefix: &str,
    module_roots: &[String],
    module_depth: usize,
) -> Vec<FileRow> {
    let prefix = Path::new(strip_prefix);
    let mut stripped = Vec::with_capacity(rows.len());
    for mut row in rows {
        let normalized = tokmd_model::normalize_path(Path::new(&row.path), Some(prefix));
        // Module keys must be derived from the post-strip path.
        row.module = tokmd_model::module_key(&normalized, module_roots, module_depth);
        row.path = normalized;
        stripped.push(row);
    }
    stripped
}
/// Builds `ExportData` from a materialized scan, applying the optional
/// strip-prefix rewrite before rows are aggregated.
#[cfg(feature = "analysis")]
fn collect_materialized_export_data(
    scan: &tokmd_scan::MaterializedScan,
    export: &ExportSettings,
) -> ExportData {
    let mut rows = collect_materialized_rows(
        scan,
        &export.module_roots,
        export.module_depth,
        export.children,
    );
    // Mirror export_workflow_from_inputs: prefix stripping also recomputes
    // module keys.
    if let Some(strip_prefix) = export.strip_prefix.as_deref() {
        rows = strip_virtual_export_prefix(
            rows,
            strip_prefix,
            &export.module_roots,
            export.module_depth,
        );
    }
    tokmd_model::create_export_data_from_rows(
        rows,
        &export.module_roots,
        export.module_depth,
        export.children,
        export.min_code,
        export.max_rows,
    )
}
/// Wraps a `LangReport` into a full receipt with schema, timestamp, tool,
/// and scan-argument metadata.
fn build_lang_receipt(
    paths: &[PathBuf],
    scan_opts: &ScanOptions,
    lang: &LangSettings,
    report: LangReport,
) -> LangReceipt {
    LangReceipt {
        schema_version: SCHEMA_VERSION,
        generated_at_ms: now_ms(),
        tool: ToolInfo::current(),
        mode: "lang".to_string(),
        status: ScanStatus::Complete,
        warnings: vec![],
        scan: scan_args(paths, scan_opts, lang.redact),
        args: LangArgsMeta {
            format: "json".to_string(),
            top: lang.top,
            with_files: lang.files,
            children: lang.children,
        },
        report,
    }
}
/// Wraps a `ModuleReport` into a full receipt with schema, timestamp, tool,
/// and scan-argument metadata.
fn build_module_receipt(
    paths: &[PathBuf],
    scan_opts: &ScanOptions,
    module: &ModuleSettings,
    report: ModuleReport,
) -> ModuleReceipt {
    ModuleReceipt {
        schema_version: SCHEMA_VERSION,
        generated_at_ms: now_ms(),
        tool: ToolInfo::current(),
        mode: "module".to_string(),
        status: ScanStatus::Complete,
        warnings: vec![],
        scan: scan_args(paths, scan_opts, module.redact),
        args: ModuleArgsMeta {
            format: "json".to_string(),
            top: module.top,
            module_roots: module.module_roots.clone(),
            module_depth: module.module_depth,
            children: module.children,
        },
        report,
    }
}
/// Assembles the final `ExportReceipt`, applying the configured redaction
/// mode to the echoed strip-prefix argument and to the row data.
fn build_export_receipt(
    paths: &[PathBuf],
    scan_opts: &ScanOptions,
    export: &ExportSettings,
    data: ExportData,
) -> ExportReceipt {
    // Paths are redacted in both `Paths` and `All` modes.
    let should_redact = export.redact == RedactMode::Paths || export.redact == RedactMode::All;
    // Record whether the strip_prefix echoed in args was itself redacted.
    let strip_prefix_redacted = should_redact && export.strip_prefix.is_some();
    ExportReceipt {
        schema_version: SCHEMA_VERSION,
        generated_at_ms: now_ms(),
        tool: ToolInfo::current(),
        mode: "export".to_string(),
        status: ScanStatus::Complete,
        warnings: vec![],
        scan: scan_args(paths, scan_opts, Some(export.redact)),
        args: ExportArgsMeta {
            format: export.format,
            module_roots: export.module_roots.clone(),
            module_depth: export.module_depth,
            children: export.children,
            min_code: export.min_code,
            max_rows: export.max_rows,
            redact: export.redact,
            strip_prefix: if should_redact {
                export
                    .strip_prefix
                    .as_ref()
                    .map(|p| tokmd_format::redact_path(p))
            } else {
                export.strip_prefix.clone()
            },
            strip_prefix_redacted,
        },
        data: redact_export_data(data, export.redact),
    }
}
/// Parses an analysis preset name (trimmed, case-insensitive), returning the
/// engine enum plus the normalized string used for args metadata.
#[cfg(feature = "analysis")]
fn parse_analysis_preset(value: &str) -> Result<(analysis::AnalysisPreset, String)> {
    let normalized = value.trim().to_ascii_lowercase();
    let preset = match normalized.as_str() {
        "receipt" => analysis::AnalysisPreset::Receipt,
        "estimate" => analysis::AnalysisPreset::Estimate,
        "health" => analysis::AnalysisPreset::Health,
        "risk" => analysis::AnalysisPreset::Risk,
        "supply" => analysis::AnalysisPreset::Supply,
        "architecture" => analysis::AnalysisPreset::Architecture,
        "topics" => analysis::AnalysisPreset::Topics,
        "security" => analysis::AnalysisPreset::Security,
        "identity" => analysis::AnalysisPreset::Identity,
        "git" => analysis::AnalysisPreset::Git,
        "deep" => analysis::AnalysisPreset::Deep,
        "fun" => analysis::AnalysisPreset::Fun,
        _ => {
            return Err(error::TokmdError::invalid_field(
                "preset",
                "'receipt', 'estimate', 'health', 'risk', 'supply', 'architecture', 'topics', 'security', 'identity', 'git', 'deep', or 'fun'",
            )
            .into());
        }
    };
    Ok((preset, normalized))
}
/// Parses an import granularity name ('module' or 'file'), returning the
/// engine enum plus the normalized string used for args metadata.
#[cfg(feature = "analysis")]
fn parse_import_granularity(value: &str) -> Result<(analysis::ImportGranularity, String)> {
    let normalized = value.trim().to_ascii_lowercase();
    let granularity = if normalized == "module" {
        analysis::ImportGranularity::Module
    } else if normalized == "file" {
        analysis::ImportGranularity::File
    } else {
        return Err(error::TokmdError::invalid_field("granularity", "'module' or 'file'").into());
    };
    Ok((granularity, normalized))
}
/// Decides whether an effort estimate was requested and validates its knobs.
///
/// An effort request is implied either by the normalized `estimate` preset
/// (passed as `preset`) or by any explicit effort_* setting; otherwise
/// `Ok(None)` is returned. Unset knobs fall back to
/// `EffortRequest::default()`.
#[cfg(feature = "analysis")]
fn parse_effort_request(
    analyze: &settings::AnalyzeSettings,
    preset: &str,
) -> Result<Option<analysis::EffortRequest>> {
    // Source of default values for any knob the caller left unset.
    let request = analysis::EffortRequest::default();
    let requested = preset == "estimate"
        || analyze.effort_model.is_some()
        || analyze.effort_layer.is_some()
        || analyze.effort_base_ref.is_some()
        || analyze.effort_head_ref.is_some()
        || analyze.effort_monte_carlo.unwrap_or(false)
        || analyze.effort_mc_iterations.is_some()
        || analyze.effort_mc_seed.is_some();
    if !requested {
        return Ok(None);
    }
    // Base and head refs are only meaningful as a pair.
    if (analyze.effort_base_ref.is_some() && analyze.effort_head_ref.is_none())
        || (analyze.effort_base_ref.is_none() && analyze.effort_head_ref.is_some())
    {
        return Err(error::TokmdError::invalid_field(
            "effort_base_ref/effort_head_ref",
            "both effort_base_ref and effort_head_ref must be provided together",
        )
        .into());
    }
    let model = analyze
        .effort_model
        .as_deref()
        .map(parse_effort_model)
        .transpose()?
        .unwrap_or(request.model);
    let layer = analyze
        .effort_layer
        .as_deref()
        .map(parse_effort_layer)
        .transpose()?
        .unwrap_or(request.layer);
    let monte_carlo = analyze.effort_monte_carlo.unwrap_or(false);
    let mc_iterations = analyze
        .effort_mc_iterations
        .unwrap_or(request.mc_iterations);
    // Zero iterations would make the Monte Carlo simulation meaningless.
    if mc_iterations == 0 {
        return Err(error::TokmdError::invalid_field(
            "effort_mc_iterations",
            "must be greater than 0",
        )
        .into());
    }
    Ok(Some(analysis::EffortRequest {
        model,
        layer,
        base_ref: analyze.effort_base_ref.clone(),
        head_ref: analyze.effort_head_ref.clone(),
        monte_carlo,
        mc_iterations,
        mc_seed: analyze.effort_mc_seed,
    }))
}
/// Parses an effort model name. Only 'cocomo81-basic' is accepted; the known
/// future models ('cocomo2-early', 'ensemble') get a dedicated error message.
#[cfg(feature = "analysis")]
fn parse_effort_model(value: &str) -> Result<analysis::EffortModelKind> {
    let normalized = value.trim().to_ascii_lowercase();
    if normalized == "cocomo81-basic" {
        return Ok(analysis::EffortModelKind::Cocomo81Basic);
    }
    if normalized == "cocomo2-early" || normalized == "ensemble" {
        return Err(error::TokmdError::invalid_field(
            "effort_model",
            "only 'cocomo81-basic' is currently supported",
        )
        .into());
    }
    Err(error::TokmdError::invalid_field("effort_model", "'cocomo81-basic'").into())
}
/// Parses an effort layer name: 'headline', 'why', or 'full'.
#[cfg(feature = "analysis")]
fn parse_effort_layer(value: &str) -> Result<analysis::EffortLayer> {
    let normalized = value.trim().to_ascii_lowercase();
    if normalized == "headline" {
        Ok(analysis::EffortLayer::Headline)
    } else if normalized == "why" {
        Ok(analysis::EffortLayer::Why)
    } else if normalized == "full" {
        Ok(analysis::EffortLayer::Full)
    } else {
        Err(
            error::TokmdError::invalid_field("effort_layer", "'headline', 'why', or 'full'").into(),
        )
    }
}
/// Serializes a `ChildIncludeMode` to its canonical string form.
#[cfg(feature = "analysis")]
fn child_include_mode_to_string(mode: tokmd_types::ChildIncludeMode) -> String {
    let label = match mode {
        tokmd_types::ChildIncludeMode::Separate => "separate",
        tokmd_types::ChildIncludeMode::ParentsOnly => "parents-only",
    };
    label.to_string()
}
/// Derives an analysis root from the first scan path: absolutized against the
/// cwd, then the path itself if it is a directory, else its parent.
/// Returns `None` when there is no usable first path.
#[cfg(feature = "analysis")]
fn derive_analysis_root(scan: &ScanSettings) -> Option<PathBuf> {
    let raw = scan.paths.first()?;
    if raw.trim().is_empty() {
        return None;
    }
    let mut resolved = PathBuf::from(raw);
    if !resolved.is_absolute() {
        resolved = std::env::current_dir().ok()?.join(resolved);
    }
    if resolved.is_dir() {
        return Some(resolved);
    }
    resolved.parent().map(|parent| parent.to_path_buf())
}
/// Loads a `LangReport` from `source`.
///
/// If `source` is an existing file containing a JSON `LangReceipt`, its
/// embedded report is returned directly. Otherwise (missing path, directory,
/// or non-receipt content) `source` is treated as a path to scan and a fresh
/// report is built.
fn load_lang_report(source: &str) -> Result<LangReport> {
    let path = std::path::Path::new(source);
    // `is_file()` already implies existence, so the former
    // `exists() && is_file()` double-check was redundant.
    if path.is_file() {
        let content = std::fs::read_to_string(path)?;
        if let Ok(receipt) = serde_json::from_str::<LangReceipt>(&content) {
            return Ok(receipt.report);
        }
        // Not a receipt file: fall through and scan it as a source path.
    }
    let scan = ScanSettings::for_paths(vec![source.to_string()]);
    let lang = LangSettings::default();
    let receipt = lang_workflow(&scan, &lang)?;
    Ok(receipt.report)
}
/// Applies the requested redaction mode to exported rows; `None` passes the
/// data through untouched.
fn redact_export_data(data: ExportData, mode: RedactMode) -> ExportData {
    if mode == RedactMode::None {
        return data;
    }
    // Paths are redacted for both `Paths` and `All`; modules only for `All`.
    let redact_paths = matches!(mode, RedactMode::Paths | RedactMode::All);
    let redact_modules = matches!(mode, RedactMode::All);
    let rows = data
        .rows
        .into_iter()
        .map(|mut row| {
            if redact_paths {
                row.path = tokmd_format::redact_path(&row.path);
            }
            if redact_modules {
                row.module = tokmd_format::short_hash(&row.module);
            }
            row
        })
        .collect();
    ExportData {
        rows,
        module_roots: data.module_roots,
        module_depth: data.module_depth,
        children: data.children,
    }
}
/// Schema version of the core receipt types re-exported by this crate.
pub const CORE_SCHEMA_VERSION: u32 = SCHEMA_VERSION;
/// Schema version of the analysis receipt types (analysis feature only).
#[cfg(feature = "analysis")]
pub const CORE_ANALYSIS_SCHEMA_VERSION: u32 = tokmd_analysis_types::ANALYSIS_SCHEMA_VERSION;
/// Returns this crate's package version as embedded at compile time.
pub fn version() -> &'static str {
    env!("CARGO_PKG_VERSION")
}
/// Unit tests for the workflow helpers; filesystem-backed analysis cases are
/// gated behind the `analysis` feature.
#[cfg(test)]
mod tests {
    use super::*;
    #[cfg(feature = "analysis")]
    use crate::settings::AnalyzeSettings;
    #[cfg(feature = "analysis")]
    use std::fs;
    #[cfg(feature = "analysis")]
    use std::path::{Path, PathBuf};
    #[cfg(feature = "analysis")]
    use std::time::{SystemTime, UNIX_EPOCH};
    // Removes its directory on drop so tests clean up even when they panic.
    #[cfg(feature = "analysis")]
    #[derive(Debug)]
    struct TempDirGuard(PathBuf);
    #[cfg(feature = "analysis")]
    impl Drop for TempDirGuard {
        fn drop(&mut self) {
            let _ = fs::remove_dir_all(&self.0);
        }
    }
    #[test]
    fn version_not_empty() {
        assert!(!version().is_empty());
    }
    #[test]
    fn settings_to_scan_options_preserves_values() {
        let scan = ScanSettings {
            paths: vec!["src".to_string()],
            options: ScanOptions {
                excluded: vec!["target".to_string()],
                hidden: true,
                no_ignore: true,
                ..Default::default()
            },
        };
        let opts = settings_to_scan_options(&scan);
        assert_eq!(opts.excluded, vec!["target"]);
        assert!(opts.hidden);
        assert!(opts.no_ignore);
    }
    #[test]
    fn scan_settings_current_dir() {
        let settings = ScanSettings::current_dir();
        assert_eq!(settings.paths, vec!["."]);
    }
    #[test]
    fn scan_settings_for_paths() {
        let settings = ScanSettings::for_paths(vec!["src".to_string(), "lib".to_string()]);
        assert_eq!(settings.paths, vec!["src", "lib"]);
    }
    #[cfg(feature = "analysis")]
    #[test]
    fn effort_request_defaults_to_estimate_preset() {
        let analyze = AnalyzeSettings {
            preset: "estimate".to_string(),
            ..Default::default()
        };
        let req = parse_effort_request(&analyze, "estimate").expect("parse effort request");
        let req = req.expect("estimate should imply effort request");
        assert_eq!(
            req.model.as_str(),
            analysis::EffortModelKind::Cocomo81Basic.as_str()
        );
        assert_eq!(req.layer.as_str(), analysis::EffortLayer::Full.as_str());
    }
    #[cfg(feature = "analysis")]
    #[test]
    fn effort_request_not_implied_for_non_estimate_without_flags() {
        let analyze = AnalyzeSettings {
            preset: "receipt".to_string(),
            ..Default::default()
        };
        let req = parse_effort_request(&analyze, "receipt").expect("parse effort request");
        assert!(req.is_none());
    }
    #[cfg(feature = "analysis")]
    #[test]
    fn effort_request_rejects_unsupported_model() {
        let analyze = AnalyzeSettings {
            preset: "estimate".to_string(),
            effort_model: Some("cocomo2-early".to_string()),
            ..Default::default()
        };
        let err =
            parse_effort_request(&analyze, "estimate").expect_err("unsupported model should fail");
        assert!(err.to_string().contains("only 'cocomo81-basic'"));
    }
    // Unique temp-dir name per run (timestamp + pid) to avoid collisions.
    #[cfg(feature = "analysis")]
    fn mk_temp_dir(prefix: &str) -> PathBuf {
        let timestamp = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_nanos();
        let mut root = std::env::temp_dir();
        root.push(format!("{prefix}-{timestamp}-{}", std::process::id()));
        root
    }
    // Writes `contents`, creating parent directories as needed.
    #[cfg(feature = "analysis")]
    fn write_file(path: &Path, contents: &str) {
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).unwrap();
        }
        fs::write(path, contents).unwrap();
    }
    #[cfg(feature = "analysis")]
    #[test]
    fn analyze_workflow_estimate_preset_populates_effort_and_size_basis_breakdown() {
        let root = mk_temp_dir("tokmd-core-estimate-preset");
        let _guard = TempDirGuard(root.clone());
        // One authored source plus generated- and vendored-looking paths.
        write_file(&root.join("src/main.rs"), "fn main() {}\n");
        write_file(
            &root.join("target/generated/bundle.min.js"),
            "console.log(1);\n",
        );
        write_file(
            &root.join("vendor/lib/external.rs"),
            "pub fn external() {}\n",
        );
        let scan = settings::ScanSettings::for_paths(vec![root.display().to_string()]);
        let analyze = AnalyzeSettings {
            preset: "estimate".to_string(),
            ..Default::default()
        };
        let receipt = analyze_workflow(&scan, &analyze).expect("estimate analyze failed");
        let effort = receipt
            .effort
            .as_ref()
            .expect("estimate preset should produce effort");
        assert!(effort.results.effort_pm_p50 > 0.0);
        // The three size-basis buckets must partition the total exactly.
        assert_eq!(
            effort.size_basis.total_lines,
            effort.size_basis.authored_lines
                + effort.size_basis.generated_lines
                + effort.size_basis.vendored_lines
        );
        assert!(effort.size_basis.authored_lines > 0);
        assert!(
            effort.size_basis.generated_lines + effort.size_basis.vendored_lines > 0,
            "expected deterministic generated or vendored lines"
        );
    }
}
/// Mutation-killing tests: pin exact parsing and redaction decisions so that
/// common operator mutations (e.g. `&&` -> `||`) are caught.
#[cfg(test)]
mod mutation_tests {
    use super::*;
    use tokmd_settings::ExportSettings;
    use tokmd_types::ExportData;
    use tokmd_types::RedactMode;
    fn empty_export_data() -> ExportData {
        ExportData {
            rows: vec![],
            module_roots: vec![],
            module_depth: 3,
            children: tokmd_types::ChildIncludeMode::Separate,
        }
    }
    fn minimal_scan_opts() -> ScanOptions {
        ScanOptions {
            excluded: vec![],
            config: tokmd_types::ConfigMode::Auto,
            hidden: false,
            no_ignore: false,
            no_ignore_parent: false,
            no_ignore_dot: false,
            no_ignore_vcs: false,
            treat_doc_strings_as_comments: false,
        }
    }
    // Builds ExportSettings varying only the two knobs under test.
    fn export_settings(redact: RedactMode, strip_prefix: Option<String>) -> ExportSettings {
        ExportSettings {
            format: tokmd_settings::ExportFormat::Json,
            module_roots: vec![],
            module_depth: 3,
            children: tokmd_types::ChildIncludeMode::Separate,
            min_code: 1,
            max_rows: 1000,
            redact,
            meta: true,
            strip_prefix,
        }
    }
    #[test]
    #[cfg(feature = "analysis")]
    fn parse_analysis_preset_all_twelve_variants() {
        #[cfg(feature = "analysis")]
        use tokmd_analysis::AnalysisPreset;
        let variants = [
            ("receipt", AnalysisPreset::Receipt),
            ("estimate", AnalysisPreset::Estimate),
            ("health", AnalysisPreset::Health),
            ("risk", AnalysisPreset::Risk),
            ("supply", AnalysisPreset::Supply),
            ("architecture", AnalysisPreset::Architecture),
            ("topics", AnalysisPreset::Topics),
            ("security", AnalysisPreset::Security),
            ("identity", AnalysisPreset::Identity),
            ("git", AnalysisPreset::Git),
            ("deep", AnalysisPreset::Deep),
            ("fun", AnalysisPreset::Fun),
        ];
        // Each variant is checked as-is, uppercased, and whitespace-padded.
        for (input, expected) in &variants {
            let (preset, normalized) = parse_analysis_preset(input).unwrap();
            assert_eq!(preset, *expected, "Exact match failed for: {}", input);
            assert_eq!(normalized, *input, "Normalization failed for: {}", input);
            let upper = input.to_uppercase();
            let (preset, normalized) = parse_analysis_preset(&upper).unwrap();
            assert_eq!(preset, *expected, "Uppercase match failed for: {}", upper);
            assert_eq!(
                normalized, *input,
                "Uppercase normalization failed for: {}",
                upper
            );
            let mixed = format!(" {} ", input);
            let (preset, normalized) = parse_analysis_preset(&mixed).unwrap();
            assert_eq!(preset, *expected, "Mixed case match failed for: {}", mixed);
            assert_eq!(
                normalized, *input,
                "Mixed case normalization failed for: {}",
                mixed
            );
        }
    }
    #[test]
    #[cfg(feature = "analysis")]
    fn parse_analysis_preset_invalid_variants_fail() {
        let invalid = [
            "unknown",
            "invalid",
            "",
            "receipts", "healthh", "ARCH", "receipt_estimate", ];
        for input in &invalid {
            assert!(
                parse_analysis_preset(input).is_err(),
                "Should fail for invalid input: {}",
                input
            );
        }
    }
    #[test]
    fn build_export_receipt_redact_paths_with_strip_prefix() {
        let settings = export_settings(RedactMode::Paths, Some("/project".to_string()));
        let data = empty_export_data();
        let paths = vec![PathBuf::from("/project/src/main.rs")];
        let receipt = build_export_receipt(&paths, &minimal_scan_opts(), &settings, data);
        assert!(
            receipt.args.strip_prefix_redacted,
            "strip_prefix_redacted should be true when redact=Paths and strip_prefix=Some"
        );
    }
    #[test]
    fn build_export_receipt_redact_paths_without_strip_prefix() {
        let settings = export_settings(RedactMode::Paths, None);
        let data = empty_export_data();
        let paths = vec![PathBuf::from("/project/src/main.rs")];
        let receipt = build_export_receipt(&paths, &minimal_scan_opts(), &settings, data);
        assert!(
            !receipt.args.strip_prefix_redacted,
            "strip_prefix_redacted should be false when strip_prefix=None (kills &&→||)"
        );
    }
    #[test]
    fn build_export_receipt_no_redact_with_strip_prefix() {
        let settings = export_settings(RedactMode::None, Some("/project".to_string()));
        let data = empty_export_data();
        let paths = vec![PathBuf::from("/project/src/main.rs")];
        let receipt = build_export_receipt(&paths, &minimal_scan_opts(), &settings, data);
        assert!(
            !receipt.args.strip_prefix_redacted,
            "strip_prefix_redacted should be false when redact=None"
        );
    }
    #[test]
    fn build_export_receipt_redact_all_with_strip_prefix() {
        let settings = export_settings(RedactMode::All, Some("/project".to_string()));
        let data = empty_export_data();
        let paths = vec![PathBuf::from("/project/src/main.rs")];
        let receipt = build_export_receipt(&paths, &minimal_scan_opts(), &settings, data);
        assert!(
            receipt.args.strip_prefix_redacted,
            "strip_prefix_redacted should be true when redact=All and strip_prefix=Some"
        );
    }
    #[test]
    fn build_export_receipt_redact_all_without_strip_prefix() {
        let settings = export_settings(RedactMode::All, None);
        let data = empty_export_data();
        let paths = vec![PathBuf::from("/project/src/main.rs")];
        let receipt = build_export_receipt(&paths, &minimal_scan_opts(), &settings, data);
        assert!(
            !receipt.args.strip_prefix_redacted,
            "strip_prefix_redacted should be false when strip_prefix=None (kills &&→||)"
        );
    }
    #[test]
    fn build_export_receipt_strip_prefix_redaction_logic() {
        let settings = export_settings(RedactMode::Paths, Some("/project".to_string()));
        let data = empty_export_data();
        let paths = vec![PathBuf::from("/project/src/main.rs")];
        let receipt = build_export_receipt(&paths, &minimal_scan_opts(), &settings, data);
        assert!(receipt.args.strip_prefix.is_some());
        assert_ne!(
            receipt.args.strip_prefix,
            Some("/project".to_string()),
            "strip_prefix should be redacted/transformed when redact=Paths"
        );
        let settings = export_settings(RedactMode::None, Some("/project".to_string()));
        let data = empty_export_data();
        let receipt = build_export_receipt(&paths, &minimal_scan_opts(), &settings, data);
        assert_eq!(
            receipt.args.strip_prefix,
            Some("/project".to_string()),
            "strip_prefix should pass through unchanged when redact=None"
        );
        let settings = export_settings(RedactMode::All, Some("/project".to_string()));
        let data = empty_export_data();
        let receipt = build_export_receipt(&paths, &minimal_scan_opts(), &settings, data);
        assert!(receipt.args.strip_prefix.is_some());
        assert_ne!(
            receipt.args.strip_prefix,
            Some("/project".to_string()),
            "strip_prefix should be redacted when redact=All"
        );
    }
    #[test]
    #[cfg(feature = "analysis")]
    fn parse_analysis_preset_normalization_edge_cases() {
        let (preset, _) = parse_analysis_preset(" receipt ").unwrap();
        assert_eq!(
            preset,
            tokmd_analysis::AnalysisPreset::Receipt,
            "Leading/trailing whitespace should be trimmed"
        );
        let (preset, _) = parse_analysis_preset("\tHEALTH\n").unwrap();
        assert_eq!(
            preset,
            tokmd_analysis::AnalysisPreset::Health,
            "Tabs and newlines should be trimmed, case normalized"
        );
        let (preset, _) = parse_analysis_preset("ReCeIpT").unwrap();
        assert_eq!(
            preset,
            tokmd_analysis::AnalysisPreset::Receipt,
            "Mixed case should be normalized to lowercase"
        );
        let (preset, _) = parse_analysis_preset("ESTIMATE").unwrap();
        assert_eq!(
            preset,
            tokmd_analysis::AnalysisPreset::Estimate,
            "Uppercase should be normalized"
        );
        let (preset, normalized) = parse_analysis_preset(" DeEp ").unwrap();
        assert_eq!(preset, tokmd_analysis::AnalysisPreset::Deep);
        assert_eq!(normalized, "deep", "Should be trimmed and lowercased");
    }
    #[cfg(feature = "cockpit")]
    #[test]
    fn cockpit_workflow_range_mode_parsing() {
        assert!(matches!(
            parse_cockpit_range_mode("three-dot").expect("three-dot should parse"),
            tokmd_git::GitRangeMode::ThreeDot
        ));
        assert!(matches!(
            parse_cockpit_range_mode("3dot").expect("3dot should parse"),
            tokmd_git::GitRangeMode::ThreeDot
        ));
        assert!(matches!(
            parse_cockpit_range_mode("two-dot").expect("two-dot should parse"),
            tokmd_git::GitRangeMode::TwoDot
        ));
        assert!(matches!(
            parse_cockpit_range_mode("2dot").expect("2dot should parse"),
            tokmd_git::GitRangeMode::TwoDot
        ));
        assert!(matches!(
            parse_cockpit_range_mode(" THREE-DOT ").expect("trimmed/case-insensitive parse"),
            tokmd_git::GitRangeMode::ThreeDot
        ));
    }
    #[cfg(feature = "cockpit")]
    #[test]
    fn cockpit_workflow_range_mode_invalid_rejected() {
        let err = parse_cockpit_range_mode("invalid").expect_err("invalid mode should fail");
        let msg = err.to_string();
        assert!(
            msg.contains("range_mode"),
            "Error should reference range_mode field; got: {msg}"
        );
    }
}
// Compiles the README's code examples as doctests (doctest builds only).
#[cfg(doctest)]
#[doc = include_str!("../README.md")]
pub mod readme_doctests {}