use crate::config::{
ResolvedLibrary, WorkspaceConfig, WorkspaceConfigError, load_workspace_config,
load_workspace_context,
};
use anyhow::{Context, Result, bail};
use clap::{Args, Subcommand, ValueEnum};
use serde::{Serialize, Serializer};
use spec_core::export::build_export_bundle;
use spec_core::generator::{
GenerateOptions, clean_output_dir, generate_and_write_molecule_tests,
generate_code_with_options, generate_mod_rs, safe_output_path, write_generated_file,
};
use spec_core::loader::{
is_unit_spec, load_directory_report, load_file, load_molecule_test_directory,
load_molecule_test_directory_report,
};
use spec_core::normalizer::normalize_spec;
use spec_core::passport::{
ArtifactProvenance, PassportEvidence, PassportTestResult, build_passport_with_evidence,
compute_contract_hash, ensure_gitignore_entry, read_passport, rfc3339_now, write_passport,
};
use spec_core::pipeline::{
ParsedCargoTestResult, Verbosity, cargo_available, parse_cargo_test_output, run_cargo_build,
run_cargo_test, workspace_root_for, zero_tests_ran,
};
use spec_core::types::{
DepRef, LoadedMoleculeTest, LoadedSpec, QualifiedUnitRef, ResolvedMoleculeTest, ResolvedSpec,
};
use spec_core::validator::{
QualifiedLoadedSpec, ValidationOptions, check_spec_versions, validate_full_with_options,
validate_molecule_test_covers, validate_molecule_test_semantic,
validate_no_duplicate_molecule_test_ids, validate_no_duplicate_qualified_ids,
validate_qualified_deps_exist_with_options,
};
#[cfg(test)]
use spec_core::validator::{validate_deps_exist_with_options, validate_no_duplicate_ids};
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::time::{Duration, SystemTime, UNIX_EPOCH};
/// Result tuple of loading unit specs from a path:
/// (loaded specs, loader errors, loader warnings, count of spec files seen).
/// The count drives the "0 units found" and "N units valid" summary lines.
type CollectedSpecs = (
    Vec<LoadedSpec>,
    Vec<spec_core::SpecError>,
    Vec<spec_core::SpecWarning>,
    usize,
);
/// Diagnostic messages grouped (and sorted) by file path; used to batch
/// errors/warnings for text output via `print_diagnostics`.
type DiagnosticMap = BTreeMap<String, Vec<String>>;
/// Specs loaded from one imported library, tagged with the alias the root
/// workspace refers to it by.
struct ImportedLibrarySpecs {
    alias: String,
    specs: Vec<LoadedSpec>,
}
/// Everything collected for a validation pass: the workspace's own specs,
/// specs pulled in from imported libraries, and the loader diagnostics
/// gathered while reading them. `total_files` counts root spec files only.
struct ValidationSpecCollection {
    root_specs: Vec<LoadedSpec>,
    imported_libraries: Vec<ImportedLibrarySpecs>,
    loader_errors: Vec<spec_core::SpecError>,
    loader_warnings: Vec<spec_core::SpecWarning>,
    total_files: usize,
}
impl ValidationSpecCollection {
    /// Returns references to every loaded spec — root specs first, then the
    /// specs of each imported library in library order — with the result
    /// vector sized up front to avoid reallocation.
    fn all_specs(&self) -> Vec<&LoadedSpec> {
        let imported_len: usize = self
            .imported_libraries
            .iter()
            .map(|library| library.specs.len())
            .sum();
        let mut specs = Vec::with_capacity(self.root_specs.len() + imported_len);
        specs.extend(self.root_specs.iter());
        specs.extend(
            self.imported_libraries
                .iter()
                .flat_map(|library| library.specs.iter()),
        );
        specs
    }
}
/// Schema version stamped into every JSON response this CLI emits.
const JSON_SCHEMA_VERSION: u8 = 2;
/// Age (seconds) after which a concurrent-writer marker file is presumably
/// treated as dead — TODO confirm in `count_other_active_passport_writers`
/// (not visible in this chunk).
const CONCURRENT_PASSPORT_WRITER_TTL_SECS: u64 = 300;
/// `--format` selector shared by `validate` and `status`.
#[derive(ValueEnum, Debug, Clone, Copy, PartialEq, Eq)]
pub enum OutputFormat {
    Text,
    Json,
}
/// Top-level JSON payload for `spec validate --format json`.
#[derive(Serialize)]
struct JsonValidateResponse {
    schema_version: u8,
    // "valid" when `errors` is empty, "invalid" otherwise.
    status: &'static str,
    errors: Vec<JsonErrorEntry>,
    warnings: Vec<String>,
}
/// Top-level JSON payload for `spec status --format json`.
#[derive(Serialize)]
struct JsonStatusResponse {
    schema_version: u8,
    units: Vec<JsonStatusUnit>,
    // Errors not attributable to a single root unit (loader / imported-library
    // failures); omitted from the JSON when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    loader_errors: Vec<JsonErrorEntry>,
}
/// Per-unit entry in the status report: health classification plus any
/// validation errors scoped to that unit's spec file.
#[derive(Serialize)]
struct JsonStatusUnit {
    id: String,
    status: HealthState,
    // Human-readable explanation for non-valid states (e.g. "build failed").
    #[serde(skip_serializing_if = "Option::is_none")]
    reason: Option<String>,
    errors: Vec<JsonErrorEntry>,
    // Timestamp of the passport evidence the status was judged from.
    #[serde(skip_serializing_if = "Option::is_none")]
    evidence_at: Option<String>,
}
/// Generic, sparse error record for JSON output. Only `code` is always
/// present; each error kind fills in the subset of optional fields that
/// apply to it, and unset fields are skipped during serialization.
#[derive(Clone, Serialize)]
struct JsonErrorEntry {
    #[serde(skip_serializing_if = "Option::is_none")]
    unit: Option<String>,
    code: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    path: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    dep: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    field: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    value: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    message: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    id: Option<String>,
    // Secondary path, presumably for errors that involve two files
    // (e.g. duplicate ids) — confirm at the construction sites.
    #[serde(skip_serializing_if = "Option::is_none")]
    path2: Option<String>,
    // Dependency-cycle participants, for cycle errors.
    #[serde(skip_serializing_if = "Option::is_none")]
    cycle: Option<Vec<String>>,
}
/// Flatten a workspace-config failure into the generic JSON error shape.
/// Only `code`, `path` (the config file's path), and `message` are set.
fn workspace_config_error_to_json_entry(err: &WorkspaceConfigError) -> JsonErrorEntry {
    let code = err.code().to_string();
    let config_path = err.config_path().display().to_string();
    let detail = err.detail_message();
    JsonErrorEntry {
        unit: None,
        code,
        path: Some(config_path),
        dep: None,
        field: None,
        value: None,
        message: Some(detail),
        id: None,
        path2: None,
        cycle: None,
    }
}
/// Pretty-print a validate response as JSON on stdout (no trailing newline),
/// flushing explicitly so output is complete even if the caller exits the
/// process immediately afterwards.
fn emit_json_validate_response(response: &JsonValidateResponse) -> Result<()> {
    let rendered = serde_json::to_string_pretty(response)?;
    print!("{rendered}");
    std::io::stdout().flush()?;
    Ok(())
}
/// Pretty-print a status response as JSON on stdout (no trailing newline),
/// flushing explicitly so output is complete even if the caller exits the
/// process immediately afterwards.
fn emit_json_status_response(response: &JsonStatusResponse) -> Result<()> {
    let rendered = serde_json::to_string_pretty(response)?;
    print!("{rendered}");
    std::io::stdout().flush()?;
    Ok(())
}
/// Emit a JSON "invalid" validate response carrying only the workspace-config
/// error, then terminate the process with exit code 1. The `Result` return
/// type exists purely so call sites can `return` this directly; the function
/// never actually returns.
fn emit_json_validate_workspace_config_failure(err: &WorkspaceConfigError) -> Result<()> {
    let response = JsonValidateResponse {
        schema_version: JSON_SCHEMA_VERSION,
        status: "invalid",
        errors: vec![workspace_config_error_to_json_entry(err)],
        warnings: Vec::new(),
    };
    emit_json_validate_response(&response)?;
    std::process::exit(1);
}
/// Emit a JSON status response with no units and only the workspace-config
/// error, then terminate the process with exit code 1. The `Result` return
/// type exists purely so call sites can `return` this directly; the function
/// never actually returns.
fn emit_json_status_workspace_config_failure(err: &WorkspaceConfigError) -> Result<()> {
    let response = JsonStatusResponse {
        schema_version: JSON_SCHEMA_VERSION,
        units: Vec::new(),
        loader_errors: vec![workspace_config_error_to_json_entry(err)],
    };
    emit_json_status_response(&response)?;
    std::process::exit(1);
}
/// Builder-style bag of the optional fields of `JsonErrorEntry` / a
/// diagnostic message; presumably filled in per `SpecError` variant at a
/// construction site outside this chunk (e.g. `spec_error_to_json_entry`)
/// — confirm there. `Default` gives an all-`None` starting point.
#[derive(Default)]
struct ErrorFields {
    unit: Option<String>,
    path: Option<String>,
    dep: Option<String>,
    field: Option<String>,
    value: Option<String>,
    message: Option<String>,
    id: Option<String>,
    path2: Option<String>,
    cycle: Option<Vec<String>>,
}
/// Health classification for a unit, as reported by `spec status`.
/// Variants are listed worst-to-best; `compute_health_status` checks them
/// in this order and the first applicable state wins.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum HealthState {
    Invalid,
    Failing,
    Stale,
    Incomplete,
    Untested,
    Valid,
}
impl HealthState {
    /// Lowercase machine-readable name; also the JSON serialization.
    fn as_str(self) -> &'static str {
        match self {
            Self::Invalid => "invalid",
            Self::Failing => "failing",
            Self::Stale => "stale",
            Self::Incomplete => "incomplete",
            Self::Untested => "untested",
            Self::Valid => "valid",
        }
    }
    /// True only for the fully healthy state.
    fn is_valid(self) -> bool {
        self == Self::Valid
    }
    /// One-character marker used by the text status listing.
    fn symbol(self) -> &'static str {
        match self {
            Self::Invalid | Self::Failing => "✗",
            Self::Stale => "~",
            Self::Incomplete => "?",
            Self::Untested => "—",
            Self::Valid => "✓",
        }
    }
}
impl Serialize for HealthState {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.as_str())
}
}
/// Inputs for one passport-writing pass: the directory passports live under
/// and the specs to write passports for. (Its consumers are outside this
/// chunk — presumably `finalize_passports`' helpers; confirm there.)
struct PassportWritePlan<'a> {
    passport_root: &'a Path,
    specs: &'a [LoadedSpec],
}
/// RAII registration of this process as an active passport writer.
/// Holds the marker file created by `begin_in`; `None` means registration
/// failed (best-effort) and there is nothing to clean up. The marker is
/// removed on drop.
struct ConcurrentPassportWriteGuard {
    marker_path: Option<PathBuf>,
}
impl ConcurrentPassportWriteGuard {
    /// Register this process as an active passport writer for
    /// `passport_root` and warn on stderr if other live writers are
    /// detected. Registration is best-effort: if the marker cannot be
    /// created, an inert guard (`marker_path: None`) is returned and the
    /// write proceeds without the warning.
    fn begin(passport_root: &Path) -> Self {
        match Self::begin_in(
            passport_root,
            &std::env::temp_dir(),
            std::process::id(),
            SystemTime::now(),
        ) {
            Ok((guard, other_writers)) => {
                if let Some(warning) =
                    concurrent_passport_write_warning_message(passport_root, other_writers)
                {
                    eprintln!("{warning}");
                }
                guard
            }
            Err(_) => Self { marker_path: None },
        }
    }
    /// Core of [`Self::begin`] with the temp dir, pid, and clock injected
    /// (presumably for testability — confirm with the test suite).
    ///
    /// Creates the per-root registry directory under `registry_base`,
    /// drops a marker file named from `pid`/`now`, and counts other
    /// writers whose markers are still active. Returns the guard (which
    /// removes the marker on drop) and that count.
    fn begin_in(
        passport_root: &Path,
        registry_base: &Path,
        pid: u32,
        now: SystemTime,
    ) -> Result<(Self, usize)> {
        let registry_dir = concurrent_passport_writer_registry_dir(passport_root, registry_base);
        // FIX: the source had `®istry_dir` here and below — a mojibake of
        // `&registry_dir` (`&reg` corrupted into `®`), which does not
        // compile. Restored the intended borrow.
        fs::create_dir_all(&registry_dir)
            .with_context(|| format!("Failed to create {}", registry_dir.display()))?;
        let marker_path = registry_dir.join(concurrent_passport_writer_marker_name(pid, now));
        fs::write(&marker_path, "")
            .with_context(|| format!("Failed to write {}", marker_path.display()))?;
        let other_writers = count_other_active_passport_writers(&registry_dir, pid, now)?;
        Ok((
            Self {
                marker_path: Some(marker_path),
            },
            other_writers,
        ))
    }
}
impl Drop for ConcurrentPassportWriteGuard {
    /// Best-effort cleanup: delete our marker file and then try to remove
    /// the registry directory (which only succeeds once it is empty).
    /// Errors are deliberately ignored — a leftover marker is presumably
    /// expired via the writer TTL; confirm in the registry helpers.
    fn drop(&mut self) {
        if let Some(marker_path) = self.marker_path.take() {
            let _ = fs::remove_file(&marker_path);
            if let Some(parent) = marker_path.parent() {
                let _ = fs::remove_dir(parent);
            }
        }
    }
}
/// Top-level CLI subcommands. `Completions` is intercepted in `main`
/// (see `Command::run`), all others are dispatched here.
#[derive(Subcommand, Debug)]
pub enum Command {
    #[command(about = "Validate .unit.spec files")]
    Validate(ValidateArgs),
    #[command(about = "Show per-unit validation, passport, and staleness status")]
    Status(StatusArgs),
    #[command(about = "Generate Rust source files from .unit.spec files")]
    Generate(GenerateArgs),
    #[command(about = "Validate, generate, and run cargo build")]
    Build(BuildArgs),
    #[command(about = "Validate, generate, run cargo build and cargo test")]
    Test(TestArgs),
    #[command(about = "Export spec metadata as a JSON bundle")]
    Export(ExportArgs),
    #[command(about = "Print shell completion script to stdout")]
    Completions(CompletionsArgs),
}
impl Command {
pub fn run(self) -> Result<()> {
match self {
Self::Validate(args) => validate_command(&args.path, args.no_strict, args.format),
Self::Status(args) => status_command(&args.path, args.format),
Self::Generate(args) => generate_command(&args.path, args.output.as_deref()),
Self::Build(args) => {
let config = load_workspace_config(&args.path)?;
build_command(
&args.path,
args.output.as_deref(),
args.crate_root.as_deref(),
&config,
)
}
Self::Test(args) => {
let config = load_workspace_config(&args.path)?;
test_command(
&args.path,
args.output.as_deref(),
args.crate_root.as_deref(),
&config,
)
}
Self::Export(args) => export_command(&args.path, args.output.as_deref()),
Self::Completions(_) => unreachable!("handled in main"),
}
}
}
/// Arguments for `spec validate`.
#[derive(Args, Debug)]
pub struct ValidateArgs {
    #[arg(
        value_name = "PATH",
        help = "Directory containing .unit.spec files, or a single .unit.spec file"
    )]
    pub path: PathBuf,
    #[arg(
        long,
        help = "Downgrade missing-dep errors to warnings and exit 0 (validation only)"
    )]
    pub no_strict: bool,
    #[arg(long, value_enum, default_value_t = OutputFormat::Text)]
    pub format: OutputFormat,
}
/// Arguments for `spec status`.
#[derive(Args, Debug)]
pub struct StatusArgs {
    #[arg(
        value_name = "PATH",
        help = "Directory containing .unit.spec files, or a single .unit.spec file"
    )]
    pub path: PathBuf,
    #[arg(long, value_enum, default_value_t = OutputFormat::Text)]
    pub format: OutputFormat,
}
/// Arguments for `spec generate`.
#[derive(Args, Debug)]
pub struct GenerateArgs {
    #[arg(
        value_name = "PATH",
        help = "Directory containing .unit.spec files, or a single .unit.spec file"
    )]
    pub path: PathBuf,
    #[arg(
        long,
        help = "Output directory for generated Rust files (default: {crate_root}/src/generated)"
    )]
    pub output: Option<PathBuf>,
}
/// Arguments for `spec build` (validate + generate + cargo build).
#[derive(Args, Debug)]
pub struct BuildArgs {
    #[arg(
        value_name = "PATH",
        help = "Directory containing .unit.spec files, or a single .unit.spec file"
    )]
    pub path: PathBuf,
    #[arg(
        long,
        help = "Output directory for generated Rust files (default: {crate_root}/src/generated)"
    )]
    pub output: Option<PathBuf>,
    #[arg(
        long,
        help = "Path to the Cargo project root (overrides spec.toml and ancestor walk)"
    )]
    pub crate_root: Option<PathBuf>,
}
/// Arguments for `spec test` (validate + generate + cargo build + cargo test).
/// Intentionally mirrors `BuildArgs`.
#[derive(Args, Debug)]
pub struct TestArgs {
    #[arg(
        value_name = "PATH",
        help = "Directory containing .unit.spec files, or a single .unit.spec file"
    )]
    pub path: PathBuf,
    #[arg(
        long,
        help = "Output directory for generated Rust files (default: {crate_root}/src/generated)"
    )]
    pub output: Option<PathBuf>,
    #[arg(
        long,
        help = "Path to the Cargo project root (overrides spec.toml and ancestor walk)"
    )]
    pub crate_root: Option<PathBuf>,
}
/// Arguments for `spec export`.
#[derive(Args, Debug)]
pub struct ExportArgs {
    #[arg(
        value_name = "PATH",
        help = "Directory containing .unit.spec files, or a single .unit.spec file"
    )]
    pub path: PathBuf,
    #[arg(long, help = "Write JSON bundle to FILE instead of stdout")]
    pub output: Option<PathBuf>,
}
/// Arguments for `spec completions`; handled directly in `main`.
#[derive(Args, Debug)]
pub struct CompletionsArgs {
    #[arg(value_enum)]
    pub shell: clap_complete::Shell,
}
/// `spec validate`: load and validate all unit specs under `path` (plus
/// molecule tests when `path` is a directory), then report in text or JSON.
///
/// Any error yields a non-zero exit: `bail!` in text mode,
/// `std::process::exit(1)` after emitting the JSON payload. `no_strict`
/// disables strict dependency checking (see `ValidationOptions`).
fn validate_command(path: &Path, no_strict: bool, format: OutputFormat) -> Result<()> {
    // Workspace-config failures get a structured JSON response (and exit 1)
    // in JSON mode; every other context-load error propagates normally.
    let context = match load_workspace_context(path) {
        Ok(context) => context,
        Err(err) if matches!(format, OutputFormat::Json) => {
            if let Some(config_err) = err.downcast_ref::<WorkspaceConfigError>() {
                return emit_json_validate_workspace_config_failure(config_err);
            }
            return Err(err);
        }
        Err(err) => return Err(err),
    };
    let validation_specs = collect_validation_specs(path, &context.libraries)?;
    let config = context.config;
    let validation_options = ValidationOptions {
        strict_deps: !no_strict,
        allow_unsafe_local_test_expect: config.validation.allow_unsafe_local_test_expect,
    };
    let (mut validation_errors, validation_warnings) =
        finish_validation_with_imports(&validation_specs, &validation_options);
    // Cross-library crate-alias checks run against the root specs only.
    validation_errors.extend(validate_library_crate_aliases(
        &validation_specs.root_specs,
        path,
        &config,
    ));
    // Molecule tests are discovered only for directory inputs; their
    // `covers` references are validated against the root specs.
    let (molecule_errors, molecule_warnings, molecule_loader_errors) =
        if includes_directory_molecule_tests(path) {
            let molecule_report = load_molecule_test_directory_report(path);
            let (errors, warnings) =
                validate_molecule_tests(&molecule_report.tests, &validation_specs.root_specs);
            (errors, warnings, molecule_report.errors)
        } else {
            (Vec::new(), Vec::new(), Vec::new())
        };
    match format {
        OutputFormat::Text => {
            // Group every diagnostic by file; print warnings before the
            // summary line so the verdict comes last.
            let mut errors = DiagnosticMap::new();
            let mut warnings = DiagnosticMap::new();
            for err in validation_specs.loader_errors {
                push_error(&mut errors, err);
            }
            for err in validation_errors {
                push_error(&mut errors, err);
            }
            for err in molecule_loader_errors {
                push_error(&mut errors, err);
            }
            for err in molecule_errors {
                push_error(&mut errors, err);
            }
            for warning in validation_specs.loader_warnings {
                push_warning(&mut warnings, warning);
            }
            for warning in validation_warnings {
                push_warning(&mut warnings, warning);
            }
            for warning in molecule_warnings {
                push_warning(&mut warnings, warning);
            }
            let warning_count = count_messages(&warnings);
            if !warnings.is_empty() {
                print_diagnostics(&warnings);
            }
            if errors.is_empty() {
                // Success path: distinguish an empty workspace from a
                // clean one (optionally noting the warning count).
                if validation_specs.total_files == 0 {
                    println!("0 units found, nothing to validate.");
                } else {
                    println!(
                        "✅ {} unit{} valid{}",
                        validation_specs.total_files,
                        pluralize(validation_specs.total_files),
                        if warning_count == 0 {
                            String::new()
                        } else {
                            format!(" with {warning_count} warning{}", pluralize(warning_count))
                        }
                    );
                }
                return Ok(());
            }
            print_diagnostics(&errors);
            let file_count = count_unique_files(&errors);
            bail!(
                "❌ {} file{}, {} error{}",
                file_count,
                pluralize(file_count),
                count_messages(&errors),
                pluralize(count_messages(&errors))
            );
        }
        OutputFormat::Json => {
            // Map spec file path -> unit id so each JSON error can be
            // attributed to its unit where possible.
            let id_by_path: HashMap<String, String> = validation_specs
                .all_specs()
                .into_iter()
                .map(|s| (s.source.file_path.clone(), s.spec.id.clone()))
                .collect();
            let mut errors = Vec::with_capacity(
                validation_specs.loader_errors.len()
                    + validation_errors.len()
                    + molecule_loader_errors.len()
                    + molecule_errors.len(),
            );
            errors.extend(
                validation_specs
                    .loader_errors
                    .iter()
                    .map(|err| spec_error_to_json_entry(err, &id_by_path)),
            );
            errors.extend(
                validation_errors
                    .iter()
                    .map(|err| spec_error_to_json_entry(err, &id_by_path)),
            );
            errors.extend(
                molecule_loader_errors
                    .iter()
                    .map(|err| spec_error_to_json_entry(err, &id_by_path)),
            );
            errors.extend(
                molecule_errors
                    .iter()
                    .map(|err| spec_error_to_json_entry(err, &id_by_path)),
            );
            // Warnings are flattened to plain strings in the JSON schema.
            let warnings = validation_specs
                .loader_warnings
                .into_iter()
                .chain(validation_warnings)
                .chain(molecule_warnings)
                .map(|warning| warning.to_string())
                .collect();
            let has_errors = !errors.is_empty();
            let response = JsonValidateResponse {
                schema_version: JSON_SCHEMA_VERSION,
                status: if has_errors { "invalid" } else { "valid" },
                errors,
                warnings,
            };
            emit_json_validate_response(&response)?;
            if has_errors {
                std::process::exit(1);
            } else {
                Ok(())
            }
        }
    }
}
/// Computed health of one unit: the state, an optional human-readable
/// reason for non-valid states, and the `observed_at` timestamp of the
/// passport evidence it was judged from (if any).
struct HealthStatus {
    status: HealthState,
    reason: Option<String>,
    evidence_at: Option<String>,
}
/// Classify a unit's health from its validation errors, its stored passport,
/// and the freshly computed contract hash. Checks run in severity order and
/// the first match wins: invalid → failing → stale → incomplete → untested
/// → valid.
fn compute_health_status(
    errors: &[JsonErrorEntry],
    passport: Option<&spec_core::passport::Passport>,
    live_hash: Option<&str>,
) -> HealthStatus {
    // Any validation error makes the unit invalid, regardless of evidence.
    if !errors.is_empty() {
        return HealthStatus {
            status: HealthState::Invalid,
            reason: None,
            evidence_at: None,
        };
    }
    let evidence = passport.and_then(|p| p.evidence.as_ref());
    let evidence_at = evidence.map(|e| e.observed_at.clone());
    // Failing: recorded build non-pass (incl. timeout), or any failed test.
    if let Some(ev) = evidence {
        let build_failed = ev.build_status != "pass";
        let any_test_failed = ev.test_results.iter().any(|r| r.status == "fail");
        if build_failed || any_test_failed {
            // Build failure takes precedence in the reason string.
            let reason = if build_failed {
                match ev.build_status.as_str() {
                    "timeout" => "build timed out".to_string(),
                    _ => "build failed".to_string(),
                }
            } else {
                let n = ev
                    .test_results
                    .iter()
                    .filter(|r| r.status == "fail")
                    .count();
                format!("{} test{} failed", n, pluralize(n))
            };
            return HealthStatus {
                status: HealthState::Failing,
                reason: Some(reason),
                evidence_at,
            };
        }
    }
    // Stale: a passport exists but its stored contract hash differs from
    // the live one. A hash present on only one side also counts as changed.
    let stored_hash = passport.and_then(|p| p.contract_hash.as_deref());
    if passport.is_some() {
        let hash_changed = match (stored_hash, live_hash) {
            (Some(stored), Some(live)) => stored != live,
            (None, Some(_)) | (Some(_), None) => true,
            (None, None) => false,
        };
        if hash_changed {
            return HealthStatus {
                status: HealthState::Stale,
                reason: Some("contract changed since last test".to_string()),
                evidence_at,
            };
        }
    }
    // Incomplete: evidence exists but some tests never appeared in the
    // cargo output (status "unknown").
    if let Some(ev) = evidence {
        let unknown_count = ev
            .test_results
            .iter()
            .filter(|r| r.status == "unknown")
            .count();
        if unknown_count > 0 {
            return HealthStatus {
                status: HealthState::Incomplete,
                reason: Some(format!(
                    "{} test{} not observed in cargo output",
                    unknown_count,
                    pluralize(unknown_count)
                )),
                evidence_at,
            };
        }
    }
    // Untested: no evidence at all (also covers "no passport").
    if evidence.is_none() {
        return HealthStatus {
            status: HealthState::Untested,
            reason: Some("no evidence".to_string()),
            evidence_at: None,
        };
    }
    HealthStatus {
        status: HealthState::Valid,
        reason: None,
        evidence_at,
    }
}
/// `spec status`: validate every root unit, read its passport, and report a
/// per-unit health state (see `compute_health_status`) in text or JSON.
///
/// Errors are split into "global" ones (loader failures, errors not tied to
/// a root spec file) and per-unit ones; any global error or non-valid unit
/// causes exit code 1.
fn status_command(path: &Path, format: OutputFormat) -> Result<()> {
    // Workspace-config failures get a structured JSON response (and exit 1)
    // in JSON mode; other context-load errors propagate normally.
    let context = match load_workspace_context(path) {
        Ok(context) => context,
        Err(err) if matches!(format, OutputFormat::Json) => {
            if let Some(config_err) = err.downcast_ref::<WorkspaceConfigError>() {
                return emit_json_status_workspace_config_failure(config_err);
            }
            return Err(err);
        }
        Err(err) => return Err(err),
    };
    let config = context.config;
    let (root_specs, root_loader_errors, _root_loader_warnings, root_total_files) =
        collect_specs(path)?;
    // Pull in specs from libraries the root specs reference, so cross-library
    // dependency validation can resolve them.
    let (
        selected_libraries,
        imported_libraries,
        imported_loader_errors,
        _imported_loader_warnings,
        _imported_total,
    ) = load_referenced_validation_specs(&root_specs, &context.libraries);
    // Loader diagnostics are handled separately below (as "global" errors),
    // so the collection carries empty loader vectors.
    let validation_specs = ValidationSpecCollection {
        root_specs,
        imported_libraries,
        loader_errors: Vec::new(),
        loader_warnings: Vec::new(),
        total_files: root_total_files,
    };
    // Status always validates strictly, regardless of --no-strict elsewhere.
    let validation_options = ValidationOptions {
        strict_deps: true,
        allow_unsafe_local_test_expect: config.validation.allow_unsafe_local_test_expect,
    };
    let failed_import_aliases =
        imported_library_aliases_with_loader_errors(&imported_loader_errors, &selected_libraries);
    let (validation_errors, _validation_warnings) =
        finish_validation_with_imports(&validation_specs, &validation_options);
    // When a library failed to load at all, its dep-not-found errors are
    // noise on top of the loader error — drop them.
    let mut validation_errors = suppress_cross_library_dep_not_found_for_failed_imports(
        validation_errors,
        &failed_import_aliases,
    );
    validation_errors.extend(validate_library_crate_aliases(
        &validation_specs.root_specs,
        path,
        &config,
    ));
    // Map spec file path -> unit id for error attribution.
    let id_by_path: HashMap<String, String> = validation_specs
        .all_specs()
        .into_iter()
        .map(|s| (s.source.file_path.clone(), s.spec.id.clone()))
        .collect();
    let root_paths: HashSet<String> = validation_specs
        .root_specs
        .iter()
        .map(|spec| spec.source.file_path.clone())
        .collect();
    // Partition: errors touching a root spec file are attributed per-unit;
    // everything else (incl. all loader errors) is global.
    let mut errors_by_path: HashMap<String, Vec<JsonErrorEntry>> = HashMap::new();
    let mut global_errors = root_loader_errors;
    global_errors.extend(imported_loader_errors);
    for err in validation_errors {
        let entry = spec_error_to_json_entry(&err, &id_by_path);
        let root_error_paths: Vec<String> = error_paths(&err)
            .into_iter()
            .filter(|path| root_paths.contains(path))
            .collect();
        if root_error_paths.is_empty() {
            global_errors.push(err);
            continue;
        }
        // An error can touch several root files (e.g. duplicates); record
        // the same entry under each.
        for path in root_error_paths {
            errors_by_path.entry(path).or_default().push(entry.clone());
        }
    }
    let has_global_errors = !global_errors.is_empty();
    let loader_error_entries: Vec<JsonErrorEntry> = global_errors
        .iter()
        .map(|err| spec_error_to_json_entry(err, &id_by_path))
        .collect();
    if has_global_errors && matches!(format, OutputFormat::Text) {
        let mut diagnostics = DiagnosticMap::new();
        for err in global_errors {
            push_error(&mut diagnostics, err);
        }
        print_diagnostics(&diagnostics);
    }
    // Empty workspace with nothing wrong: report and exit cleanly.
    if root_total_files == 0 && validation_specs.root_specs.is_empty() && !has_global_errors {
        match format {
            OutputFormat::Text => {
                println!("0 units found, nothing to status.");
            }
            OutputFormat::Json => {
                let response = JsonStatusResponse {
                    schema_version: JSON_SCHEMA_VERSION,
                    units: vec![],
                    loader_errors: vec![],
                };
                emit_json_status_response(&response)?;
            }
        }
        return Ok(());
    }
    let mut units = Vec::with_capacity(validation_specs.root_specs.len());
    let mut needs_nonzero_exit = has_global_errors;
    for spec in &validation_specs.root_specs {
        let source_path = Path::new(&spec.source.file_path);
        // A passport read failure is non-fatal: warn (text mode only) and
        // treat the unit as having no passport.
        let passport = match read_passport(source_path) {
            Ok(passport) => passport,
            Err(err) => {
                if matches!(format, OutputFormat::Text) {
                    eprintln!(
                        "⚠ failed to read passport for {}: {err}",
                        source_path.display()
                    );
                }
                None
            }
        };
        let live_hash = compute_contract_hash(spec);
        // Consume this unit's attributed errors from the partition map.
        let errors = errors_by_path
            .remove(&spec.source.file_path)
            .unwrap_or_default();
        let health = compute_health_status(&errors, passport.as_ref(), live_hash.as_deref());
        if !health.status.is_valid() {
            needs_nonzero_exit = true;
        }
        units.push(JsonStatusUnit {
            id: spec.spec.id.clone(),
            status: health.status,
            reason: health.reason,
            errors,
            evidence_at: health.evidence_at,
        });
    }
    match format {
        OutputFormat::Text => {
            for unit in &units {
                print_status_unit(unit);
            }
        }
        OutputFormat::Json => {
            let response = JsonStatusResponse {
                schema_version: JSON_SCHEMA_VERSION,
                units,
                loader_errors: loader_error_entries,
            };
            emit_json_status_response(&response)?;
        }
    }
    if needs_nonzero_exit {
        std::process::exit(1);
    }
    Ok(())
}
/// Aliases of imported libraries that had at least one loader error, found
/// by checking whether any error's paths fall under the library's root.
fn imported_library_aliases_with_loader_errors(
    loader_errors: &[spec_core::SpecError],
    libraries: &[ResolvedLibrary],
) -> HashSet<String> {
    let mut failed_aliases = HashSet::new();
    for library in libraries {
        let has_loader_error = loader_errors.iter().any(|err| {
            error_paths(err)
                .iter()
                .any(|path| Path::new(path).starts_with(&library.root))
        });
        if has_loader_error {
            failed_aliases.insert(library.alias.clone());
        }
    }
    failed_aliases
}
/// Drop `CrossLibraryDepNotFound` errors that target a library whose import
/// itself failed to load — the loader error already explains the problem.
/// Errors whose dep cannot be parsed, or whose dep has no library alias,
/// are always kept.
fn suppress_cross_library_dep_not_found_for_failed_imports(
    errors: Vec<spec_core::SpecError>,
    failed_import_aliases: &HashSet<String>,
) -> Vec<spec_core::SpecError> {
    let mut kept = Vec::with_capacity(errors.len());
    for err in errors {
        let suppress = match &err {
            spec_core::SpecError::CrossLibraryDepNotFound { dep, .. } => DepRef::parse(dep)
                .ok()
                .and_then(|dep_ref| dep_ref.library_alias().map(str::to_string))
                .is_some_and(|alias| failed_import_aliases.contains(&alias)),
            _ => false,
        };
        if !suppress {
            kept.push(err);
        }
    }
    kept
}
/// `spec export`: fully validate the workspace (specs + molecule tests),
/// then serialize an export bundle as pretty JSON to `output` or stdout.
/// Any validation error prints diagnostics and bails with a summary.
fn export_command(path: &Path, output: Option<&Path>) -> Result<()> {
    let (specs, loader_errors, loader_warnings, total_files) = collect_specs(path)?;
    let context = load_workspace_context(path)?;
    let config = context.config;
    // Export always validates strictly.
    let validation_options = ValidationOptions {
        strict_deps: true,
        allow_unsafe_local_test_expect: config.validation.allow_unsafe_local_test_expect,
    };
    let (
        _selected_libraries,
        imported_libraries,
        imported_loader_errors,
        imported_loader_warnings,
        _imported_total,
    ) = load_referenced_validation_specs(&specs, &context.libraries);
    // Loader diagnostics are merged into the maps below, so the collection
    // carries empty loader vectors.
    let validation_specs = ValidationSpecCollection {
        root_specs: specs.clone(),
        imported_libraries,
        loader_errors: Vec::new(),
        loader_warnings: Vec::new(),
        total_files,
    };
    let (mut validation_errors, validation_warnings) =
        finish_validation_with_imports(&validation_specs, &validation_options);
    validation_errors.extend(validate_library_crate_aliases(
        &validation_specs.root_specs,
        path,
        &config,
    ));
    let mut errors = DiagnosticMap::new();
    let mut warnings = DiagnosticMap::new();
    for err in loader_errors {
        push_error(&mut errors, err);
    }
    for err in imported_loader_errors {
        push_error(&mut errors, err);
    }
    for err in validation_errors {
        push_error(&mut errors, err);
    }
    for warning in loader_warnings {
        push_warning(&mut warnings, warning);
    }
    for warning in imported_loader_warnings {
        push_warning(&mut warnings, warning);
    }
    for warning in validation_warnings {
        push_warning(&mut warnings, warning);
    }
    if !warnings.is_empty() {
        print_diagnostics(&warnings);
    }
    // First gate: spec-level errors abort before touching molecule tests.
    if !errors.is_empty() {
        print_diagnostics(&errors);
        let file_count = count_unique_files(&errors);
        bail!(
            "❌ {} file{}, {} error{}",
            file_count,
            pluralize(file_count),
            count_messages(&errors),
            pluralize(count_messages(&errors))
        );
    }
    // For a single-file input, molecule tests and git provenance are
    // resolved relative to the containing directory.
    let export_dir = if path.is_file() {
        path.parent().unwrap_or(path)
    } else {
        path
    };
    let provenance = resolve_git_provenance(export_dir);
    let molecule_tests = if includes_directory_molecule_tests(path) {
        load_molecule_test_directory(export_dir).with_context(|| {
            format!(
                "Failed to load molecule tests from {}",
                export_dir.display()
            )
        })?
    } else {
        Vec::new()
    };
    let (molecule_errors, molecule_warnings) = validate_molecule_tests(&molecule_tests, &specs);
    for err in molecule_errors {
        push_error(&mut errors, err);
    }
    if !molecule_warnings.is_empty() {
        let mut mol_warn_map = DiagnosticMap::new();
        for warning in molecule_warnings {
            push_warning(&mut mol_warn_map, warning);
        }
        print_diagnostics(&mol_warn_map);
    }
    // Second gate: `errors` was empty at the first gate, so anything here
    // came from molecule-test validation.
    if !errors.is_empty() {
        print_diagnostics(&errors);
        let file_count = count_unique_files(&errors);
        bail!(
            "❌ {} file{}, {} error{}",
            file_count,
            pluralize(file_count),
            count_messages(&errors),
            pluralize(count_messages(&errors))
        );
    }
    let bundle = build_export_bundle(&specs, &molecule_tests, &rfc3339_now(), provenance.as_ref());
    let json = serde_json::to_string_pretty(&bundle)?;
    match output {
        Some(path) => {
            validate_export_output_path(path)?;
            fs::write(path, json)
                .with_context(|| format!("Failed to write export bundle to {}", path.display()))?;
        }
        None => {
            print!("{json}");
        }
    }
    Ok(())
}
/// `spec generate`: load specs, run pre-generation checks, resolve the
/// output directory, generate the Rust sources, and finalize passports for
/// the generated specs (without build/test evidence — both `None`).
fn generate_command(path: &Path, output: Option<&Path>) -> Result<()> {
    let config = load_workspace_config(path)?;
    let (root_specs, loader_errors, loader_warnings, _total_files) = collect_specs(path)?;
    // Loader errors are fatal before generation; warnings are only printed.
    if !loader_errors.is_empty() {
        let mut errors = DiagnosticMap::new();
        for err in loader_errors {
            push_error(&mut errors, err);
        }
        print_diagnostics(&errors);
        bail!("❌ unable to load units before generation");
    }
    if !loader_warnings.is_empty() {
        let mut warnings = DiagnosticMap::new();
        for warning in loader_warnings {
            push_warning(&mut warnings, warning);
        }
        print_diagnostics(&warnings);
    }
    // Cross-library crate aliases must resolve before any code is written.
    let alias_errors = validate_library_crate_aliases(&root_specs, path, &config);
    if !alias_errors.is_empty() {
        let mut errors = DiagnosticMap::new();
        for err in alias_errors {
            push_error(&mut errors, err);
        }
        print_diagnostics(&errors);
        bail!("❌ cross-library crate alias validation failed");
    }
    // For a single-file input the spec root is its containing directory.
    let spec_root = if path.is_file() {
        path.parent().unwrap_or(path)
    } else {
        path
    };
    // Output dir precedence: --output flag, else spec.toml crate_root,
    // else the ancestor-walk workspace root; always under src/generated.
    let resolved_output: PathBuf = match output {
        Some(p) => p.to_path_buf(),
        None => {
            let crate_root = match config.pipeline.crate_root.as_deref() {
                Some(p) => p.to_path_buf(),
                None => workspace_root_for(spec_root)?,
            };
            crate_root.join("src/generated")
        }
    };
    let generated = generate_specs(path, &resolved_output)?;
    if !generated.specs.is_empty() {
        finalize_passports(
            spec_root,
            &generated.specs,
            &generated.generated_at,
            None,
            None,
        )?;
    }
    Ok(())
}
/// Core generation pipeline: load and validate specs (and molecule tests),
/// generate one Rust file per spec plus `mod.rs` files and molecule-test
/// files under `output`, and delete any stale previously generated files.
///
/// Returns the loaded specs together with the generation timestamp so the
/// caller can finalize passports. Bails (after printing diagnostics) on any
/// loader or validation error.
fn generate_specs(path: &Path, output: &Path) -> Result<GeneratedSpecs> {
    let (specs, loader_errors, loader_warnings, total_files) = collect_specs(path)?;
    // Zero-spec path: still surface loader/molecule diagnostics, and clean
    // the output directory (empty keep-set => previously generated files
    // are removed) before reporting.
    if total_files == 0 {
        let mut errors = DiagnosticMap::new();
        let mut warnings = DiagnosticMap::new();
        for err in loader_errors {
            push_error(&mut errors, err);
        }
        for warning in loader_warnings {
            push_warning(&mut warnings, warning);
        }
        let mut has_molecule_tests = false;
        if includes_directory_molecule_tests(path) {
            let molecule_report = load_molecule_test_directory_report(path);
            has_molecule_tests =
                !molecule_report.tests.is_empty() || !molecule_report.errors.is_empty();
            let (molecule_errors, molecule_warnings) =
                validate_molecule_tests(&molecule_report.tests, &specs);
            for err in molecule_report.errors {
                push_error(&mut errors, err);
            }
            for err in molecule_errors {
                push_error(&mut errors, err);
            }
            for warning in molecule_report.warnings {
                push_warning(&mut warnings, warning);
            }
            for warning in molecule_warnings {
                push_warning(&mut warnings, warning);
            }
        }
        let output_base = ensure_output_marker(output)?;
        let generated_rs_rel_paths = HashSet::<PathBuf>::new();
        clean_output_dir(&output_base, &generated_rs_rel_paths).with_context(|| {
            format!("Failed to clean output directory {}", output_base.display())
        })?;
        if !warnings.is_empty() {
            print_diagnostics(&warnings);
        }
        if !errors.is_empty() {
            print_diagnostics(&errors);
            let file_count = count_unique_files(&errors);
            bail!(
                "❌ {} file{}, {} error{}",
                file_count,
                pluralize(file_count),
                count_messages(&errors),
                pluralize(count_messages(&errors))
            );
        }
        // Molecule tests without any unit specs cannot validate `covers`.
        if has_molecule_tests {
            bail!("❌ 0 unit specs found; molecule tests require unit specs to validate covers");
        }
        println!("0 units found, nothing to generate.");
        return Ok(GeneratedSpecs {
            specs,
            generated_at: rfc3339_now(),
        });
    }
    // Full validation (strict), including specs imported from libraries.
    let context = load_workspace_context(path)?;
    let config = context.config;
    let validation_options = ValidationOptions {
        strict_deps: true,
        allow_unsafe_local_test_expect: config.validation.allow_unsafe_local_test_expect,
    };
    let (
        _selected_libraries,
        imported_libraries,
        imported_loader_errors,
        imported_loader_warnings,
        _imported_total,
    ) = load_referenced_validation_specs(&specs, &context.libraries);
    let validation_specs = ValidationSpecCollection {
        root_specs: specs.clone(),
        imported_libraries,
        loader_errors: Vec::new(),
        loader_warnings: Vec::new(),
        total_files,
    };
    let (validation_errors, validation_warnings) =
        finish_validation_with_imports(&validation_specs, &validation_options);
    let mut errors = DiagnosticMap::new();
    let mut warnings = DiagnosticMap::new();
    for err in loader_errors {
        push_error(&mut errors, err);
    }
    for err in imported_loader_errors {
        push_error(&mut errors, err);
    }
    for err in validation_errors {
        push_error(&mut errors, err);
    }
    for warning in loader_warnings {
        push_warning(&mut warnings, warning);
    }
    for warning in imported_loader_warnings {
        push_warning(&mut warnings, warning);
    }
    for warning in validation_warnings {
        push_warning(&mut warnings, warning);
    }
    if !warnings.is_empty() {
        print_diagnostics(&warnings);
    }
    if !errors.is_empty() {
        print_diagnostics(&errors);
        let file_count = count_unique_files(&errors);
        bail!(
            "❌ {} file{}, {} error{}",
            file_count,
            pluralize(file_count),
            count_messages(&errors),
            pluralize(count_messages(&errors))
        );
    }
    // Normalize each spec into its resolved form before code generation.
    let mut resolved_specs = Vec::new();
    for spec in &specs {
        resolved_specs.push(
            normalize_spec(spec.spec.clone())
                .with_context(|| format!("Failed to normalize {}", spec.source.file_path))?,
        );
    }
    let molecule_tests = if includes_directory_molecule_tests(path) {
        let spec_dir = path;
        load_molecule_test_directory(spec_dir)
            .with_context(|| format!("Failed to load molecule tests from {}", spec_dir.display()))?
    } else {
        Vec::new()
    };
    let (mol_errors, mol_warnings) = validate_molecule_tests(&molecule_tests, &specs);
    if !mol_warnings.is_empty() {
        let mut warn_map = DiagnosticMap::new();
        for w in mol_warnings {
            push_warning(&mut warn_map, w);
        }
        print_diagnostics(&warn_map);
    }
    if !mol_errors.is_empty() {
        let mut err_map = DiagnosticMap::new();
        for e in mol_errors {
            push_error(&mut err_map, e);
        }
        print_diagnostics(&err_map);
        let file_count = count_unique_files(&err_map);
        bail!(
            "❌ {} file{}, {} error{}",
            file_count,
            pluralize(file_count),
            count_messages(&err_map),
            pluralize(count_messages(&err_map))
        );
    }
    let resolved_molecule_tests: Vec<ResolvedMoleculeTest> = molecule_tests
        .iter()
        .map(ResolvedMoleculeTest::from_loaded)
        .collect();
    // Build the keep-set of every file this run will produce; anything in
    // the output dir not in this set is removed by clean_output_dir below.
    let mut generated_rs_rel_paths = HashSet::<PathBuf>::new();
    for spec in &resolved_specs {
        generated_rs_rel_paths.insert(path_for_spec(spec));
    }
    let namespaces = build_namespaces(&resolved_specs, &resolved_molecule_tests);
    // One mod.rs per namespace module (the empty module path is the root).
    for module_path in namespaces.keys() {
        let mod_rs_rel = if module_path.is_empty() {
            PathBuf::from("mod.rs")
        } else {
            PathBuf::from(module_path.replace('/', std::path::MAIN_SEPARATOR_STR)).join("mod.rs")
        };
        generated_rs_rel_paths.insert(mod_rs_rel);
    }
    let output_base = ensure_output_marker(output)?;
    let generate_options = GenerateOptions {
        allow_unsafe_local_test_expect: config.validation.allow_unsafe_local_test_expect,
    };
    // Write one generated file per spec.
    for spec in &resolved_specs {
        let content = generate_code_with_options(spec, &generate_options)
            .with_context(|| format!("Failed to generate Rust for {}", spec.id))?;
        let output_path = output_base.join(path_for_spec(spec));
        write_generated_file(&output_path.display().to_string(), &content)
            .with_context(|| format!("Failed to write {}", output_path.display()))?;
    }
    // Write the mod.rs for each namespace module.
    for (module_path, namespace) in &namespaces {
        let content = generate_mod_rs(
            &namespace.unit_files.iter().cloned().collect::<Vec<_>>(),
            &namespace.subdirs.iter().cloned().collect::<Vec<_>>(),
            namespace.has_molecule_tests,
        )
        .with_context(|| format!("Failed to generate mod.rs for module '{module_path}'"))?;
        let mod_rs_rel = if module_path.is_empty() {
            PathBuf::from("mod.rs")
        } else {
            PathBuf::from(module_path.replace('/', std::path::MAIN_SEPARATOR_STR)).join("mod.rs")
        };
        let mod_rs_path = output_base.join(mod_rs_rel);
        write_generated_file(&mod_rs_path.display().to_string(), &content)
            .with_context(|| format!("Failed to write {}", mod_rs_path.display()))?;
    }
    let specs_by_id: HashMap<&str, &ResolvedSpec> =
        resolved_specs.iter().map(|s| (s.id.as_str(), s)).collect();
    let molecule_test_paths =
        generate_and_write_molecule_tests(&resolved_molecule_tests, &specs_by_id, &output_base)
            .with_context(|| "Failed to generate molecule test files")?;
    let molecule_test_file_count = molecule_test_paths.len();
    generated_rs_rel_paths.extend(molecule_test_paths);
    // Remove any stale files from previous runs.
    clean_output_dir(&output_base, &generated_rs_rel_paths)
        .with_context(|| format!("Failed to clean output directory {}", output_base.display()))?;
    let generated_at = rfc3339_now();
    println!(
        "Generated {} file{}",
        resolved_specs.len() + namespaces.len() + molecule_test_file_count,
        pluralize(resolved_specs.len() + namespaces.len() + molecule_test_file_count)
    );
    Ok(GeneratedSpecs {
        specs,
        generated_at,
    })
}
/// Writes passports for `specs` under `passport_root` and makes sure the
/// passport files are covered by a `.gitignore` entry.
///
/// `evidence_by_spec` / `contract_hash_by_spec` are forwarded to
/// `write_passports`; when `evidence_by_spec` is `None`, that function
/// carries forward any previously recorded passport data. No-op for an
/// empty spec slice.
fn finalize_passports(
    passport_root: &Path,
    specs: &[LoadedSpec],
    generated_at: &str,
    evidence_by_spec: Option<&BTreeMap<String, PassportEvidence>>,
    contract_hash_by_spec: Option<&BTreeMap<String, String>>,
) -> Result<()> {
    if specs.is_empty() {
        return Ok(());
    }
    // Guard is held (RAII) across the writes and the .gitignore update.
    // NOTE(review): presumably registers this process in the writer registry
    // (see the concurrent_passport_writer_* helpers below) — confirm against
    // ConcurrentPassportWriteGuard's definition.
    let _writer_guard = ConcurrentPassportWriteGuard::begin(passport_root);
    write_passports(specs, generated_at, evidence_by_spec, contract_hash_by_spec)?;
    ensure_gitignore_entry(passport_root)
        .with_context(|| "Failed to update .gitignore for passport files")?;
    Ok(())
}
/// Computes contract hashes for every spec that has one, keyed by spec id.
/// Returns `None` when no spec yielded a hash.
fn contract_hashes_for(specs: &[LoadedSpec]) -> Option<BTreeMap<String, String>> {
    let hashes: BTreeMap<String, String> = specs
        .iter()
        .filter_map(|spec| compute_contract_hash(spec).map(|hash| (spec.spec.id.clone(), hash)))
        .collect();
    (!hashes.is_empty()).then_some(hashes)
}
/// Writes one passport file next to each spec's source file.
///
/// When evidence maps are supplied (test pipeline), evidence and contract
/// hash are looked up there per spec id. Without them (build/generate
/// pipeline), any previously written passport's evidence and hash are
/// carried forward, falling back to a freshly computed hash.
fn write_passports(
    specs: &[LoadedSpec],
    generated_at: &str,
    evidence_by_spec: Option<&BTreeMap<String, PassportEvidence>>,
    contract_hash_by_spec: Option<&BTreeMap<String, String>>,
) -> Result<()> {
    for spec in specs {
        let source_path = Path::new(&spec.source.file_path);
        let (evidence, contract_hash) = match evidence_by_spec {
            Some(evidence_map) => {
                let evidence = evidence_map.get(&spec.spec.id).cloned();
                let hash = contract_hash_by_spec
                    .and_then(|hashes| hashes.get(&spec.spec.id))
                    .cloned();
                (evidence, hash)
            }
            None => {
                // Read errors and absent passports both degrade to None.
                let previous = read_passport(source_path).ok().flatten();
                let evidence = previous.as_ref().and_then(|p| p.evidence.clone());
                let hash = previous
                    .and_then(|p| p.contract_hash)
                    .or_else(|| compute_contract_hash(spec));
                (evidence, hash)
            }
        };
        let passport = build_passport_with_evidence(spec, generated_at, evidence, contract_hash);
        write_passport(&passport, source_path)
            .with_context(|| format!("Failed to write passport for {}", spec.source.id))?;
    }
    Ok(())
}
/// Resolved inputs for running cargo as part of a pipeline command.
struct PipelineContext {
    // Directory cargo is invoked in (flag > config > workspace discovery).
    crate_root: PathBuf,
    // Directory passed to cargo as the target dir.
    cargo_target_dir: PathBuf,
    // Optional wall-clock limit applied to cargo invocations.
    timeout: Option<Duration>,
    // Keeps the temporary target dir alive (and cleaned up on drop) when no
    // target dir was configured or provided via CARGO_TARGET_DIR.
    _temp_dir: Option<tempfile::TempDir>,
}
/// Result of a successful generation pass: the specs that were generated
/// plus the RFC 3339 timestamp recorded for them.
struct GeneratedSpecs {
    specs: Vec<LoadedSpec>,
    generated_at: String,
}
/// Resolves where cargo runs and where its artifacts go.
///
/// `crate_root` precedence: CLI flag, then workspace config, then the
/// workspace root discovered from `path`. Cargo target dir precedence:
/// workspace config, then the `CARGO_TARGET_DIR` env var, then a fresh
/// temporary directory whose handle is stored in the returned context so it
/// stays alive for the context's lifetime.
fn resolve_pipeline_context(
    path: &Path,
    crate_root_flag: Option<&Path>,
    config: &WorkspaceConfig,
) -> Result<PipelineContext> {
    let crate_root = match crate_root_flag.or(config.pipeline.crate_root.as_deref()) {
        Some(p) => p.to_path_buf(),
        None => workspace_root_for(path)?,
    };
    let mut temp_dir: Option<tempfile::TempDir> = None;
    let cargo_target_dir = if let Some(p) = &config.pipeline.cargo_target_dir {
        p.clone()
    } else if let Ok(env_val) = std::env::var("CARGO_TARGET_DIR") {
        PathBuf::from(env_val)
    } else {
        // No configured target dir: isolate cargo artifacts in a temp dir
        // that is removed when the context is dropped.
        let td = tempfile::TempDir::new()
            .with_context(|| "Failed to create temporary CARGO_TARGET_DIR")?;
        let path = td.path().to_path_buf();
        temp_dir = Some(td);
        path
    };
    Ok(PipelineContext {
        crate_root,
        cargo_target_dir,
        timeout: config.pipeline.timeout_secs.map(Duration::from_secs),
        _temp_dir: temp_dir,
    })
}
/// `spec build`: load and validate units from `path`, generate Rust into the
/// output directory, refresh passports, then run `cargo build`.
fn build_command(
    path: &Path,
    output: Option<&Path>,
    crate_root_flag: Option<&Path>,
    config: &WorkspaceConfig,
) -> Result<()> {
    if path.is_file() {
        bail!(
            "❌ spec build requires a directory path — pass the units directory, not a single file"
        );
    }
    if !cargo_available() {
        bail!("❌ cargo not found — install Rust or ensure cargo is on PATH");
    }
    // Loader errors abort the build; loader warnings are printed and tolerated.
    let (root_specs, loader_errors, loader_warnings, _total_files) = collect_specs(path)?;
    if !loader_errors.is_empty() {
        let mut errors = DiagnosticMap::new();
        for err in loader_errors {
            push_error(&mut errors, err);
        }
        print_diagnostics(&errors);
        bail!("❌ unable to load units before build");
    }
    if !loader_warnings.is_empty() {
        let mut warnings = DiagnosticMap::new();
        for warning in loader_warnings {
            push_warning(&mut warnings, warning);
        }
        print_diagnostics(&warnings);
    }
    // Every library alias referenced by a dep must be present in Cargo.toml.
    let alias_errors = validate_library_crate_aliases(&root_specs, path, config);
    if !alias_errors.is_empty() {
        let mut errors = DiagnosticMap::new();
        for err in alias_errors {
            push_error(&mut errors, err);
        }
        print_diagnostics(&errors);
        bail!("❌ cross-library crate alias validation failed");
    }
    let ctx = resolve_pipeline_context(path, crate_root_flag, config)?;
    // Default output is <crate_root>/src/generated unless --output was given.
    let resolved_output = output
        .map(PathBuf::from)
        .unwrap_or_else(|| ctx.crate_root.join("src/generated"));
    let generated = generate_specs(path, &resolved_output)?;
    if !generated.specs.is_empty() {
        // Build pipeline has no test evidence: write_passports carries
        // forward whatever the existing passports recorded.
        finalize_passports(path, &generated.specs, &generated.generated_at, None, None)?;
    }
    let result = run_cargo_build(
        &ctx.crate_root,
        &ctx.cargo_target_dir,
        ctx.timeout,
        Verbosity::Normal,
    )?;
    // Relay cargo's own output before reporting the outcome.
    print!("{}", result.stdout);
    eprint!("{}", result.stderr);
    if result.timed_out {
        bail!("❌ cargo build timed out{}", timeout_suffix(ctx.timeout));
    }
    if result.exit_code != 0 {
        bail!("❌ cargo build failed");
    }
    Ok(())
}
/// `spec test`: generate, build, run cargo tests, and record the observed
/// results as passport evidence.
///
/// Accepts either a units directory or a single `.unit.spec` file; in the
/// single-file case the whole containing directory is still generated, but
/// the cargo test filter and the passport writes are narrowed to that spec.
fn test_command(
    path: &Path,
    output: Option<&Path>,
    crate_root_flag: Option<&Path>,
    config: &WorkspaceConfig,
) -> Result<()> {
    if !cargo_available() {
        bail!("❌ cargo not found — install Rust or ensure cargo is on PATH");
    }
    // generation_scope: what gets generated; pipeline_scope: where the cargo
    // workspace is discovered from; target_spec: Some(..) only for a
    // single-file run.
    let (generation_scope, pipeline_scope, target_spec) = if path.is_file() {
        if !is_unit_spec(path) {
            bail!("{} is not a .unit.spec file", path.display());
        }
        (
            path,
            path.parent().unwrap_or(path),
            Some(load_file(path).with_context(|| format!("Failed to load {}", path.display()))?),
        )
    } else {
        (path, path, None)
    };
    // Loader errors abort the run; warnings are printed and tolerated.
    let (root_specs, loader_errors, loader_warnings, _total_files) =
        collect_specs(generation_scope)?;
    if !loader_errors.is_empty() {
        let mut errors = DiagnosticMap::new();
        for err in loader_errors {
            push_error(&mut errors, err);
        }
        print_diagnostics(&errors);
        bail!("❌ unable to load units before test");
    }
    if !loader_warnings.is_empty() {
        let mut warnings = DiagnosticMap::new();
        for warning in loader_warnings {
            push_warning(&mut warnings, warning);
        }
        print_diagnostics(&warnings);
    }
    let alias_errors = validate_library_crate_aliases(&root_specs, generation_scope, config);
    if !alias_errors.is_empty() {
        let mut errors = DiagnosticMap::new();
        for err in alias_errors {
            push_error(&mut errors, err);
        }
        print_diagnostics(&errors);
        bail!("❌ cross-library crate alias validation failed");
    }
    let ctx = resolve_pipeline_context(pipeline_scope, crate_root_flag, config)?;
    let resolved_output = output
        .map(PathBuf::from)
        .unwrap_or_else(|| ctx.crate_root.join("src/generated"));
    let generated = generate_specs(generation_scope, &resolved_output)?;
    // Directory runs refresh all passports up front (no evidence yet);
    // targeted runs defer to the narrowed write plan below.
    if target_spec.is_none() {
        finalize_passports(path, &generated.specs, &generated.generated_at, None, None)?;
    }
    let passport_write_plan =
        passport_write_plan(path, pipeline_scope, &generated.specs, target_spec.as_ref());
    let effective_prefix = match &config.pipeline.generated_module_prefix {
        Some(explicit) => explicit.clone(),
        None => output_module_prefix(&resolved_output, &ctx.crate_root)?,
    };
    // For a single-file run, restrict cargo test to that unit's tests module.
    let filter = target_spec.as_ref().map(|target| {
        let resolved = ResolvedSpec::from_spec(target.spec.clone());
        cargo_test_filter_for(&resolved, &effective_prefix)
    });
    let provenance = resolve_git_provenance(&ctx.crate_root);
    let build_result = run_cargo_build(
        &ctx.crate_root,
        &ctx.cargo_target_dir,
        ctx.timeout,
        Verbosity::Normal,
    )?;
    print!("{}", build_result.stdout);
    eprint!("{}", build_result.stderr);
    // A build timeout/failure is still recorded in the passports (with
    // "timeout"/"fail" build status and no test results) before bailing.
    if build_result.timed_out {
        let observed_at = rfc3339_now();
        let evidence_by_spec =
            build_timeout_evidence(passport_write_plan.specs, &observed_at, provenance.as_ref());
        let contract_hash_by_spec = contract_hashes_for(passport_write_plan.specs);
        finalize_test_passports(
            &passport_write_plan,
            &generated.generated_at,
            &evidence_by_spec,
            contract_hash_by_spec.as_ref(),
        )?;
        bail!("❌ cargo build timed out{}", timeout_suffix(ctx.timeout));
    }
    if build_result.exit_code != 0 {
        let observed_at = rfc3339_now();
        let evidence_by_spec =
            build_failure_evidence(passport_write_plan.specs, &observed_at, provenance.as_ref());
        let contract_hash_by_spec = contract_hashes_for(passport_write_plan.specs);
        finalize_test_passports(
            &passport_write_plan,
            &generated.generated_at,
            &evidence_by_spec,
            contract_hash_by_spec.as_ref(),
        )?;
        bail!("❌ cargo build failed");
    }
    let test_result = run_cargo_test(
        &ctx.crate_root,
        &ctx.cargo_target_dir,
        filter.as_deref(),
        ctx.timeout,
        Verbosity::Normal,
    )?;
    print!("{}", test_result.stdout);
    eprint!("{}", test_result.stderr);
    if test_result.timed_out {
        let observed_at = rfc3339_now();
        let evidence_by_spec =
            build_timeout_evidence(passport_write_plan.specs, &observed_at, provenance.as_ref());
        let contract_hash_by_spec = contract_hashes_for(passport_write_plan.specs);
        finalize_test_passports(
            &passport_write_plan,
            &generated.generated_at,
            &evidence_by_spec,
            contract_hash_by_spec.as_ref(),
        )?;
        bail!("❌ cargo test timed out{}", timeout_suffix(ctx.timeout));
    }
    // Targeted run whose filter matched nothing: the expected tests module
    // was not generated/selected.
    // NOTE(review): unlike the timeout/failure paths above, this bails
    // before any passport evidence is written — confirm that is intended.
    if target_spec.is_some() && zero_tests_ran(&test_result.stdout) {
        bail!("❌ cargo test matched 0 tests");
    }
    let parsed_test_results = parse_cargo_test_output(&test_result.stdout);
    let observed_at = rfc3339_now();
    let evidence_by_spec = build_test_evidence(
        passport_write_plan.specs,
        &effective_prefix,
        &parsed_test_results,
        &observed_at,
        provenance.as_ref(),
    )?;
    let contract_hash_by_spec = contract_hashes_for(passport_write_plan.specs);
    // Passports are written even when tests failed, so the failure is
    // recorded as evidence before the command exits non-zero.
    finalize_test_passports(
        &passport_write_plan,
        &generated.generated_at,
        &evidence_by_spec,
        contract_hash_by_spec.as_ref(),
    )?;
    if test_result.exit_code != 0 {
        bail!("❌ cargo test failed");
    }
    Ok(())
}
/// Renders the " after Ns" suffix used in timeout error messages, or an
/// empty string when no timeout was configured.
fn timeout_suffix(timeout: Option<Duration>) -> String {
    timeout
        .map(|limit| format!(" after {}s", limit.as_secs()))
        .unwrap_or_default()
}
/// Evidence for a run where `cargo build` exited non-zero: build status
/// "fail", no test results.
fn build_failure_evidence(
    specs: &[LoadedSpec],
    observed_at: &str,
    provenance: Option<&ArtifactProvenance>,
) -> BTreeMap<String, PassportEvidence> {
    build_incomplete_evidence(specs, "fail", observed_at, provenance)
}
/// Evidence for a run where cargo hit the configured timeout: build status
/// "timeout", no test results.
fn build_timeout_evidence(
    specs: &[LoadedSpec],
    observed_at: &str,
    provenance: Option<&ArtifactProvenance>,
) -> BTreeMap<String, PassportEvidence> {
    build_incomplete_evidence(specs, "timeout", observed_at, provenance)
}
/// Evidence recorded when the pipeline stops before tests produce results:
/// every spec gets the given build status ("fail" or "timeout") and an
/// empty test-result list.
fn build_incomplete_evidence(
    specs: &[LoadedSpec],
    build_status: &str,
    observed_at: &str,
    provenance: Option<&ArtifactProvenance>,
) -> BTreeMap<String, PassportEvidence> {
    let mut evidence_by_spec = BTreeMap::new();
    for spec in specs {
        evidence_by_spec.insert(
            spec.spec.id.clone(),
            PassportEvidence {
                build_status: build_status.to_string(),
                test_results: Vec::new(),
                observed_at: observed_at.to_string(),
                provenance: provenance.cloned(),
            },
        );
    }
    evidence_by_spec
}
/// Chooses which specs get passports and under which root.
///
/// Targeted run (single spec file): only that spec, rooted at the spec's
/// directory. Directory run: every generated spec, rooted at the path the
/// user requested.
fn passport_write_plan<'a>(
    requested_path: &'a Path,
    spec_root: &'a Path,
    generated_specs: &'a [LoadedSpec],
    target_spec: Option<&'a LoadedSpec>,
) -> PassportWritePlan<'a> {
    match target_spec {
        Some(target) => PassportWritePlan {
            passport_root: spec_root,
            specs: std::slice::from_ref(target),
        },
        None => PassportWritePlan {
            passport_root: requested_path,
            specs: generated_specs,
        },
    }
}
/// Thin wrapper over `finalize_passports` for the test pipeline, where
/// evidence is always present.
fn finalize_test_passports(
    plan: &PassportWritePlan<'_>,
    generated_at: &str,
    evidence_by_spec: &BTreeMap<String, PassportEvidence>,
    contract_hash_by_spec: Option<&BTreeMap<String, String>>,
) -> Result<()> {
    finalize_passports(
        plan.passport_root,
        plan.specs,
        generated_at,
        Some(evidence_by_spec),
        contract_hash_by_spec,
    )
}
/// Registry directory for active-writer markers of one passport root:
/// `<registry_base>/spec-passport-writers-<hash>`.
///
/// The root is canonicalized first so different spellings of the same
/// directory map to one registry; when canonicalization fails (e.g. the
/// root does not exist) the raw path is hashed instead.
fn concurrent_passport_writer_registry_dir(passport_root: &Path, registry_base: &Path) -> PathBuf {
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    let root_key = passport_root
        .canonicalize()
        .unwrap_or_else(|_| passport_root.to_path_buf());
    let mut hasher = DefaultHasher::new();
    root_key.hash(&mut hasher);
    registry_base.join(format!("spec-passport-writers-{:016x}", hasher.finish()))
}
/// File name of this process's active-writer marker: `<pid>-<secs>.active`,
/// where `<secs>` is `now` as seconds since the Unix epoch (0 for pre-epoch
/// clocks).
fn concurrent_passport_writer_marker_name(pid: u32, now: SystemTime) -> String {
    let started_at = now
        .duration_since(UNIX_EPOCH)
        .unwrap_or_default()
        .as_secs();
    format!("{}-{}.active", pid, started_at)
}
/// Parses an active-writer marker name (`<pid>-<secs>.active`) back into
/// `(pid, started_at_secs)`. Returns `None` for any name that does not
/// match that shape exactly.
fn parse_concurrent_passport_writer_marker(file_name: &str) -> Option<(u32, u64)> {
    let stem = file_name.strip_suffix(".active")?;
    let (pid_part, started_part) = stem.split_once('-')?;
    let pid: u32 = pid_part.parse().ok()?;
    let started_at: u64 = started_part.parse().ok()?;
    Some((pid, started_at))
}
/// Counts distinct *other* processes holding an active-writer marker in
/// `registry_dir`.
///
/// Markers older than `CONCURRENT_PASSPORT_WRITER_TTL_SECS` are treated as
/// stale (e.g. from a crashed process): best-effort removed and not
/// counted. Non-UTF-8 names and names that do not parse as markers are
/// skipped.
fn count_other_active_passport_writers(
    registry_dir: &Path,
    current_pid: u32,
    now: SystemTime,
) -> Result<usize> {
    let now_secs = now.duration_since(UNIX_EPOCH).unwrap_or_default().as_secs();
    // Collect pids in a set so several markers from one process count once.
    let mut other_writers = HashSet::new();
    for entry in fs::read_dir(registry_dir)
        .with_context(|| format!("Failed to read {}", registry_dir.display()))?
    {
        let entry = entry?;
        let file_name = entry.file_name();
        let Some(file_name) = file_name.to_str() else {
            continue;
        };
        let Some((pid, started_at)) = parse_concurrent_passport_writer_marker(file_name) else {
            continue;
        };
        if now_secs.saturating_sub(started_at) > CONCURRENT_PASSPORT_WRITER_TTL_SECS {
            // Stale marker: clean up opportunistically; removal failure is
            // deliberately ignored (another process may have raced us).
            let _ = fs::remove_file(entry.path());
            continue;
        }
        if pid != current_pid {
            other_writers.insert(pid);
        }
    }
    Ok(other_writers.len())
}
/// Builds the best-effort concurrency warning shown when other spec
/// processes are writing passports under the same root; `None` when this
/// process is the only active writer.
fn concurrent_passport_write_warning_message(
    passport_root: &Path,
    other_writers: usize,
) -> Option<String> {
    (other_writers != 0).then(|| {
        format!(
            "⚠ detected {other_writers} other spec process{} writing passports under {}; concurrent passport writes are best-effort only (no locking)",
            pluralize(other_writers),
            passport_root.display()
        )
    })
}
/// Builds per-spec passport evidence from parsed `cargo test` output.
///
/// For each local test of each spec, the expected cargo test path is looked
/// up in `parsed_test_results`; tests absent from cargo's output are
/// recorded with status "unknown". `build_status` is "pass" because this
/// runs only after a successful cargo build (see `test_command`).
fn build_test_evidence(
    specs: &[LoadedSpec],
    output_prefix: &str,
    parsed_test_results: &HashMap<String, ParsedCargoTestResult>,
    observed_at: &str,
    provenance: Option<&ArtifactProvenance>,
) -> Result<BTreeMap<String, PassportEvidence>> {
    let mut evidence_by_spec = BTreeMap::new();
    for spec in specs {
        // Resolve to obtain the module path / fn name used in test names.
        let resolved = ResolvedSpec::from_spec(spec.spec.clone());
        let mut test_results = Vec::new();
        for local_test in &spec.spec.local_tests {
            let full_name = expected_cargo_test_name(&resolved, output_prefix, &local_test.id);
            let observed = parsed_test_results.get(&full_name);
            let (status, reason) = match observed {
                Some(result) => (result.status.clone(), result.reason.clone()),
                None => (
                    "unknown".to_string(),
                    Some("test not found in cargo output".to_string()),
                ),
            };
            test_results.push(PassportTestResult {
                id: local_test.id.clone(),
                status,
                reason,
            });
        }
        evidence_by_spec.insert(
            spec.spec.id.clone(),
            PassportEvidence {
                build_status: "pass".to_string(),
                test_results,
                observed_at: observed_at.to_string(),
                provenance: provenance.cloned(),
            },
        );
    }
    Ok(evidence_by_spec)
}
/// Wraps the current git HEAD sha (when resolvable) as artifact provenance.
fn resolve_git_provenance(path: &Path) -> Option<ArtifactProvenance> {
    resolve_git_commit_sha(path).map(|sha| ArtifactProvenance { git_commit_sha: sha })
}
/// Returns the commit sha of HEAD by running `git rev-parse HEAD` in
/// `path`. `None` when git is unavailable, the command fails, or its
/// output is empty/non-UTF-8.
fn resolve_git_commit_sha(path: &Path) -> Option<String> {
    let output = std::process::Command::new("git")
        .current_dir(path)
        .args(["rev-parse", "HEAD"])
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    let stdout = String::from_utf8(output.stdout).ok()?;
    let sha = stdout.trim();
    (!sha.is_empty()).then(|| sha.to_string())
}
/// Derives the module path (e.g. "generated" or "generated::api") that
/// corresponds to the generated-output directory.
///
/// The prefix is `output` relative to `<crate_root>/src`, with path
/// separators replaced by `::`. A relative `output` that simply starts
/// with a `src` component is also accepted. Errors when no normal path
/// components remain after stripping.
fn output_module_prefix(output: &Path, crate_root: &Path) -> Result<String> {
    let src_root = crate_root.join("src");
    // Prefer stripping the full `<crate_root>/src`; otherwise accept a
    // leading literal `src` component; otherwise use the path as-is. The
    // second step replaces the original's manual first-component
    // inspection, which was exactly equivalent to `strip_prefix("src")`.
    let relative = output
        .strip_prefix(&src_root)
        .or_else(|_| output.strip_prefix("src"))
        .unwrap_or(output);
    let parts: Vec<&str> = relative
        .components()
        .filter_map(|c| match c {
            std::path::Component::Normal(s) => s.to_str(),
            _ => None,
        })
        .collect();
    if parts.is_empty() {
        return Err(anyhow::anyhow!(
            "❌ could not determine output module prefix from {}",
            output.display()
        ));
    }
    Ok(parts.join("::"))
}
/// Builds the cargo test name filter selecting every local test of one
/// generated unit; the trailing `::` scopes the filter to the unit's
/// `tests` module.
fn cargo_test_filter_for(spec: &ResolvedSpec, output_prefix: &str) -> String {
    let module = spec.module_path.replace('/', "::");
    if module.is_empty() {
        format!("{output_prefix}::{}::tests::", spec.fn_name)
    } else {
        format!("{output_prefix}::{module}::{}::tests::", spec.fn_name)
    }
}
/// Fully qualified cargo test path for one local test of a unit:
/// `<prefix>[::<module>]::<fn>::tests::test_<id>`.
fn expected_cargo_test_name(spec: &ResolvedSpec, output_prefix: &str, test_id: &str) -> String {
    let module = spec.module_path.replace('/', "::");
    if module.is_empty() {
        format!("{output_prefix}::{}::tests::test_{test_id}", spec.fn_name)
    } else {
        format!(
            "{output_prefix}::{module}::{}::tests::test_{test_id}",
            spec.fn_name
        )
    }
}
/// Contents of one generated module directory, used to emit its `mod.rs`.
#[derive(Default)]
struct Namespace {
    // File stems (unit fn names) generated directly in this directory.
    unit_files: BTreeSet<String>,
    // Immediate child module directories.
    subdirs: BTreeSet<String>,
    // Whether molecule tests were generated for this module.
    has_molecule_tests: bool,
}
/// Ensures `module_path` and every ancestor namespace exist in the map, and
/// registers each path segment as a subdir of its parent.
///
/// The root namespace ("") is always created, even for an empty path. For
/// "a/b": "" gains subdir "a", "a" gains subdir "b", and entries for "",
/// "a" and "a/b" all exist afterwards.
fn record_namespace_branch(module_path: &str, namespaces: &mut BTreeMap<String, Namespace>) {
    namespaces.entry(String::new()).or_default();
    if module_path.is_empty() {
        return;
    }
    let segments: Vec<&str> = module_path.split('/').collect();
    for depth in 0..segments.len() {
        // Parent namespace of the segment at `depth` ("" for the first).
        let parent = if depth == 0 {
            String::new()
        } else {
            segments[..depth].join("/")
        };
        namespaces
            .entry(parent)
            .or_default()
            .subdirs
            .insert(segments[depth].to_string());
        // Ensure the namespace itself exists even when nothing is ever
        // registered inside it directly.
        let current = segments[..=depth].join("/");
        namespaces.entry(current).or_default();
    }
}
/// Builds the namespace tree covering every generated unit and molecule
/// test, keyed by module path ("" is the generated root).
///
/// Each unit registers its fn name as a file in its module; each molecule
/// test marks its module as containing molecule tests. Intermediate
/// namespaces are created via `record_namespace_branch`.
fn build_namespaces(
    specs: &[ResolvedSpec],
    molecule_tests: &[ResolvedMoleculeTest],
) -> BTreeMap<String, Namespace> {
    let mut namespaces = BTreeMap::<String, Namespace>::new();
    // Root entry exists even with no specs and no molecule tests.
    namespaces.entry(String::new()).or_default();
    for spec in specs {
        record_namespace_branch(&spec.module_path, &mut namespaces);
        namespaces
            .entry(spec.module_path.clone())
            .or_default()
            .unit_files
            .insert(spec.fn_name.clone());
    }
    for test in molecule_tests {
        record_namespace_branch(&test.module_path, &mut namespaces);
        namespaces
            .entry(test.module_path.clone())
            .or_default()
            .has_molecule_tests = true;
    }
    namespaces
}
/// Relative path of the generated `.rs` file for a unit: its module
/// directory (if any) plus `<fn_name>.rs`.
fn path_for_spec(spec: &ResolvedSpec) -> PathBuf {
    let file_name = format!("{}.rs", spec.fn_name);
    if spec.module_path.is_empty() {
        PathBuf::from(file_name)
    } else {
        let module_dir = spec.module_path.replace('/', std::path::MAIN_SEPARATOR_STR);
        PathBuf::from(module_dir).join(file_name)
    }
}
/// Validates and prepares the generation output directory, returning the
/// resolved safe output path.
///
/// Safety rules: never write into an existing non-directory path, and never
/// write into a non-empty directory lacking the `.spec-generated` marker —
/// the marker is how a directory owned by this tool (and safe to clean) is
/// recognized. Creates the directory and the marker when missing.
fn ensure_output_marker(output: &Path) -> Result<PathBuf> {
    let output_base = safe_output_path(output)?;
    if output_base.exists() && !output_base.is_dir() {
        bail!(
            "Refusing to generate into {}: output path exists and is not a directory",
            output_base.display()
        );
    }
    let marker = output_base.join(".spec-generated");
    // Existing non-empty directory without the marker: refuse rather than
    // risk clobbering user files.
    if !marker.exists() && output_base.exists() && !dir_is_empty(&output_base)? {
        bail!(
            "Refusing to generate into {}: non-empty directory missing .spec-generated marker",
            output_base.display()
        );
    }
    if !output_base.exists() {
        fs::create_dir_all(&output_base).with_context(|| {
            format!(
                "Failed to create output directory {}",
                output_base.display()
            )
        })?;
    }
    if !marker.exists() {
        // Empty file: only the marker's existence matters.
        fs::write(&marker, "")
            .with_context(|| format!("Failed to create marker {}", marker.display()))?;
    }
    Ok(output_base)
}
fn validate_export_output_path(output: &Path) -> Result<()> {
if output.is_dir() {
bail!("❌ --output must be a file path, not a directory");
}
if let Some(parent) = output.parent().filter(|p| !p.as_os_str().is_empty())
&& !parent.exists()
{
bail!("❌ output directory does not exist: {}", parent.display());
}
Ok(())
}
fn dir_is_empty(path: &Path) -> Result<bool> {
let mut entries =
fs::read_dir(path).with_context(|| format!("Failed to read dir {}", path.display()))?;
Ok(entries.next().is_none())
}
/// Loads unit specs from `path`.
///
/// For a file path the file must be a `.unit.spec`; the result then holds
/// exactly that spec or its load error (reported as a diagnostic, not a
/// hard failure, so callers can print it with the others). For a
/// directory, delegates to `load_directory_report`. `bail!`s only for a
/// non-spec file or a nonexistent path.
fn collect_specs(path: &Path) -> Result<CollectedSpecs> {
    if path.is_file() {
        // Compute once; the original called is_unit_spec(path) twice.
        let is_unit = is_unit_spec(path);
        let total_files = usize::from(is_unit);
        if !is_unit {
            // Point molecule-test authors at the directory workflow.
            let hint = if path
                .file_name()
                .and_then(|n| n.to_str())
                .is_some_and(|n| n.ends_with(".test.spec"))
            {
                " (to validate molecule tests, pass the containing directory)"
            } else {
                ""
            };
            bail!("{} is not a .unit.spec file{}", path.display(), hint);
        }
        return match load_file(path) {
            Ok(spec) => Ok((vec![spec], Vec::new(), Vec::new(), total_files)),
            Err(err) => Ok((Vec::new(), vec![err], Vec::new(), total_files)),
        };
    }
    if !path.is_dir() {
        bail!("{} does not exist", path.display());
    }
    let report = load_directory_report(path);
    Ok((
        report.specs,
        report.errors,
        report.warnings,
        report.total_files,
    ))
}
/// Collects everything validation needs: the root specs at `path` plus the
/// specs of every configured library the root specs reference directly.
/// Loader errors/warnings and file counts from both sources are merged.
fn collect_validation_specs(
    path: &Path,
    libraries: &[ResolvedLibrary],
) -> Result<ValidationSpecCollection> {
    let (root_specs, mut loader_errors, mut loader_warnings, mut total_files) =
        collect_specs(path)?;
    let (
        _selected_libraries,
        imported_libraries,
        imported_errors,
        imported_warnings,
        imported_total_files,
    ) = load_referenced_validation_specs(&root_specs, libraries);
    total_files += imported_total_files;
    loader_errors.extend(imported_errors);
    loader_warnings.extend(imported_warnings);
    Ok(ValidationSpecCollection {
        root_specs,
        imported_libraries,
        loader_errors,
        loader_warnings,
        total_files,
    })
}
/// Loads specs for every configured library that the root specs reference
/// directly (via a library alias in a dep).
///
/// Returns the selected libraries, their loaded specs grouped per alias,
/// the accumulated loader errors/warnings, and the total file count across
/// all loaded libraries.
fn load_referenced_validation_specs(
    root_specs: &[LoadedSpec],
    libraries: &[ResolvedLibrary],
) -> (
    Vec<ResolvedLibrary>,
    Vec<ImportedLibrarySpecs>,
    Vec<spec_core::SpecError>,
    Vec<spec_core::SpecWarning>,
    usize,
) {
    let mut selected_libraries = Vec::new();
    let mut imported_libraries = Vec::new();
    let mut loader_errors = Vec::new();
    let mut loader_warnings = Vec::new();
    let mut total_files = 0;
    let direct_root_aliases: BTreeSet<String> = direct_root_library_aliases(root_specs)
        .into_keys()
        .collect();
    // Iterate by reference and clone once per selected library; the
    // original cloned each selected library twice (`.cloned()` on the
    // iterator plus an explicit `library.clone()`).
    for library in libraries
        .iter()
        .filter(|library| direct_root_aliases.contains(&library.alias))
    {
        let report = load_directory_report(&library.root);
        total_files += report.total_files;
        loader_errors.extend(report.errors);
        loader_warnings.extend(report.warnings);
        selected_libraries.push(library.clone());
        imported_libraries.push(ImportedLibrarySpecs {
            alias: library.alias.clone(),
            specs: report.specs,
        });
    }
    (
        selected_libraries,
        imported_libraries,
        loader_errors,
        loader_warnings,
        total_files,
    )
}
/// Minimal Cargo.toml projection: only the `[dependencies]` keys are needed
/// for library-alias validation; everything else is ignored.
#[derive(serde::Deserialize)]
struct CargoManifest {
    #[serde(default)]
    dependencies: BTreeMap<String, toml::Value>,
}
/// Checks that every library alias referenced by a root-spec dep has a
/// matching entry in the crate's Cargo.toml `[dependencies]`.
///
/// Returns one `LibraryCrateAliasMissing` per missing alias (in alias
/// order, carrying the first spec file that referenced it), or a single
/// manifest error when Cargo.toml cannot be resolved/read/parsed. Skips
/// all work when no alias is referenced.
fn validate_library_crate_aliases(
    root_specs: &[LoadedSpec],
    path: &Path,
    config: &WorkspaceConfig,
) -> Vec<spec_core::SpecError> {
    let referenced_aliases = direct_root_library_aliases(root_specs);
    if referenced_aliases.is_empty() {
        return Vec::new();
    }
    let (manifest_path, dependency_aliases) =
        match load_root_cargo_dependency_aliases(path, config) {
            Ok(loaded) => loaded,
            Err(err) => return vec![err],
        };
    let mut missing = Vec::new();
    for (alias, source_path) in referenced_aliases {
        if !dependency_aliases.contains(&alias) {
            missing.push(spec_core::SpecError::LibraryCrateAliasMissing {
                alias,
                cargo_toml: manifest_path.display().to_string(),
                path: source_path,
            });
        }
    }
    missing
}
fn direct_root_library_aliases(root_specs: &[LoadedSpec]) -> BTreeMap<String, String> {
let mut aliases = BTreeMap::new();
for spec in root_specs {
for dep in &spec.spec.deps {
let Ok(dep_ref) = DepRef::parse(dep) else {
continue;
};
if let Some(alias) = dep_ref.library_alias() {
aliases
.entry(alias.to_string())
.or_insert_with(|| spec.source.file_path.clone());
}
}
}
aliases
}
/// Resolves the crate's Cargo.toml: configured crate root when set,
/// otherwise the workspace root discovered from the spec directory (a file
/// path is reduced to its parent first).
fn resolved_crate_manifest_path(path: &Path, config: &WorkspaceConfig) -> Result<PathBuf> {
    let spec_root = if path.is_file() {
        path.parent().unwrap_or(path)
    } else {
        path
    };
    let crate_root = if let Some(configured) = config.pipeline.crate_root.as_deref() {
        configured.to_path_buf()
    } else {
        workspace_root_for(spec_root)?
    };
    Ok(crate_root.join("Cargo.toml"))
}
/// Resolves the crate's Cargo.toml and loads its dependency names, mapping
/// any failure into a `LibraryCrateManifestError` (the manifest path is
/// attached only once it was successfully resolved).
fn load_root_cargo_dependency_aliases(
    path: &Path,
    config: &WorkspaceConfig,
) -> std::result::Result<(PathBuf, HashSet<String>), spec_core::SpecError> {
    let manifest_path = resolved_crate_manifest_path(path, config).map_err(|err| {
        spec_core::SpecError::LibraryCrateManifestError {
            cargo_toml: None,
            message: format!("Failed to resolve Cargo.toml for library alias validation: {err}"),
        }
    })?;
    let dependency_aliases = load_cargo_dependency_aliases(&manifest_path).map_err(|err| {
        spec_core::SpecError::LibraryCrateManifestError {
            cargo_toml: Some(manifest_path.display().to_string()),
            message: err.to_string(),
        }
    })?;
    Ok((manifest_path, dependency_aliases))
}
/// Reads and parses a Cargo.toml, returning the set of names under
/// `[dependencies]`.
fn load_cargo_dependency_aliases(manifest_path: &Path) -> Result<HashSet<String>> {
    let raw = fs::read_to_string(manifest_path)
        .with_context(|| format!("Failed to read {}", manifest_path.display()))?;
    let manifest: CargoManifest = toml::from_str(&raw)
        .with_context(|| format!("Failed to parse {}", manifest_path.display()))?;
    Ok(manifest.dependencies.into_keys().collect())
}
/// Molecule tests are only discovered for directory scopes; single-file
/// runs never load them. (fs::metadata-based check is the documented
/// equivalent of `Path::is_dir`: follows symlinks, false on any error.)
fn includes_directory_molecule_tests(path: &Path) -> bool {
    fs::metadata(path).map(|meta| meta.is_dir()).unwrap_or(false)
}
fn validate_molecule_tests(
tests: &[LoadedMoleculeTest],
specs: &[LoadedSpec],
) -> (Vec<spec_core::SpecError>, Vec<spec_core::SpecWarning>) {
let mut errors = Vec::new();
let mut warnings = Vec::new();
for test in tests {
if let Err(e) = validate_molecule_test_semantic(test) {
errors.push(e);
}
}
let unit_ids: std::collections::HashSet<&str> =
specs.iter().map(|s| s.spec.id.as_str()).collect();
for test in tests {
let (errs, warns) = validate_molecule_test_covers(test, &unit_ids);
errors.extend(errs);
warnings.extend(warns);
}
errors.extend(validate_no_duplicate_molecule_test_ids(tests));
(errors, warnings)
}
/// Test-only validation pipeline over a flat spec list without library
/// imports: duplicate-id check, per-spec full validation, dep-existence
/// check, then spec-version warnings.
#[cfg(test)]
fn finish_validation(
    specs: &[LoadedSpec],
    options: &ValidationOptions,
) -> (Vec<spec_core::SpecError>, Vec<spec_core::SpecWarning>) {
    let mut errors = Vec::new();
    let mut warnings = Vec::new();
    errors.extend(validate_no_duplicate_ids(specs));
    for spec in specs {
        if let Err(err) = validate_full_with_options(spec, options) {
            errors.push(err);
        }
    }
    let (dep_errors, dep_warnings) = validate_deps_exist_with_options(specs, options);
    errors.extend(dep_errors);
    warnings.extend(dep_warnings);
    warnings.extend(check_spec_versions(specs));
    (errors, warnings)
}
/// Validation pipeline over root specs plus imported library specs.
///
/// Per-spec full validation runs on everything; duplicate-id and
/// dep-existence checks run on the qualified (library-aware) view built by
/// `build_qualified_validation_specs`; spec-version warnings are gathered
/// for the root group and each library group.
fn finish_validation_with_imports(
    specs: &ValidationSpecCollection,
    options: &ValidationOptions,
) -> (Vec<spec_core::SpecError>, Vec<spec_core::SpecWarning>) {
    let mut errors = Vec::new();
    let mut warnings = Vec::new();
    for spec in &specs.root_specs {
        if let Err(err) = validate_full_with_options(spec, options) {
            errors.push(err);
        }
    }
    for library in &specs.imported_libraries {
        for spec in &library.specs {
            if let Err(err) = validate_full_with_options(spec, options) {
                errors.push(err);
            }
        }
    }
    // Aliases that were actually imported; deps naming anything else are
    // reported as unknown namespaces during qualification.
    let alias_set: HashSet<&str> = specs
        .imported_libraries
        .iter()
        .map(|library| library.alias.as_str())
        .collect();
    let qualified_specs = build_qualified_validation_specs(specs, &alias_set, &mut errors);
    errors.extend(validate_no_duplicate_qualified_ids(&qualified_specs));
    let (mut dep_errors, dep_warnings) =
        validate_qualified_deps_exist_with_options(&qualified_specs, options);
    errors.append(&mut dep_errors);
    warnings.extend(dep_warnings);
    warnings.extend(check_spec_versions(&specs.root_specs));
    for library in &specs.imported_libraries {
        warnings.extend(check_spec_versions(&library.specs));
    }
    (errors, warnings)
}
/// Qualifies every spec for library-aware validation: root specs with no
/// library, imported specs with their library's alias. Dep-parse and
/// unknown-alias problems are appended to `errors` by `qualify_loaded_spec`.
fn build_qualified_validation_specs<'a>(
    specs: &'a ValidationSpecCollection,
    known_library_aliases: &HashSet<&str>,
    errors: &mut Vec<spec_core::SpecError>,
) -> Vec<QualifiedLoadedSpec<'a>> {
    let mut qualified_specs = Vec::new();
    for spec in &specs.root_specs {
        qualified_specs.push(qualify_loaded_spec(
            spec,
            None,
            known_library_aliases,
            errors,
        ));
    }
    for library in &specs.imported_libraries {
        for spec in &library.specs {
            qualified_specs.push(qualify_loaded_spec(
                spec,
                Some(library.alias.as_str()),
                known_library_aliases,
                errors,
            ));
        }
    }
    qualified_specs
}
/// Builds the qualified (library-aware) view of one loaded spec.
///
/// Each authored dep is parsed; parse failures become semantic-validation
/// errors and the dep is dropped. Deps naming a library alias absent from
/// `known_library_aliases` become unknown-namespace errors and are likewise
/// dropped. The remaining deps are qualified relative to `current_library`
/// (the alias of the library this spec came from, `None` for root specs).
fn qualify_loaded_spec<'a>(
    loaded: &'a LoadedSpec,
    current_library: Option<&str>,
    known_library_aliases: &HashSet<&str>,
    errors: &mut Vec<spec_core::SpecError>,
) -> QualifiedLoadedSpec<'a> {
    let mut qualified_deps = Vec::with_capacity(loaded.spec.deps.len());
    for authored_dep in &loaded.spec.deps {
        let dep = match DepRef::parse(authored_dep) {
            Ok(dep) => dep,
            Err(err) => {
                errors.push(spec_core::SpecError::SemanticValidation {
                    message: err.to_string(),
                    path: loaded.source.file_path.clone(),
                });
                continue;
            }
        };
        if let Some(alias) = dep.library_alias()
            && !known_library_aliases.contains(alias)
        {
            errors.push(spec_core::SpecError::UnknownLibraryNamespace {
                alias: alias.to_string(),
                dep: dep.authored(),
                path: loaded.source.file_path.clone(),
            });
            continue;
        }
        qualified_deps.push(dep.to_qualified(current_library));
    }
    QualifiedLoadedSpec {
        loaded,
        qualified_id: QualifiedUnitRef::new(
            current_library.map(str::to_string),
            loaded.spec.id.clone(),
        ),
        qualified_deps,
    }
}
/// Prints grouped diagnostics to stderr: each group key (usually a file
/// path) followed by one indented bullet per message, in key order.
fn print_diagnostics(diagnostics: &DiagnosticMap) {
    diagnostics.iter().for_each(|(path, messages)| {
        eprintln!("{path}:");
        messages.iter().for_each(|message| eprintln!(" - {message}"));
    });
}
/// Appends an error's rendered message to the diagnostic group derived
/// from the error itself.
fn push_error(diagnostics: &mut DiagnosticMap, err: spec_core::SpecError) {
    diagnostics
        .entry(error_key(&err))
        .or_default()
        .push(err.to_string());
}
/// Appends a warning's rendered message to the diagnostic group derived
/// from the warning itself.
fn push_warning(diagnostics: &mut DiagnosticMap, warning: spec_core::SpecWarning) {
    diagnostics
        .entry(warning_key(&warning))
        .or_default()
        .push(warning.to_string());
}
/// Maps every `SpecError` variant to its stable machine-readable code
/// (used in structured/JSON output). The match is deliberately exhaustive
/// so adding a variant fails to compile until a code is assigned here.
fn spec_error_code(err: &spec_core::SpecError) -> &'static str {
    match err {
        spec_core::SpecError::Io(_) => "SPEC_IO",
        spec_core::SpecError::InvalidUtf8 { .. } => "SPEC_INVALID_UTF8",
        spec_core::SpecError::YamlParse { .. } => "SPEC_YAML_PARSE",
        spec_core::SpecError::Json(_) => "SPEC_JSON",
        spec_core::SpecError::SchemaValidation { .. } => "SPEC_SCHEMA_VALIDATION",
        spec_core::SpecError::SemanticValidation { .. } => "SPEC_SEMANTIC_VALIDATION",
        spec_core::SpecError::RustKeyword { .. } => "SPEC_RUST_KEYWORD",
        spec_core::SpecError::DuplicateId { .. } => "SPEC_DUPLICATE_ID",
        spec_core::SpecError::DepCollision { .. } => "SPEC_DEP_COLLISION",
        spec_core::SpecError::MissingDep { .. } => "SPEC_MISSING_DEP",
        spec_core::SpecError::UnknownLibraryNamespace { .. } => "SPEC_UNKNOWN_LIBRARY_NAMESPACE",
        spec_core::SpecError::CrossLibraryDepNotFound { .. } => "SPEC_CROSS_LIBRARY_DEP_NOT_FOUND",
        spec_core::SpecError::LibraryCrateAliasMissing { .. } => "SPEC_LIBRARY_CRATE_ALIAS_MISSING",
        spec_core::SpecError::LibraryCrateManifestError { .. } => {
            "SPEC_LIBRARY_CRATE_MANIFEST_ERROR"
        }
        spec_core::SpecError::CyclicDep { .. } => "SPEC_CYCLIC_DEP",
        spec_core::SpecError::CrossLibraryCycle { .. } => "SPEC_CROSS_LIBRARY_CYCLE",
        spec_core::SpecError::UseStatementInBody { .. } => "SPEC_USE_STATEMENT_IN_BODY",
        spec_core::SpecError::BodyRustMustBeBlock { .. } => "SPEC_BODY_RUST_MUST_BE_BLOCK",
        spec_core::SpecError::BodyRustLooksLikeFnDeclaration { .. } => {
            "SPEC_BODY_RUST_LOOKS_LIKE_FN_DECLARATION"
        }
        spec_core::SpecError::LocalTestExpectNotExpr { .. } => "SPEC_LOCAL_TEST_EXPECT_NOT_EXPR",
        spec_core::SpecError::DuplicateLocalTestId { .. } => "SPEC_DUPLICATE_LOCAL_TEST_ID",
        spec_core::SpecError::ContractTypeInvalid { .. } => "SPEC_CONTRACT_TYPE_INVALID",
        spec_core::SpecError::ContractInputNameInvalid { .. } => "SPEC_CONTRACT_INPUT_NAME_INVALID",
        spec_core::SpecError::Traversal { .. } => "SPEC_TRAVERSAL",
        spec_core::SpecError::Generator { .. } => "SPEC_GENERATOR",
        spec_core::SpecError::OutputDir { .. } => "SPEC_OUTPUT_DIR",
        spec_core::SpecError::MissingMarker { .. } => "SPEC_MISSING_MARKER",
        spec_core::SpecError::MoleculeCoversNotFound { .. } => "SPEC_MOLECULE_COVERS_NOT_FOUND",
        spec_core::SpecError::CrossLibraryMoleculeCoverUnsupported { .. } => {
            "SPEC_MOLECULE_CROSS_LIBRARY_COVERS_UNSUPPORTED"
        }
        spec_core::SpecError::DuplicateMoleculeTestId { .. } => "SPEC_DUPLICATE_MOLECULE_ID",
        spec_core::SpecError::MoleculeCoversCollision { .. } => "SPEC_MOLECULE_COVERS_COLLISION",
        spec_core::SpecError::MoleculeBodyRustMustBeBlock { .. } => {
            "SPEC_MOLECULE_BODY_RUST_MUST_BE_BLOCK"
        }
        spec_core::SpecError::MoleculeBodyContainsUnsafe { .. } => {
            "SPEC_MOLECULE_BODY_CONTAINS_UNSAFE"
        }
        spec_core::SpecError::ReservedUnitName { .. } => "SPEC_RESERVED_UNIT_NAME",
    }
}
/// Converts a `SpecError` into the structured `JsonErrorEntry` used by the
/// CLI's JSON output mode.
///
/// `id_by_path` maps spec file paths to unit ids so each entry can carry the
/// owning unit whenever the error names a loaded spec file. Field
/// conventions across variants: `path`/`path2` hold file paths (for
/// collision variants, `path2` is reused for the second colliding id),
/// `dep` holds the offending dependency or cover id, `value` a free-form
/// offending token (alias, fn name, segment, or type string), and `cycle`
/// the dependency cycle path.
fn spec_error_to_json_entry(
    err: &spec_core::SpecError,
    id_by_path: &HashMap<String, String>,
) -> JsonErrorEntry {
    let code = spec_error_code(err).to_string();
    let fields = match err {
        // Wrapped foreign errors carry no path context; keep only their text.
        spec_core::SpecError::Io(_) => ErrorFields {
            message: Some(err.to_string()),
            ..Default::default()
        },
        spec_core::SpecError::InvalidUtf8 { path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            ..Default::default()
        },
        spec_core::SpecError::YamlParse { message, path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            message: Some(message.clone()),
            ..Default::default()
        },
        spec_core::SpecError::Json(_) => ErrorFields {
            message: Some(err.to_string()),
            ..Default::default()
        },
        spec_core::SpecError::SchemaValidation { message, path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            message: Some(message.clone()),
            ..Default::default()
        },
        spec_core::SpecError::SemanticValidation { message, path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            message: Some(message.clone()),
            ..Default::default()
        },
        spec_core::SpecError::RustKeyword { path, segment, id } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            value: Some(segment.clone()),
            id: Some(id.clone()),
            ..Default::default()
        },
        // Duplicate ids report the first file as `path`, the second as `path2`.
        spec_core::SpecError::DuplicateId { id, file1, file2 } => ErrorFields {
            unit: id_by_path.get(file1).cloned(),
            path: Some(file1.clone()),
            id: Some(id.clone()),
            path2: Some(file2.clone()),
            ..Default::default()
        },
        // Collisions: dep1 -> `dep`, colliding fn name -> `value`,
        // dep2 -> `path2` (path2 is repurposed for the second dep id).
        spec_core::SpecError::DepCollision {
            dep1,
            dep2,
            fn_name,
            path,
        } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            dep: Some(dep1.clone()),
            value: Some(fn_name.clone()),
            path2: Some(dep2.clone()),
            ..Default::default()
        },
        spec_core::SpecError::MissingDep { dep, path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            dep: Some(dep.clone()),
            ..Default::default()
        },
        spec_core::SpecError::UnknownLibraryNamespace { alias, dep, path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            dep: Some(dep.clone()),
            value: Some(alias.clone()),
            ..Default::default()
        },
        spec_core::SpecError::CrossLibraryDepNotFound { dep, path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            dep: Some(dep.clone()),
            ..Default::default()
        },
        spec_core::SpecError::LibraryCrateAliasMissing {
            alias,
            cargo_toml,
            path,
        } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            value: Some(alias.clone()),
            path2: Some(cargo_toml.clone()),
            ..Default::default()
        },
        // `cargo_toml` is already an Option, so it maps directly into `path`.
        spec_core::SpecError::LibraryCrateManifestError {
            cargo_toml,
            message,
        } => ErrorFields {
            path: cargo_toml.clone(),
            message: Some(message.clone()),
            ..Default::default()
        },
        spec_core::SpecError::CyclicDep { cycle_path, path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            cycle: Some(cycle_path.clone()),
            ..Default::default()
        },
        spec_core::SpecError::CrossLibraryCycle { cycle_path, path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            cycle: Some(cycle_path.clone()),
            ..Default::default()
        },
        spec_core::SpecError::UseStatementInBody { path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            ..Default::default()
        },
        spec_core::SpecError::BodyRustMustBeBlock { path, message } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            message: Some(message.clone()),
            ..Default::default()
        },
        spec_core::SpecError::BodyRustLooksLikeFnDeclaration { path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            ..Default::default()
        },
        spec_core::SpecError::LocalTestExpectNotExpr { id, path, message } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            message: Some(message.clone()),
            id: Some(id.clone()),
            ..Default::default()
        },
        spec_core::SpecError::DuplicateLocalTestId { id, path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            id: Some(id.clone()),
            ..Default::default()
        },
        // Contract errors qualify the field name with a "contract." prefix.
        spec_core::SpecError::ContractTypeInvalid {
            field,
            type_str,
            path,
            ..
        } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            field: Some(format!("contract.{field}")),
            value: Some(type_str.clone()),
            ..Default::default()
        },
        spec_core::SpecError::ContractInputNameInvalid { name, path, .. } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            field: Some(format!("contract.inputs.{name}")),
            ..Default::default()
        },
        spec_core::SpecError::Traversal { message, path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            message: Some(message.clone()),
            ..Default::default()
        },
        spec_core::SpecError::Generator { message } => ErrorFields {
            message: Some(message.clone()),
            ..Default::default()
        },
        spec_core::SpecError::OutputDir { message } => ErrorFields {
            message: Some(message.clone()),
            ..Default::default()
        },
        spec_core::SpecError::MissingMarker { path } => ErrorFields {
            unit: id_by_path.get(path).cloned(),
            path: Some(path.clone()),
            ..Default::default()
        },
        // Molecule-test variants: test file -> `path`, test id -> `id`,
        // covered unit id -> `dep`. No `unit` lookup (keys are spec paths).
        spec_core::SpecError::MoleculeCoversNotFound {
            cover_id,
            test_id,
            test_path,
        } => ErrorFields {
            path: Some(test_path.clone()),
            id: Some(test_id.clone()),
            dep: Some(cover_id.clone()),
            ..Default::default()
        },
        spec_core::SpecError::CrossLibraryMoleculeCoverUnsupported {
            cover_id,
            test_id,
            test_path,
        } => ErrorFields {
            path: Some(test_path.clone()),
            id: Some(test_id.clone()),
            dep: Some(cover_id.clone()),
            message: Some(err.to_string()),
            ..Default::default()
        },
        spec_core::SpecError::DuplicateMoleculeTestId { id, file1, file2 } => ErrorFields {
            path: Some(file1.clone()),
            id: Some(id.clone()),
            path2: Some(file2.clone()),
            ..Default::default()
        },
        spec_core::SpecError::MoleculeCoversCollision {
            cover1,
            cover2,
            fn_name,
            test_id,
            test_path,
        } => ErrorFields {
            path: Some(test_path.clone()),
            id: Some(test_id.clone()),
            dep: Some(cover1.clone()),
            value: Some(fn_name.clone()),
            path2: Some(cover2.clone()),
            ..Default::default()
        },
        spec_core::SpecError::MoleculeBodyRustMustBeBlock { message, test_path } => ErrorFields {
            path: Some(test_path.clone()),
            message: Some(message.clone()),
            ..Default::default()
        },
        spec_core::SpecError::MoleculeBodyContainsUnsafe { test_path } => ErrorFields {
            path: Some(test_path.clone()),
            ..Default::default()
        },
        spec_core::SpecError::ReservedUnitName { segment, path } => ErrorFields {
            path: Some(path.clone()),
            value: Some(segment.clone()),
            ..Default::default()
        },
    };
    // Flatten the intermediate ErrorFields into the serialized entry shape.
    JsonErrorEntry {
        unit: fields.unit,
        code,
        path: fields.path,
        dep: fields.dep,
        field: fields.field,
        value: fields.value,
        message: fields.message,
        id: fields.id,
        path2: fields.path2,
        cycle: fields.cycle,
    }
}
fn error_paths(err: &spec_core::SpecError) -> Vec<String> {
match err {
spec_core::SpecError::DuplicateId { file1, file2, .. } => {
vec![file1.clone(), file2.clone()]
}
spec_core::SpecError::InvalidUtf8 { path }
| spec_core::SpecError::YamlParse { path, .. }
| spec_core::SpecError::SchemaValidation { path, .. }
| spec_core::SpecError::SemanticValidation { path, .. }
| spec_core::SpecError::RustKeyword { path, .. }
| spec_core::SpecError::DepCollision { path, .. }
| spec_core::SpecError::MissingDep { path, .. }
| spec_core::SpecError::UnknownLibraryNamespace { path, .. }
| spec_core::SpecError::CrossLibraryDepNotFound { path, .. }
| spec_core::SpecError::LibraryCrateAliasMissing { path, .. }
| spec_core::SpecError::CyclicDep { path, .. }
| spec_core::SpecError::CrossLibraryCycle { path, .. }
| spec_core::SpecError::UseStatementInBody { path }
| spec_core::SpecError::BodyRustMustBeBlock { path, .. }
| spec_core::SpecError::BodyRustLooksLikeFnDeclaration { path }
| spec_core::SpecError::LocalTestExpectNotExpr { path, .. }
| spec_core::SpecError::DuplicateLocalTestId { path, .. }
| spec_core::SpecError::ContractTypeInvalid { path, .. }
| spec_core::SpecError::ContractInputNameInvalid { path, .. }
| spec_core::SpecError::Traversal { path, .. }
| spec_core::SpecError::MissingMarker { path } => vec![path.clone()],
spec_core::SpecError::LibraryCrateManifestError {
cargo_toml: Some(path),
..
} => vec![path.clone()],
spec_core::SpecError::Generator { .. }
| spec_core::SpecError::OutputDir { .. }
| spec_core::SpecError::LibraryCrateManifestError {
cargo_toml: None, ..
}
| spec_core::SpecError::Io(_)
| spec_core::SpecError::Json(_) => Vec::new(),
spec_core::SpecError::MoleculeCoversNotFound { test_path, .. }
| spec_core::SpecError::CrossLibraryMoleculeCoverUnsupported { test_path, .. }
| spec_core::SpecError::MoleculeCoversCollision { test_path, .. }
| spec_core::SpecError::MoleculeBodyRustMustBeBlock { test_path, .. }
| spec_core::SpecError::MoleculeBodyContainsUnsafe { test_path } => {
vec![test_path.clone()]
}
spec_core::SpecError::DuplicateMoleculeTestId { file1, file2, .. } => {
vec![file1.clone(), file2.clone()]
}
spec_core::SpecError::ReservedUnitName { path, .. } => vec![path.clone()],
}
}
fn print_status_unit(unit: &JsonStatusUnit) {
let detail = match unit.status {
HealthState::Invalid => format!(
"({} error{})",
unit.errors.len(),
pluralize(unit.errors.len())
),
_ => match &unit.reason {
Some(r) => r.clone(),
None => match &unit.evidence_at {
Some(ts) => format!("evidence:{ts}"),
None => String::new(),
},
},
};
println!(
"{} {:<32} {:<10} {detail}",
unit.status.symbol(),
unit.id,
unit.status.as_str()
);
if unit.status == HealthState::Invalid {
for entry in &unit.errors {
println!(" · {}", json_error_entry_to_human(entry));
}
}
}
fn json_error_entry_to_human(entry: &JsonErrorEntry) -> String {
if entry.code == "SPEC_DEP_COLLISION"
&& let (Some(dep1), Some(dep2), Some(fn_name)) = (&entry.dep, &entry.path2, &entry.value)
{
return format!(
"{}: '{}' and '{}' both resolve to '{}'",
entry.code, dep1, dep2, fn_name
);
}
if entry.code == "SPEC_MOLECULE_COVERS_COLLISION"
&& let (Some(cover1), Some(cover2), Some(fn_name), Some(test_id)) =
(&entry.dep, &entry.path2, &entry.value, &entry.id)
{
return format!(
"{}: '{}' and '{}' both resolve to '{}' in {}",
entry.code, cover1, cover2, fn_name, test_id
);
}
if let Some(message) = &entry.message {
return format!("{}: {message}", entry.code);
}
if let Some(dep) = &entry.dep {
return format!("{}: dep '{dep}' not found in this spec set", entry.code);
}
if let Some(field) = &entry.field {
if let Some(value) = &entry.value {
return format!("{}: {field}: invalid type '{value}'", entry.code);
}
return format!("{}: {field}", entry.code);
}
if let Some(id) = &entry.id {
if let Some(path2) = &entry.path2 {
return format!("{}: '{id}' also in {path2}", entry.code);
}
return format!("{}: {id}", entry.code);
}
entry.code.clone()
}
fn error_key(err: &spec_core::SpecError) -> String {
match err {
spec_core::SpecError::InvalidUtf8 { path }
| spec_core::SpecError::YamlParse { path, .. }
| spec_core::SpecError::SchemaValidation { path, .. }
| spec_core::SpecError::SemanticValidation { path, .. }
| spec_core::SpecError::RustKeyword { path, .. }
| spec_core::SpecError::DepCollision { path, .. }
| spec_core::SpecError::MissingDep { path, .. }
| spec_core::SpecError::UnknownLibraryNamespace { path, .. }
| spec_core::SpecError::CrossLibraryDepNotFound { path, .. }
| spec_core::SpecError::LibraryCrateAliasMissing { path, .. }
| spec_core::SpecError::CyclicDep { path, .. }
| spec_core::SpecError::CrossLibraryCycle { path, .. }
| spec_core::SpecError::UseStatementInBody { path }
| spec_core::SpecError::BodyRustMustBeBlock { path, .. }
| spec_core::SpecError::BodyRustLooksLikeFnDeclaration { path }
| spec_core::SpecError::LocalTestExpectNotExpr { path, .. }
| spec_core::SpecError::DuplicateLocalTestId { path, .. }
| spec_core::SpecError::ContractTypeInvalid { path, .. }
| spec_core::SpecError::ContractInputNameInvalid { path, .. }
| spec_core::SpecError::Traversal { path, .. }
| spec_core::SpecError::MissingMarker { path } => path.clone(),
spec_core::SpecError::LibraryCrateManifestError {
cargo_toml: Some(path),
..
} => path.clone(),
spec_core::SpecError::DuplicateId { file1, file2, .. } => format!("{file1} | {file2}"),
spec_core::SpecError::Generator { .. } | spec_core::SpecError::OutputDir { .. } => {
"generation".to_string()
}
spec_core::SpecError::Io(_)
| spec_core::SpecError::Json(_)
| spec_core::SpecError::LibraryCrateManifestError {
cargo_toml: None, ..
} => "validation".to_string(),
spec_core::SpecError::MoleculeCoversNotFound { test_path, .. }
| spec_core::SpecError::CrossLibraryMoleculeCoverUnsupported { test_path, .. }
| spec_core::SpecError::MoleculeCoversCollision { test_path, .. }
| spec_core::SpecError::MoleculeBodyRustMustBeBlock { test_path, .. }
| spec_core::SpecError::MoleculeBodyContainsUnsafe { test_path } => test_path.clone(),
spec_core::SpecError::DuplicateMoleculeTestId { file1, file2, .. } => {
format!("{file1} | {file2}")
}
spec_core::SpecError::ReservedUnitName { path, .. } => path.clone(),
}
}
fn warning_key(warning: &spec_core::SpecWarning) -> String {
match warning {
spec_core::SpecWarning::MissingDep { path, .. }
| spec_core::SpecWarning::SymlinkCycleSkipped { path }
| spec_core::SpecWarning::MissingSpecVersion { path, .. } => path.clone(),
spec_core::SpecWarning::MoleculeTestNoCoveredUnits { test_path, .. } => test_path.clone(),
}
}
/// Total number of diagnostic messages across all grouped keys.
fn count_messages(diagnostics: &DiagnosticMap) -> usize {
    diagnostics
        .values()
        .fold(0, |total, messages| total + messages.len())
}
fn count_unique_files(errors: &DiagnosticMap) -> usize {
let mut files = std::collections::BTreeSet::new();
for key in errors.keys() {
for part in key.split(" | ") {
files.insert(part.trim());
}
}
files.len()
}
/// Returns the plural suffix for `count`: empty for exactly one, "s" otherwise.
fn pluralize(count: usize) -> &'static str {
    match count {
        1 => "",
        _ => "s",
    }
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
use std::process::Command as ProcessCommand;
use std::time::Instant;
use tempfile::TempDir;
/// Test helper: writes `body` to `dir/relative_path`, creating any missing
/// parent directories first. Panics on I/O failure (test-only code).
fn write_spec(dir: &Path, relative_path: &str, body: &str) {
    let target = dir.join(relative_path);
    if let Some(parent) = target.parent() {
        fs::create_dir_all(parent).unwrap();
    }
    fs::write(target, body).unwrap();
}
/// Test helper: copies the directory tree at `src` into `dst`.
///
/// Uses an explicit work stack instead of recursion; the resulting tree is
/// identical either way. Panics on I/O failure (test-only code).
fn copy_dir_all(src: &Path, dst: &Path) {
    let mut pending = vec![(src.to_path_buf(), dst.to_path_buf())];
    while let Some((from_dir, to_dir)) = pending.pop() {
        fs::create_dir_all(&to_dir).unwrap();
        for entry in fs::read_dir(&from_dir).unwrap() {
            let entry = entry.unwrap();
            let from = entry.path();
            let to = to_dir.join(entry.file_name());
            if entry.file_type().unwrap().is_dir() {
                // Defer subdirectories so the whole tree gets walked.
                pending.push((from, to));
            } else {
                fs::copy(&from, &to).unwrap();
            }
        }
    }
}
/// Builds one synthetic `LoadedSpec` for benchmarks: a trivial `{ true }`
/// function under `pricing/bench_NNNN` with `tests_per_spec` local tests
/// that all expect `true`.
fn benchmark_loaded_spec(index: usize, tests_per_spec: usize) -> LoadedSpec {
    let id = format!("pricing/bench_{index:04}");
    LoadedSpec {
        source: spec_core::types::SpecSource {
            file_path: format!("units/pricing/bench_{index:04}.unit.spec"),
            id: id.clone(),
        },
        spec: spec_core::types::SpecStruct {
            id,
            kind: "function".to_string(),
            intent: spec_core::types::Intent {
                why: format!("Benchmark unit {index}"),
            },
            contract: None,
            deps: Vec::new(),
            imports: Vec::new(),
            body: spec_core::types::Body {
                rust: "{ true }".to_string(),
            },
            // case_00 .. case_(N-1), each a trivially-true expectation.
            local_tests: (0..tests_per_spec)
                .map(|test_index| spec_core::types::LocalTest {
                    id: format!("case_{test_index:02}"),
                    expect: "true".to_string(),
                })
                .collect(),
            links: None,
            spec_version: None,
        },
    }
}
/// Builds `spec_count` synthetic specs, each with `tests_per_spec` local tests.
fn benchmark_specs(spec_count: usize, tests_per_spec: usize) -> Vec<LoadedSpec> {
    let mut specs = Vec::with_capacity(spec_count);
    for index in 0..spec_count {
        specs.push(benchmark_loaded_spec(index, tests_per_spec));
    }
    specs
}
/// Synthesizes cargo-test stdout for `specs`: one header line, then one
/// "test <name> ... <status>" line per local test, with every 11th case
/// (per spec) marked FAILED.
fn benchmark_stdout(specs: &[LoadedSpec], output: &Path, crate_root: &Path) -> String {
    let output_prefix = output_module_prefix(output, crate_root).unwrap();
    let mut lines = vec!["running synthetic benchmark tests".to_string()];
    for spec in specs {
        let resolved = ResolvedSpec::from_spec(spec.spec.clone());
        lines.extend(spec.spec.local_tests.iter().enumerate().map(
            |(test_index, local_test)| {
                let full_name =
                    expected_cargo_test_name(&resolved, &output_prefix, &local_test.id);
                let status = if test_index % 11 == 0 { "FAILED" } else { "ok" };
                format!("test {full_name} ... {status}")
            },
        ));
    }
    // Every line, including the last, is newline-terminated.
    lines.join("\n") + "\n"
}
/// BTreeMap-backed reference implementation of `parse_cargo_test_output`,
/// kept as the baseline for the manual benchmark below. Do not "optimize"
/// it — the point is comparing the shipping parser against this
/// ordered-map version with identical parsing behavior.
fn parse_cargo_test_output_btree_baseline(
    stdout: &str,
) -> BTreeMap<String, ParsedCargoTestResult> {
    let mut results: BTreeMap<String, ParsedCargoTestResult> = BTreeMap::new();
    for line in stdout.lines() {
        // Only lines shaped like "test <name> ... <status>" are relevant.
        let Some(rest) = line.strip_prefix("test ") else {
            continue;
        };
        let Some((full_name, terminal_status)) = rest.split_once(" ... ") else {
            continue;
        };
        let parsed = match terminal_status.trim() {
            "ok" => ParsedCargoTestResult {
                status: "pass".to_string(),
                reason: None,
            },
            "FAILED" => ParsedCargoTestResult {
                status: "fail".to_string(),
                reason: None,
            },
            // Any other terminal status is surfaced as an error with the
            // raw status text preserved as the reason.
            other => ParsedCargoTestResult {
                status: "error".to_string(),
                reason: Some(other.to_string()),
            },
        };
        // A test name seen more than once is ambiguous: demote the stored
        // entry to an error instead of silently overwriting it.
        match results.get_mut(full_name) {
            Some(existing) => {
                existing.status = "error".to_string();
                existing.reason = Some("multiple matching cargo results".to_string());
            }
            None => {
                results.insert(full_name.to_string(), parsed);
            }
        }
    }
    results
}
/// BTreeMap-backed reference implementation of `build_test_evidence`, kept
/// as the baseline for the manual benchmark below — behavior must mirror
/// the shipping HashMap version exactly so the two can be compared.
fn build_test_evidence_btree_baseline(
    specs: &[LoadedSpec],
    output_prefix: &str,
    parsed_test_results: &BTreeMap<String, ParsedCargoTestResult>,
    observed_at: &str,
    provenance: Option<&ArtifactProvenance>,
) -> Result<BTreeMap<String, PassportEvidence>> {
    let mut evidence_by_spec = BTreeMap::new();
    for spec in specs {
        let resolved = ResolvedSpec::from_spec(spec.spec.clone());
        let mut test_results = Vec::new();
        for local_test in &spec.spec.local_tests {
            // Look up each local test by its fully-qualified cargo test name.
            let full_name = expected_cargo_test_name(&resolved, output_prefix, &local_test.id);
            let observed = parsed_test_results.get(&full_name);
            let (status, reason) = match observed {
                Some(result) => (result.status.clone(), result.reason.clone()),
                // Tests absent from cargo output get an explicit "unknown".
                None => (
                    "unknown".to_string(),
                    Some("test not found in cargo output".to_string()),
                ),
            };
            test_results.push(PassportTestResult {
                id: local_test.id.clone(),
                status,
                reason,
            });
        }
        evidence_by_spec.insert(
            spec.spec.id.clone(),
            PassportEvidence {
                build_status: "pass".to_string(),
                test_results,
                observed_at: observed_at.to_string(),
                provenance: provenance.cloned(),
            },
        );
    }
    Ok(evidence_by_spec)
}
#[test]
fn generate_command_bootstraps_marker_and_writes_files() {
    // TempDir is created inside the current dir so the generated output
    // stays under the crate root that generate_command validates against.
    let temp_dir = TempDir::new_in(std::env::current_dir().unwrap()).unwrap();
    let units_dir = temp_dir.path().join("units");
    let output_dir = temp_dir.path().join("generated/spec");
    write_spec(
        &units_dir,
        "pricing/apply_discount.unit.spec",
        r#"
id: pricing/apply_discount
kind: function
intent:
  why: Apply a discount.
body:
  rust: |
    {
      round(Decimal::ZERO)
    }
"#,
    );
    generate_command(&units_dir, Some(&output_dir)).unwrap();
    // The marker file plus per-unit and per-module outputs must all exist.
    assert!(output_dir.join(".spec-generated").exists());
    assert!(output_dir.join("pricing/apply_discount.rs").exists());
    assert!(output_dir.join("pricing/mod.rs").exists());
    assert!(output_dir.join("mod.rs").exists());
}
#[test]
fn validate_command_collects_directory_errors() {
    // One valid spec and one spec with multiple problems (use statement in
    // body, Rust keyword as unit name, unknown top-level field): the
    // validate command must fail and report errors.
    let temp_dir = TempDir::new().unwrap();
    let units_dir = temp_dir.path().join("units");
    write_spec(
        &units_dir,
        "pricing/good.unit.spec",
        r#"
id: pricing/apply_discount
kind: function
intent:
  why: Apply a discount.
body:
  rust: |
    pub fn apply_discount() {}
"#,
    );
    write_spec(
        &units_dir,
        "pricing/bad.unit.spec",
        r#"
id: pricing/type
kind: function
intent:
  why: Should fail.
body:
  rust: |
    use std::fmt;
    pub fn type() {}
extra_field: nope
"#,
    );
    let result = validate_command(&units_dir, false, OutputFormat::Text);
    assert!(result.is_err());
    // "{:#}" renders the full anyhow error chain.
    let error_text = format!("{:#}", result.unwrap_err());
    assert!(error_text.contains("error"));
}
#[test]
fn generate_command_writes_doc_comments_for_ecommerce_units() {
    // Copies the examples/ecommerce fixture into a temp dir (inside the
    // current dir so outputs stay under the crate root) and checks that
    // generation emits rustdoc comments ahead of the generated fn.
    let temp_dir = TempDir::new_in(std::env::current_dir().unwrap()).unwrap();
    let repo_root = Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .to_path_buf();
    let fixture_src = repo_root.join("examples/ecommerce");
    let fixture_dst = temp_dir.path().join("ecommerce");
    copy_dir_all(&fixture_src, &fixture_dst);
    let units_dir = fixture_dst.join("units");
    let output_dir = fixture_dst.join("src/generated");
    generate_command(&units_dir, Some(&output_dir)).unwrap();
    let apply_tax = fs::read_to_string(output_dir.join("pricing/apply_tax.rs")).unwrap();
    assert!(apply_tax.contains(
        "/// Add sales tax to a subtotal using a rate expressed as a decimal fraction.\n"
    ));
    assert!(apply_tax.contains("pub fn apply_tax("));
}
#[test]
fn generate_specs_rejects_reserved_molecule_tests_namespace_segment() {
    // A unit spec living under a "molecule_tests" namespace segment collides
    // with the generator's reserved molecule-test output module; validation
    // must reject it before any files are written.
    let temp_dir = TempDir::new_in(std::env::current_dir().unwrap()).unwrap();
    let units_dir = temp_dir.path().join("units");
    let output_dir = temp_dir.path().join("generated/spec");
    write_spec(
        &units_dir,
        "qa/molecule_tests/foo.unit.spec",
        r#"
id: qa/molecule_tests/foo
kind: function
intent:
  why: Reproduce reserved namespace collision.
body:
  rust: |
    {
      true
    }
"#,
    );
    write_spec(
        &units_dir,
        "qa/flow.test.spec",
        r#"
id: qa/flow
intent:
  why: Exercise qa molecule test generation.
covers:
  - qa/molecule_tests/foo
body:
  rust: |
    {
      assert!(foo());
    }
"#,
    );
    let (specs, loader_errors, loader_warnings, total_files) =
        collect_specs(&units_dir).unwrap();
    assert_eq!(loader_errors.len(), 0);
    assert_eq!(loader_warnings.len(), 0);
    // Only the .unit.spec counts; the .test.spec is loaded separately.
    assert_eq!(total_files, 1);
    let validation_options = ValidationOptions {
        strict_deps: true,
        allow_unsafe_local_test_expect: false,
    };
    let (validation_errors, _validation_warnings) =
        finish_validation(&specs, &validation_options);
    assert!(
        validation_errors.iter().any(|err| {
            matches!(err, spec_core::SpecError::ReservedUnitName { segment, .. } if segment == "molecule_tests")
                && spec_error_code(err) == "SPEC_RESERVED_UNIT_NAME"
        }),
        "expected SPEC_RESERVED_UNIT_NAME, got: {validation_errors:?}"
    );
    let err = match generate_specs(&units_dir, &output_dir) {
        Ok(_) => panic!("expected reserved namespace validation to fail"),
        Err(err) => err.to_string(),
    };
    assert!(
        err.contains("1 error"),
        "expected generation to fail before output, got: {err}"
    );
    // Failure must happen before either conflicting output path is created.
    assert!(
        !output_dir.join("qa/molecule_tests.rs").exists(),
        "generator should fail before writing molecule_tests.rs"
    );
    assert!(
        !output_dir.join("qa/molecule_tests/mod.rs").exists(),
        "generator should fail before writing conflicting module output"
    );
}
#[test]
fn cargo_doc_succeeds_for_generated_ecommerce_docs() {
    // Skips silently when cargo is not on PATH (e.g. minimal CI images).
    if !cargo_available() {
        return;
    }
    let temp_dir = TempDir::new_in(std::env::current_dir().unwrap()).unwrap();
    let repo_root = Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .to_path_buf();
    let fixture_src = repo_root.join("examples/ecommerce");
    let fixture_dst = temp_dir.path().join("ecommerce");
    copy_dir_all(&fixture_src, &fixture_dst);
    let units_dir = fixture_dst.join("units");
    let output_dir = fixture_dst.join("src/generated");
    generate_command(&units_dir, Some(&output_dir)).unwrap();
    // Run cargo doc against the generated fixture with an isolated target
    // dir so the run does not pollute (or race with) the real build.
    let output = ProcessCommand::new("cargo")
        .current_dir(&fixture_dst)
        .env("CARGO_TARGET_DIR", temp_dir.path().join("cargo-target"))
        .env("CARGO_TERM_COLOR", "never")
        .args(["doc", "--no-deps"])
        .output()
        .unwrap();
    assert!(
        output.status.success(),
        "cargo doc failed\nstdout:\n{}\nstderr:\n{}",
        String::from_utf8_lossy(&output.stdout),
        String::from_utf8_lossy(&output.stderr)
    );
}
#[test]
fn spec_error_code_namespace_is_stable_and_exhaustive_for_current_variants() {
    // One instance of every SpecError variant. If a variant is missing here
    // the uniqueness assertion below cannot police its code, so keep this
    // list in sync with the enum. (CrossLibraryMoleculeCoverUnsupported was
    // previously missing from this list.)
    let io_error = std::io::Error::other("boom");
    let json_error = serde_json::from_str::<serde_json::Value>("{").unwrap_err();
    let errors = vec![
        spec_core::SpecError::Io(io_error),
        spec_core::SpecError::InvalidUtf8 {
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::YamlParse {
            message: "bad yaml".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::Json(json_error),
        spec_core::SpecError::SchemaValidation {
            message: "bad schema".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::SemanticValidation {
            message: "bad semantics".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::RustKeyword {
            segment: "type".to_string(),
            id: "pricing/type".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::DuplicateId {
            id: "pricing/apply_discount".to_string(),
            file1: "units/a.unit.spec".to_string(),
            file2: "units/b.unit.spec".to_string(),
        },
        spec_core::SpecError::DepCollision {
            dep1: "money/round".to_string(),
            dep2: "money/format".to_string(),
            fn_name: "money".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::MissingDep {
            dep: "money/round".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::UnknownLibraryNamespace {
            alias: "shared".to_string(),
            dep: "shared::money/round".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::CrossLibraryDepNotFound {
            dep: "shared::money/round".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::LibraryCrateAliasMissing {
            alias: "shared".to_string(),
            cargo_toml: "Cargo.toml".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::LibraryCrateManifestError {
            cargo_toml: Some("Cargo.toml".to_string()),
            message: "Failed to parse Cargo.toml".to_string(),
        },
        spec_core::SpecError::CyclicDep {
            cycle_path: vec!["a".to_string(), "b".to_string()],
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::CrossLibraryCycle {
            cycle_path: vec!["a".to_string(), "shared::b".to_string()],
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::UseStatementInBody {
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::BodyRustMustBeBlock {
            message: "expected block".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::BodyRustLooksLikeFnDeclaration {
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::LocalTestExpectNotExpr {
            id: "happy_path".to_string(),
            message: "not expr".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::DuplicateLocalTestId {
            id: "happy_path".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::ContractTypeInvalid {
            field: "contract.returns".to_string(),
            type_str: "Vec<".to_string(),
            message: "bad type".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::ContractInputNameInvalid {
            name: "bad-name".to_string(),
            message: "bad identifier".to_string(),
            path: "units/a.unit.spec".to_string(),
        },
        spec_core::SpecError::Traversal {
            message: "walk failed".to_string(),
            path: "units".to_string(),
        },
        spec_core::SpecError::Generator {
            message: "gen failed".to_string(),
        },
        spec_core::SpecError::OutputDir {
            message: "outside root".to_string(),
        },
        spec_core::SpecError::MissingMarker {
            path: "generated/spec".to_string(),
        },
        spec_core::SpecError::MoleculeCoversNotFound {
            cover_id: "pricing/apply_discount".to_string(),
            test_id: "pricing/discount_flow".to_string(),
            test_path: "units/pricing/discount_flow.test.spec".to_string(),
        },
        spec_core::SpecError::CrossLibraryMoleculeCoverUnsupported {
            cover_id: "shared::pricing/apply_discount".to_string(),
            test_id: "pricing/discount_flow".to_string(),
            test_path: "units/pricing/discount_flow.test.spec".to_string(),
        },
        spec_core::SpecError::DuplicateMoleculeTestId {
            id: "pricing/discount_flow".to_string(),
            file1: "units/pricing/a.test.spec".to_string(),
            file2: "units/pricing/b.test.spec".to_string(),
        },
        spec_core::SpecError::MoleculeCoversCollision {
            cover1: "money/round".to_string(),
            cover2: "utils/round".to_string(),
            fn_name: "round".to_string(),
            test_id: "pricing/discount_flow".to_string(),
            test_path: "units/pricing/discount_flow.test.spec".to_string(),
        },
        spec_core::SpecError::MoleculeBodyRustMustBeBlock {
            message: "expected block".to_string(),
            test_path: "units/pricing/discount_flow.test.spec".to_string(),
        },
        spec_core::SpecError::MoleculeBodyContainsUnsafe {
            test_path: "units/pricing/discount_flow.test.spec".to_string(),
        },
        spec_core::SpecError::ReservedUnitName {
            segment: "molecule_tests".to_string(),
            path: "units/pricing/molecule_tests.unit.spec".to_string(),
        },
    ];
    // Every code must be unique (set size equals list size), live in the
    // SPEC_ namespace, and be non-empty.
    let codes = errors
        .iter()
        .map(spec_error_code)
        .collect::<std::collections::HashSet<_>>();
    assert_eq!(codes.len(), errors.len());
    assert!(codes.iter().all(|code| code.starts_with("SPEC_")));
    assert!(codes.iter().all(|code| !code.is_empty()));
}
#[test]
fn spec_error_to_json_entry_preserves_multi_field_variants() {
    // Verifies the field-mapping conventions for the three variants that
    // populate more than two JSON fields: DuplicateId, DepCollision, and
    // MoleculeCoversCollision.
    let mut id_by_path = HashMap::new();
    id_by_path.insert(
        "units/pricing/apply_discount.unit.spec".to_string(),
        "pricing/apply_discount".to_string(),
    );
    let duplicate = spec_error_to_json_entry(
        &spec_core::SpecError::DuplicateId {
            id: "pricing/apply_discount".to_string(),
            file1: "units/pricing/apply_discount.unit.spec".to_string(),
            file2: "units/pricing/apply_tax.unit.spec".to_string(),
        },
        &id_by_path,
    );
    // file1 resolves the unit id; file2 lands in path2.
    assert_eq!(duplicate.unit.as_deref(), Some("pricing/apply_discount"));
    assert_eq!(
        duplicate.path.as_deref(),
        Some("units/pricing/apply_discount.unit.spec")
    );
    assert_eq!(duplicate.id.as_deref(), Some("pricing/apply_discount"));
    assert_eq!(
        duplicate.path2.as_deref(),
        Some("units/pricing/apply_tax.unit.spec")
    );
    let dep_collision = spec_error_to_json_entry(
        &spec_core::SpecError::DepCollision {
            dep1: "money/round".to_string(),
            dep2: "money/format".to_string(),
            fn_name: "money".to_string(),
            path: "units/pricing/apply_discount.unit.spec".to_string(),
        },
        &id_by_path,
    );
    // Collision convention: dep1 -> dep, fn_name -> value, dep2 -> path2.
    assert_eq!(dep_collision.dep.as_deref(), Some("money/round"));
    assert_eq!(dep_collision.value.as_deref(), Some("money"));
    assert_eq!(dep_collision.path2.as_deref(), Some("money/format"));
    let molecule_collision = spec_error_to_json_entry(
        &spec_core::SpecError::MoleculeCoversCollision {
            cover1: "money/round".to_string(),
            cover2: "utils/round".to_string(),
            fn_name: "round".to_string(),
            test_id: "pricing/rounding_flow".to_string(),
            test_path: "units/pricing/rounding_flow.test.spec".to_string(),
        },
        &id_by_path,
    );
    assert_eq!(
        molecule_collision.path.as_deref(),
        Some("units/pricing/rounding_flow.test.spec")
    );
    assert_eq!(
        molecule_collision.id.as_deref(),
        Some("pricing/rounding_flow")
    );
    assert_eq!(molecule_collision.dep.as_deref(), Some("money/round"));
    assert_eq!(molecule_collision.value.as_deref(), Some("round"));
    assert_eq!(molecule_collision.path2.as_deref(), Some("utils/round"));
}
#[test]
fn workspace_config_error_json_entry_uses_stable_codes_and_config_path() {
    // Each workspace-config error variant must map to a stable SPEC_ code,
    // carry the config file as its path, have no unit, and pass its detail
    // message through verbatim.
    let config_path = PathBuf::from("/tmp/spec.toml");
    let cases = vec![
        (
            WorkspaceConfigError::LibraryPathNotFound {
                config_path: config_path.clone(),
                alias: "shared".to_string(),
                candidate: PathBuf::from("/tmp/missing-spec"),
            },
            "SPEC_LIBRARY_PATH_NOT_FOUND",
        ),
        (
            WorkspaceConfigError::LibraryOutOfRoot {
                config_path: config_path.clone(),
                alias: "shared".to_string(),
                resolved_root: PathBuf::from("/outside/shared-spec"),
            },
            "SPEC_LIBRARY_OUT_OF_ROOT",
        ),
        (
            WorkspaceConfigError::LibraryAliasSelf {
                config_path: config_path.clone(),
                alias: "app".to_string(),
            },
            "SPEC_LIBRARY_ALIAS_SELF",
        ),
        (
            WorkspaceConfigError::DuplicateLibraryRoot {
                config_path: config_path.clone(),
                existing_alias: "shared".to_string(),
                alias: "shared_copy".to_string(),
                resolved_root: PathBuf::from("/repo/shared-spec"),
            },
            "SPEC_DUPLICATE_LIBRARY_ROOT",
        ),
    ];
    for (err, expected_code) in cases {
        let entry = workspace_config_error_to_json_entry(&err);
        assert_eq!(entry.unit, None);
        assert_eq!(entry.code, expected_code);
        assert_eq!(entry.path.as_deref(), Some("/tmp/spec.toml"));
        assert_eq!(
            entry.message.as_deref(),
            Some(err.detail_message().as_str())
        );
    }
}
#[test]
fn output_module_prefix_absolute_crate_root_strips_src() {
    // Absolute output paths under <crate_root>/src become `::`-joined module
    // prefixes with the leading `src` component dropped.
    let crate_root = Path::new("/home/user/myproject");
    let cases = [
        ("/home/user/myproject/src/generated", "generated"),
        ("/home/user/myproject/src/generated/spec", "generated::spec"),
        ("/home/user/myproject/src/api/gen", "api::gen"),
    ];
    for (input, expected) in cases {
        assert_eq!(
            output_module_prefix(&PathBuf::from(input), crate_root).unwrap(),
            expected
        );
    }
}
#[test]
fn output_module_prefix_relative_path_fallback_strips_src_component() {
    // With an empty crate root, relative paths still have their leading
    // `src` component stripped before module-joining.
    let crate_root = Path::new("");
    let cases = [
        ("src/generated", "generated"),
        ("src/generated/spec", "generated::spec"),
    ];
    for (input, expected) in cases {
        assert_eq!(
            output_module_prefix(Path::new(input), crate_root).unwrap(),
            expected
        );
    }
}
#[test]
fn output_module_prefix_no_src_prefix_preserved() {
    // A path that does not start with `src` is used as-is.
    let crate_root = Path::new("/home/user/myproject");
    let prefix = output_module_prefix(Path::new("generated"), crate_root).unwrap();
    assert_eq!(prefix, "generated");
}
#[test]
fn build_test_evidence_preserves_found_missing_and_duplicate_statuses() {
    // Three local tests, three outcomes: case_00 found passing, case_01
    // found but marked error (duplicate cargo results), case_02 absent from
    // cargo output and therefore "unknown".
    let output = Path::new("src/generated");
    let crate_root = Path::new("");
    let spec = benchmark_loaded_spec(0, 3);
    let resolved = ResolvedSpec::from_spec(spec.spec.clone());
    let output_prefix = output_module_prefix(output, crate_root).unwrap();
    let mut parsed_test_results = HashMap::new();
    parsed_test_results.insert(
        expected_cargo_test_name(&resolved, &output_prefix, "case_00"),
        ParsedCargoTestResult {
            status: "pass".to_string(),
            reason: None,
        },
    );
    parsed_test_results.insert(
        expected_cargo_test_name(&resolved, &output_prefix, "case_01"),
        ParsedCargoTestResult {
            status: "error".to_string(),
            reason: Some("multiple matching cargo results".to_string()),
        },
    );
    let evidence = build_test_evidence(
        std::slice::from_ref(&spec),
        &output_prefix,
        &parsed_test_results,
        "2026-04-11T12:00:00Z",
        None,
    )
    .unwrap();
    let test_results = &evidence["pricing/bench_0000"].test_results;
    assert_eq!(test_results[0].status, "pass");
    assert_eq!(test_results[0].reason, None);
    assert_eq!(test_results[1].status, "error");
    assert_eq!(
        test_results[1].reason.as_deref(),
        Some("multiple matching cargo results")
    );
    assert_eq!(test_results[2].status, "unknown");
    assert_eq!(
        test_results[2].reason.as_deref(),
        Some("test not found in cargo output")
    );
}
#[test]
#[ignore = "manual benchmark for Priority 4 parse/evidence ship gate"]
fn benchmark_parse_and_evidence_hash_lookup_against_btree_baseline() {
let output = Path::new("src/generated");
let crate_root = Path::new("");
let output_prefix = output_module_prefix(output, crate_root).unwrap();
let specs = benchmark_specs(600, 8);
let stdout = benchmark_stdout(&specs, output, crate_root);
let observed_at = "2026-04-11T12:00:00Z";
let baseline_evidence = build_test_evidence_btree_baseline(
&specs,
&output_prefix,
&parse_cargo_test_output_btree_baseline(&stdout),
observed_at,
None,
)
.unwrap();
let hash_evidence = build_test_evidence(
&specs,
&output_prefix,
&parse_cargo_test_output(&stdout),
observed_at,
None,
)
.unwrap();
assert_eq!(hash_evidence, baseline_evidence);
const ITERS: usize = 75;
for _ in 0..5 {
let _ = std::hint::black_box(parse_cargo_test_output_btree_baseline(&stdout));
let _ = std::hint::black_box(parse_cargo_test_output(&stdout));
}
let btree_started = Instant::now();
for _ in 0..ITERS {
let parsed = parse_cargo_test_output_btree_baseline(std::hint::black_box(&stdout));
let evidence = build_test_evidence_btree_baseline(
&specs,
&output_prefix,
&parsed,
observed_at,
None,
)
.unwrap();
std::hint::black_box(evidence);
}
let btree_elapsed = btree_started.elapsed();
let hash_started = Instant::now();
for _ in 0..ITERS {
let parsed = parse_cargo_test_output(std::hint::black_box(&stdout));
let evidence =
build_test_evidence(&specs, &output_prefix, &parsed, observed_at, None).unwrap();
std::hint::black_box(evidence);
}
let hash_elapsed = hash_started.elapsed();
let speedup = btree_elapsed.as_secs_f64() / hash_elapsed.as_secs_f64();
eprintln!(
"Priority 4 benchmark: btree={btree_elapsed:?}, hash={hash_elapsed:?}, speedup={speedup:.2}x, specs={}, tests_per_spec={}",
specs.len(),
specs[0].spec.local_tests.len()
);
}
#[test]
fn concurrent_passport_write_guard_detects_other_active_writer() {
let temp_dir = TempDir::new().unwrap();
let passport_root = temp_dir.path().join("units");
fs::create_dir_all(&passport_root).unwrap();
let registry_dir = concurrent_passport_writer_registry_dir(&passport_root, temp_dir.path());
fs::create_dir_all(®istry_dir).unwrap();
fs::write(
registry_dir.join(concurrent_passport_writer_marker_name(7, SystemTime::now())),
"",
)
.unwrap();
let (_guard, other_writers) = ConcurrentPassportWriteGuard::begin_in(
&passport_root,
temp_dir.path(),
42,
SystemTime::now(),
)
.unwrap();
assert_eq!(other_writers, 1);
let warning =
concurrent_passport_write_warning_message(&passport_root, other_writers).unwrap();
assert!(warning.contains("1 other spec process"), "{warning}");
assert!(
warning.contains(passport_root.to_str().unwrap()),
"{warning}"
);
}
#[test]
fn concurrent_passport_write_guard_ignores_stale_markers() {
let temp_dir = TempDir::new().unwrap();
let passport_root = temp_dir.path().join("units");
fs::create_dir_all(&passport_root).unwrap();
let stale_now = UNIX_EPOCH + Duration::from_secs(10);
let registry_dir = concurrent_passport_writer_registry_dir(&passport_root, temp_dir.path());
fs::create_dir_all(®istry_dir).unwrap();
fs::write(
registry_dir.join(concurrent_passport_writer_marker_name(7, stale_now)),
"",
)
.unwrap();
let (_guard, other_writers) = ConcurrentPassportWriteGuard::begin_in(
&passport_root,
temp_dir.path(),
42,
UNIX_EPOCH + Duration::from_secs(10 + CONCURRENT_PASSPORT_WRITER_TTL_SECS + 1),
)
.unwrap();
assert_eq!(other_writers, 0);
}
}