use std::path::PathBuf;
use anyhow::{Result, bail};
use clap::{Parser, Subcommand};
use zpl_toolchain_spec_compiler::{SCHEMA_VERSION, pipeline, write_json_pretty};
/// Top-level CLI definition for the ZPL spec compiler binary.
#[derive(Parser, Debug)]
#[command(name = "zpl-spec-compiler", version)]
struct Cli {
/// The subcommand to run (see `Cmd`).
#[command(subcommand)]
cmd: Cmd,
}
/// Subcommands supported by the spec compiler CLI.
#[derive(Subcommand, Debug)]
enum Cmd {
    /// Load and validate the spec files, printing a JSON summary; no output
    /// artifacts are generated.
    Check {
        /// Directory containing the spec files.
        #[arg(long, default_value = "spec")]
        spec_dir: PathBuf,
    },
    /// Load and validate the spec files, then generate all output bundles.
    Build {
        /// Directory containing the spec files.
        #[arg(long, default_value = "spec")]
        spec_dir: PathBuf,
        /// Directory where the generated JSON artifacts are written.
        #[arg(long, default_value = "generated")]
        out_dir: PathBuf,
        /// Abort the build if any cross-field validation issue is found.
        #[arg(long)]
        strict: bool,
    },
    /// Audit command notes and report findings.
    NoteAudit {
        /// Directory containing the spec files.
        #[arg(long, default_value = "spec")]
        spec_dir: PathBuf,
        /// Output format ("json" or "human").
        #[arg(long, default_value = "json")]
        format: String,
        /// Exit successfully even when findings are present.
        // `default_value_t = false` was redundant here: clap derives a
        // `SetTrue` flag with a false default for plain `bool` fields.
        // Dropped to match the `strict` flag on `Build`.
        #[arg(long)]
        allow_findings: bool,
    },
}
/// Entry point: parse command-line arguments and dispatch to the handler
/// for the selected subcommand, propagating its result as the exit status.
fn main() -> Result<()> {
    // The match is the tail expression; each handler already returns
    // `Result<()>`, so no explicit `?` + `Ok(())` wrapper is needed.
    match Cli::parse().cmd {
        Cmd::Check { spec_dir } => check(spec_dir),
        Cmd::Build {
            spec_dir,
            out_dir,
            strict,
        } => build(spec_dir, out_dir, strict),
        Cmd::NoteAudit {
            spec_dir,
            format,
            allow_findings,
        } => note_audit(spec_dir, &format, allow_findings),
    }
}
/// Ensure that exactly one `schemaVersion` value was discovered while
/// loading specs and that it matches the compiler's `SCHEMA_VERSION`.
///
/// # Errors
/// Fails when no version was found, when multiple distinct versions are
/// mixed, or when the single version differs from `SCHEMA_VERSION`.
fn validate_schema_versions(versions: &std::collections::BTreeSet<String>) -> Result<()> {
    if versions.is_empty() {
        bail!("no schemaVersion values were discovered while loading specs");
    }
    if versions.len() > 1 {
        // Borrow the strings for the join instead of cloning each one.
        let found = versions
            .iter()
            .map(String::as_str)
            .collect::<Vec<_>>()
            .join(", ");
        bail!(
            "mixed schemaVersion values are not allowed (expected a single version, found: {found})"
        );
    }
    let only = versions.iter().next().expect("checked non-empty");
    if only != SCHEMA_VERSION {
        bail!("unexpected schemaVersion '{only}' (expected '{SCHEMA_VERSION}')");
    }
    Ok(())
}
/// Load the spec files, run cross-field validation, print a JSON summary to
/// stdout, and exit with status 1 when any validation issue was found.
fn check(spec_dir: PathBuf) -> Result<()> {
    let loaded = pipeline::load_spec_files(&spec_dir)?;
    eprintln!(
        "loaded {} command(s) from {:?}",
        loaded.commands.len(),
        spec_dir
    );
    validate_schema_versions(&loaded.schema_versions)?;
    let validation_errors = pipeline::validate_cross_field(&loaded.commands, &spec_dir);
    // Emit one warning line per individual issue, tallying as we go.
    let mut issue_count = 0usize;
    for entry in &validation_errors {
        for message in &entry.errors {
            eprintln!("warn [{}]: {}", entry.code, message);
            issue_count += 1;
        }
    }
    let ok = validation_errors.is_empty();
    let summary = serde_json::json!({
        "ok": ok,
        "commands_loaded": loaded.commands.len(),
        "schema_versions": loaded.schema_versions.iter().cloned().collect::<Vec<_>>(),
        "validation_issues": issue_count,
        "commands_with_issues": validation_errors.len(),
    });
    println!("{summary}");
    if !ok {
        // Non-zero exit lets CI gate on validation failures; the JSON
        // summary above has already been printed at this point.
        std::process::exit(1);
    }
    Ok(())
}
fn build(spec_dir: PathBuf, out_dir: PathBuf, strict: bool) -> Result<()> {
let loaded = pipeline::load_spec_files(&spec_dir)?;
validate_schema_versions(&loaded.schema_versions)?;
let validation_errors = pipeline::validate_cross_field(&loaded.commands, &spec_dir);
for ve in &validation_errors {
for err in &ve.errors {
eprintln!("warn [{}]: {}", ve.code, err);
}
}
if strict && !validation_errors.is_empty() {
anyhow::bail!(
"strict mode: {} command(s) with validation issues",
validation_errors.len()
);
}
let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.parent()
.and_then(|p| p.parent())
.expect("spec-compiler manifest should be nested under workspace/crates/spec-compiler")
.to_path_buf();
let master_codes_path = workspace_root.join("docs/public/schema/zpl-commands.jsonc");
let master_codes = pipeline::load_master_codes(
master_codes_path
.to_str()
.expect("master codes path must be valid UTF-8"),
);
let tables = pipeline::generate_tables(&loaded.commands, &loaded.schema_versions)?;
let docs_bundle =
pipeline::generate_docs_bundle(&loaded.commands, &loaded.schema_versions, &master_codes)?;
write_json_pretty(out_dir.join("docs_bundle.json"), &docs_bundle)?;
let constraints_bundle =
pipeline::generate_constraints_bundle(&loaded.commands, &loaded.schema_versions)?;
write_json_pretty(out_dir.join("constraints_bundle.json"), &constraints_bundle)?;
let coverage = pipeline::generate_coverage(
&loaded.commands,
&loaded.schema_versions,
&master_codes,
&validation_errors,
);
write_json_pretty(out_dir.join("coverage.json"), &coverage)?;
let state_keys = pipeline::generate_state_keys(&loaded.commands, &loaded.schema_versions);
write_json_pretty(out_dir.join("state_keys.json"), &state_keys)?;
write_json_pretty(out_dir.join("parser_tables.json"), &tables)?;
println!("{}", serde_json::json!({"ok": true}));
Ok(())
}
/// Audit the notes attached to each loaded command. Findings are printed
/// either as a single JSON payload on stdout (default) or as human-readable
/// warnings on stderr when `format == "human"`. Exits with status 1 when
/// findings exist and `allow_findings` is false.
fn note_audit(spec_dir: PathBuf, format: &str, allow_findings: bool) -> Result<()> {
    let loaded = pipeline::load_spec_files(&spec_dir)?;
    let findings = pipeline::audit_notes(&loaded.commands);
    let clean = findings.is_empty();
    let payload = serde_json::json!({
        "ok": clean,
        "commands_loaded": loaded.commands.len(),
        "findings": findings,
    });
    match format {
        "human" => {
            for f in &findings {
                eprintln!(
                    "warn [{}] {} {}: {}",
                    f.level, f.code, f.location, f.message
                );
            }
        }
        // Any non-"human" value falls through to JSON output.
        _ => println!("{payload}"),
    }
    if !(allow_findings || clean) {
        // Findings are fatal unless the caller explicitly allows them.
        std::process::exit(1);
    }
    Ok(())
}