use clap::{Parser, Subcommand};
use horkos::errors::{Diagnostic, ErrorCode};
use horkos::{
compile_and_extract, extract_exports, CompileOptions, GlobalSymbolTable, OutputFormat, Project,
};
use owo_colors::OwoColorize;
use std::path::PathBuf;
use std::process::ExitCode;
// Top-level CLI definition, parsed via clap's derive API.
//
// NOTE: comments in this struct (and in `Commands`) are deliberately `//`
// rather than `///` — clap turns doc comments on derive types/fields into
// help/about text, so doc comments here would change the program's --help
// output at runtime.
#[derive(Parser)]
#[command(name = "horkos")]
#[command(
    author,
    version,
    about = "Infrastructure language where insecure code won't compile"
)]
#[command(
    long_about = "Horkos is a statically typed infrastructure language that compiles to Terraform HCL.\n\n\
        It enforces security invariants at compile time, making insecure configurations\n\
        impossible to express without explicit acknowledgment."
)]
struct Cli {
    // The selected subcommand; clap rejects an invocation without one.
    #[command(subcommand)]
    command: Commands,
}
// The horkos subcommands.
//
// `//` comments are used instead of `///` throughout: clap's derive macro
// lifts doc comments into --help text, and that output must not change.
#[derive(Subcommand)]
enum Commands {
    // Compile `.hk` sources (a single file or a whole project) to Terraform.
    Compile {
        // Input file or project directory; current directory when omitted.
        #[arg()]
        input: Option<PathBuf>,
        // Directory that receives the generated Terraform files.
        #[arg(short, long, default_value = "terraform")]
        output: PathBuf,
        // Target environment name stamped into the generated provider config.
        #[arg(short, long, default_value = "production")]
        target: String,
        // Output format; parsing/semantics live in the library's OutputFormat.
        #[arg(long, default_value = "hcl")]
        format: OutputFormat,
        // Print generated output to stdout instead of writing files.
        #[arg(long)]
        dry_run: bool,
        // Hidden developer switches forwarded to CompileOptions.
        #[arg(long, hide = true)]
        debug_ast: bool,
        #[arg(long, hide = true)]
        debug_tokens: bool,
        // Suppress informational preferred-override notices.
        #[arg(short, long)]
        quiet: bool,
        // Treat preferred-setting overrides as compile errors.
        #[arg(long)]
        strict: bool,
        // Forwarded to CompileOptions; exact semantics defined by the library.
        #[arg(long)]
        no_hcl: bool,
    },
    // Type-check a file or project without generating output.
    Check {
        #[arg()]
        input: Option<PathBuf>,
    },
    // Scaffold a new project (src/main.hk plus .gitignore).
    Init {
        #[arg(default_value = ".")]
        path: PathBuf,
    },
    // Report unsafe(...) blocks found under a path.
    Audit {
        #[arg(default_value = ".")]
        path: PathBuf,
        // "json" for machine-readable output; any other value prints text.
        #[arg(long, default_value = "text")]
        format: String,
    },
}
fn main() -> ExitCode {
let cli = Cli::parse();
let result = match cli.command {
Commands::Compile {
input,
output,
target,
format,
dry_run,
debug_ast,
debug_tokens,
quiet,
strict,
no_hcl,
} => {
let options = CompileOptions {
output_dir: output.clone(),
target,
format,
dry_run,
debug_ast,
debug_tokens,
no_hcl,
};
let is_single_file = input
.as_ref()
.is_some_and(|p| p.is_file() && p.extension().is_some_and(|ext| ext == "hk"));
if is_single_file {
run_compile_single(input.unwrap(), options, quiet, strict)
} else {
run_compile_project(input, output, options, quiet, strict)
}
}
Commands::Check { input } => run_check(input),
Commands::Init { path } => run_init(path),
Commands::Audit { path, format } => run_audit(path, &format),
};
match result {
Ok(()) => ExitCode::SUCCESS,
Err(e) => {
eprintln!("{}: {}", "error".red().bold(), e);
ExitCode::FAILURE
}
}
}
/// Compiles a whole Horkos project (or an explicit entry file within one)
/// to Terraform, writing one `.tf` file per source file plus a generated
/// provider configuration (`main.tf`).
///
/// Returns `Err` with a human-readable message on I/O failure, when any
/// compile errors are found, or — under `--strict` — when any preferred
/// setting was overridden.
fn run_compile_project(
    input: Option<PathBuf>,
    output: PathBuf,
    options: CompileOptions,
    quiet: bool,
    strict: bool,
) -> Result<(), String> {
    // Resolve the project root and an optional explicit entry file from the
    // CLI argument: file => its parent dir + entry, dir => root, missing =>
    // error, absent => current working directory.
    let (root, entry) = match &input {
        Some(path) if path.is_file() => {
            let root = path.parent().unwrap_or(std::path::Path::new("."));
            (root.to_path_buf(), Some(path.clone()))
        }
        Some(path) if path.is_dir() => {
            (path.clone(), None)
        }
        Some(path) => {
            return Err(format!("not found: {}", path.display()));
        }
        None => {
            (
                std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")),
                None,
            )
        }
    };
    println!(
        "{} project in {}",
        "Discovering".blue().bold(),
        root.display().cyan()
    );
    let project = Project::discover(&root, entry.as_ref()).map_err(|e| e.to_string())?;
    println!(" {} {} file(s)", "Found".green(), project.files.len());
    for file in &project.compile_order {
        let relative = file.strip_prefix(&root).unwrap_or(file);
        println!(" {}", relative.display());
    }
    if !options.dry_run {
        std::fs::create_dir_all(&output)
            .map_err(|e| format!("failed to create output directory: {}", e))?;
    }
    // Pre-pass: textually scan each file for `import "....hk"` lines and
    // register the resolved paths in the global symbol table before any
    // compilation happens. This is best-effort on purpose: unreadable files
    // scan as empty (`unwrap_or_default`), and imports that fail to
    // canonicalize are silently skipped.
    let mut globals = GlobalSymbolTable::new();
    for file in project.files_to_compile() {
        let source = std::fs::read_to_string(file).unwrap_or_default();
        for line in source.lines() {
            let line = line.trim();
            if line.starts_with("import ") {
                // Extract the first double-quoted string on the line.
                if let Some(start) = line.find('"') {
                    if let Some(end) = line[start + 1..].find('"') {
                        let import_path = &line[start + 1..start + 1 + end];
                        if import_path.ends_with(".hk") {
                            let from_dir = file.parent().unwrap_or(&root);
                            // `./` and `../` imports resolve relative to the
                            // importing file; bare paths resolve under
                            // `<root>/src`.
                            let resolved = if import_path.starts_with("./")
                                || import_path.starts_with("../")
                            {
                                from_dir.join(import_path)
                            } else {
                                root.join("src").join(import_path)
                            };
                            if let Ok(canonical) = resolved.canonicalize() {
                                globals.register_import_path(import_path, &canonical);
                            }
                        }
                    }
                }
            }
        }
    }
    // Compile phase: process files in dependency order, collecting the HCL
    // for each file and accumulating errors rather than stopping at the
    // first failure.
    let mut file_outputs: std::collections::HashMap<PathBuf, String> =
        std::collections::HashMap::new();
    let mut error_count = 0;
    let mut total_unsafe_blocks = 0;
    for file in project.files_to_compile() {
        let relative = file.strip_prefix(&root).unwrap_or(file);
        println!(
            "{} {}",
            "Compiling".green().bold(),
            relative.display().cyan()
        );
        let source = std::fs::read_to_string(file)
            .map_err(|e| format!("failed to read {}: {}", file.display(), e))?;
        let filename = file.to_string_lossy();
        match compile_and_extract(&source, &filename, &options, Some(&globals)) {
            Ok((hcl, typed_ast, overrides)) => {
                total_unsafe_blocks += typed_ast.count_unsafe_blocks();
                if !overrides.is_empty() {
                    if strict {
                        // --strict: each preferred-setting override is printed
                        // as a diagnostic and counted as an error.
                        for override_info in &overrides {
                            let message = format!(
                                "{} disabled (recommended: {})",
                                override_info.param_name, override_info.recommended
                            );
                            let diagnostic = Diagnostic::error_at(
                                message,
                                override_info.span,
                                filename.to_string(),
                            )
                            .with_code(ErrorCode::PreferredOverride)
                            .with_primary_label(format!(
                                "`{}` is a preferred setting with default `{}`",
                                override_info.param_name, override_info.recommended
                            ))
                            .with_help("remove --strict flag or use the recommended default");
                            diagnostic.print(&source);
                        }
                        error_count += overrides.len();
                    } else if !quiet {
                        // Non-strict: overrides are informational only.
                        for override_info in &overrides {
                            println!(
                                " {}: {} disabled for {} (recommended: {})",
                                "info".cyan(),
                                override_info.param_name,
                                override_info.resource_name,
                                override_info.recommended
                            );
                        }
                    }
                }
                // Register this file's exports so files compiled after it
                // (per compile order) can resolve them.
                let exports = extract_exports(&typed_ast, file);
                globals.register(exports);
                let relative_display = relative.display();
                // Prepend a provenance banner to the generated HCL.
                // (The trailing `\` in the literal strips the newline and the
                // next line's leading whitespace, so the banner renders flush
                // left.)
                let output_hcl = format!(
                    "# =============================================================================\n\
                     # Source: {relative_display}\n\
                     # =============================================================================\n\
                     #\n\
                     # This file was generated by Horkos from the source file above.\n\
                     # Do not edit manually - changes will be overwritten.\n\
                     #\n\
                     # =============================================================================\n\n\
                     {hcl}"
                );
                file_outputs.insert(file.to_path_buf(), output_hcl);
            }
            Err(diagnostics) => {
                for diagnostic in &diagnostics {
                    diagnostic.print(&source);
                }
                error_count += diagnostics.len();
            }
        }
    }
    if error_count > 0 {
        return Err(format!("{} error(s) found", error_count));
    }
    if options.dry_run {
        // Dry run: dump every generated file to stdout, write nothing.
        for (file, hcl) in &file_outputs {
            let relative = file.strip_prefix(&root).unwrap_or(file);
            println!("\n--- {} ---\n{}", relative.display(), hcl);
        }
    } else {
        // Write phase: provider config first, then one .tf per source file.
        let mut written_files = Vec::new();
        let provider_tf = output.join("main.tf");
        let provider_content = generate_provider_config(&options.target);
        std::fs::write(&provider_tf, &provider_content)
            .map_err(|e| format!("failed to write {}: {}", provider_tf.display(), e))?;
        written_files.push(provider_tf);
        for file in project.files_to_compile() {
            if let Some(hcl) = file_outputs.get(file) {
                let relative = file.strip_prefix(&root).unwrap_or(file);
                // Drop a leading `src/` so the output tree mirrors the
                // logical layout rather than the on-disk one.
                let relative = relative.strip_prefix("src/").unwrap_or(relative);
                let mut out_path = output.join(relative).with_extension("tf");
                // `main.tf` is reserved for the provider config written above.
                if out_path.file_name().is_some_and(|n| n == "main.tf") {
                    out_path = out_path.with_file_name("resources.tf");
                }
                if let Some(parent) = out_path.parent() {
                    std::fs::create_dir_all(parent)
                        .map_err(|e| format!("failed to create {}: {}", parent.display(), e))?;
                }
                std::fs::write(&out_path, hcl)
                    .map_err(|e| format!("failed to write {}: {}", out_path.display(), e))?;
                written_files.push(out_path);
            }
        }
        println!(
            "\n{} {} file(s):",
            "Wrote".green().bold(),
            written_files.len()
        );
        for f in &written_files {
            let relative = f.strip_prefix(&output).unwrap_or(f);
            println!(" {}", relative.display());
        }
        if total_unsafe_blocks > 0 {
            println!(
                "\n{}: {} unsafe block(s) acknowledged",
                "Note".yellow().bold(),
                total_unsafe_blocks
            );
        }
    }
    Ok(())
}
/// Minimum Terraform CLI version stamped into the generated `required_version` constraint.
pub const TERRAFORM_MIN_VERSION: &str = "1.5.0";
/// Inclusive lower bound of the generated AWS provider version constraint.
pub const AWS_PROVIDER_MIN_VERSION: &str = "5.0";
/// Exclusive upper bound of the generated AWS provider version constraint.
pub const AWS_PROVIDER_MAX_VERSION: &str = "6.0";
/// Renders the shared `main.tf` content for `target`: a generated-file
/// banner, the `terraform` block with version/provider constraints, and an
/// `aws` provider block whose `default_tags` mark every resource as managed
/// by horkos in the given environment.
///
/// The template is a raw string, so its line layout (flush-left) is exactly
/// what ends up in the output file — do not re-indent it.
fn generate_provider_config(target: &str) -> String {
    format!(
        r#"# =============================================================================
# Horkos Generated Terraform Configuration
# =============================================================================
#
# This file was generated by Horkos. Do not edit manually.
# Re-run `horkos compile` to regenerate.
#
# Target Environment: {target}
# Generated: {timestamp}
# Horkos Version: {version}
#
# Supported Versions:
# Terraform: >= {tf_version}
# AWS Provider: >= {aws_min}, < {aws_max}
#
# =============================================================================
terraform {{
required_version = ">= {tf_version}"
required_providers {{
aws = {{
source = "hashicorp/aws"
version = ">= {aws_min}, < {aws_max}"
}}
}}
}}
provider "aws" {{
# Configure via environment variables or AWS CLI profile:
# AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_REGION
default_tags {{
tags = {{
ManagedBy = "horkos"
Environment = "{target}"
}}
}}
}}
"#,
        target = target,
        // Seconds since the Unix epoch (see chrono_lite_timestamp).
        timestamp = chrono_lite_timestamp(),
        // Compile-time crate version from Cargo.
        version = env!("CARGO_PKG_VERSION"),
        tf_version = TERRAFORM_MIN_VERSION,
        aws_min = AWS_PROVIDER_MIN_VERSION,
        aws_max = AWS_PROVIDER_MAX_VERSION,
    )
}
/// Returns the current Unix timestamp (whole seconds since the epoch) as a
/// decimal string, or `"unknown"` if the system clock reads earlier than the
/// epoch. Presumably named to signal it stands in for the `chrono` crate
/// without adding the dependency.
fn chrono_lite_timestamp() -> String {
    use std::time::SystemTime;
    // `duration_since` only fails when the clock is before UNIX_EPOCH;
    // map_or_else collapses the original match, and `to_string()` replaces
    // the non-idiomatic `format!("{}", secs)`.
    SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .map_or_else(|_| "unknown".to_string(), |d| d.as_secs().to_string())
}
/// Compiles a single `.hk` file — no project discovery and no cross-file
/// global symbol table — and writes one `.tf` file into
/// `options.output_dir` (or prints it under `--dry-run`).
///
/// Returns `Err` on I/O failure, on compile diagnostics, or — under
/// `--strict` — when any preferred setting was overridden.
fn run_compile_single(
    file: PathBuf,
    options: CompileOptions,
    quiet: bool,
    strict: bool,
) -> Result<(), String> {
    use horkos::compile_source_with_globals;
    if !file.exists() {
        return Err(format!("file not found: {}", file.display()));
    }
    println!("{} {}", "Compiling".green().bold(), file.display().cyan());
    let source = std::fs::read_to_string(&file)
        .map_err(|e| format!("failed to read {}: {}", file.display(), e))?;
    let filename = file.to_string_lossy();
    // Single-file mode deliberately passes no globals (`None`): imports
    // from other files are not resolved here.
    match compile_source_with_globals(&source, &filename, &options, None) {
        Ok(result) => {
            if !result.preferred_overrides.is_empty() {
                if strict {
                    // --strict: print each override as a diagnostic and fail
                    // the whole compile.
                    for override_info in &result.preferred_overrides {
                        let message = format!(
                            "{} disabled (recommended: {})",
                            override_info.param_name, override_info.recommended
                        );
                        let diagnostic =
                            Diagnostic::error_at(message, override_info.span, filename.to_string())
                                .with_code(ErrorCode::PreferredOverride)
                                .with_primary_label(format!(
                                    "`{}` is a preferred setting with default `{}`",
                                    override_info.param_name, override_info.recommended
                                ))
                                .with_help("remove --strict flag or use the recommended default");
                        diagnostic.print(&source);
                    }
                    return Err(format!(
                        "{} preferred setting(s) overridden (use defaults or remove --strict)",
                        result.preferred_overrides.len()
                    ));
                } else if !quiet {
                    // Non-strict: overrides are informational only.
                    for override_info in &result.preferred_overrides {
                        println!(
                            " {}: {} disabled for {} (recommended: {})",
                            "info".cyan(),
                            override_info.param_name,
                            override_info.resource_name,
                            override_info.recommended
                        );
                    }
                }
            }
            let unsafe_count = result.unsafe_count;
            if options.dry_run {
                // Dry run prints the generated output and writes nothing;
                // note the unsafe-block summary is only printed when files
                // are actually written (else branch below).
                println!("{}", result.output);
            } else {
                std::fs::create_dir_all(&options.output_dir)
                    .map_err(|e| format!("failed to create output directory: {}", e))?;
                // Output file: same stem as the input, `.tf` extension,
                // placed directly in the output directory.
                let out_file = options.output_dir.join(
                    file.with_extension("tf")
                        .file_name()
                        .ok_or_else(|| format!("invalid file path: {}", file.display()))?,
                );
                std::fs::write(&out_file, &result.output)
                    .map_err(|e| format!("failed to write {}: {}", out_file.display(), e))?;
                println!(" {} {}", "Wrote".green(), out_file.display());
                if unsafe_count > 0 {
                    println!(
                        "\n{}: {} unsafe block(s) acknowledged",
                        "Note".yellow().bold(),
                        unsafe_count
                    );
                }
            }
            Ok(())
        }
        Err(diagnostics) => {
            for diagnostic in &diagnostics {
                diagnostic.print(&source);
            }
            Err(format!("{} error(s) found", diagnostics.len()))
        }
    }
}
/// Type-checks a file or project without generating any output. Prints a
/// per-file status line and returns `Err` with a summary when any
/// diagnostics were produced.
fn run_check(input: Option<PathBuf>) -> Result<(), String> {
    // Resolve project root and optional entry file from the CLI argument,
    // mirroring the compile command's resolution rules.
    let (root, entry) = match &input {
        Some(p) if p.is_file() => (
            p.parent()
                .unwrap_or(std::path::Path::new("."))
                .to_path_buf(),
            Some(p.clone()),
        ),
        Some(p) if p.is_dir() => (p.clone(), None),
        Some(p) => return Err(format!("not found: {}", p.display())),
        None => (
            std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")),
            None,
        ),
    };

    let project = Project::discover(&root, entry.as_ref()).map_err(|e| e.to_string())?;

    let mut total_errors = 0usize;
    for file in project.files_to_compile() {
        let shown = file.strip_prefix(&root).unwrap_or(file);
        println!("{} {}", "Checking".blue().bold(), shown.display().cyan());
        // Source is read up front so diagnostics can render code snippets.
        let source = std::fs::read_to_string(file)
            .map_err(|e| format!("failed to read {}: {}", file.display(), e))?;
        if let Err(diagnostics) = horkos::check_file(file) {
            for diagnostic in &diagnostics {
                diagnostic.print(&source);
            }
            total_errors += diagnostics.len();
        } else {
            println!(" {} No errors", "✓".green());
        }
    }

    if total_errors == 0 {
        println!("\n{} All files passed", "✓".green().bold());
        return Ok(());
    }
    Err(format!("{} error(s) found", total_errors))
}
/// Scaffolds a new Horkos project at `path`: creates `src/main.hk` with a
/// commented example program and a `.gitignore`, then prints next steps.
/// Existing files are never overwritten.
///
/// The two templates are raw strings whose flush-left layout is written to
/// disk verbatim — do not re-indent them.
fn run_init(path: PathBuf) -> Result<(), String> {
    use std::fs;
    fs::create_dir_all(&path).map_err(|e| format!("failed to create directory: {}", e))?;
    let src = path.join("src");
    fs::create_dir_all(&src).map_err(|e| format!("failed to create src directory: {}", e))?;
    let main_hk = src.join("main.hk");
    // Keep any existing main.hk untouched.
    if !main_hk.exists() {
        fs::write(
            &main_hk,
            r#"// Horkos Infrastructure
// Documentation: https://horkos.cloud
// Example: Create a bucket with secure defaults
val appData = S3.createBucket("my-app-data")
// Example: Create a VPC with subnets
val vpc = Network.createVpc("main", cidr: "10.0.0.0/16")
// Example: Import legacy infrastructure
// import "legacy.tf" as legacy
//
// unsafe("TICKET-123: Migration in progress") {
// val policy = S3.attachPolicy(bucket: legacy.oldBucket)
// }
"#,
        )
        .map_err(|e| format!("failed to write main.hk: {}", e))?;
    }
    let gitignore = path.join(".gitignore");
    // Likewise, never clobber an existing .gitignore.
    if !gitignore.exists() {
        fs::write(
            &gitignore,
            r#"# Generated Terraform files (optional - you may want to commit these)
# terraform/
# Terraform state (never commit)
*.tfstate
*.tfstate.*
.terraform/
# Horkos build artifacts
.horkos-cache/
"#,
        )
        .map_err(|e| format!("failed to write .gitignore: {}", e))?;
    }
    println!(
        "{} Horkos project in {}",
        "Initialized".green().bold(),
        path.display()
    );
    println!();
    println!(" {}", "Created:".blue());
    println!(" src/main.hk");
    println!(" .gitignore");
    println!();
    println!(" {}", "Next steps:".blue());
    println!(
        " 1. Edit {} to define your infrastructure",
        "src/main.hk".cyan()
    );
    println!(
        " 2. Run {} to generate Terraform",
        "horkos compile".cyan()
    );
    println!(
        " 3. Run {} to deploy",
        "cd terraform && terraform apply".cyan()
    );
    Ok(())
}
/// Scans `path` for `unsafe(...)` blocks and reports them in the requested
/// format: `"json"` emits a pretty-printed JSON array; any other value
/// prints a human-readable listing.
///
/// Returns `Err` if the scan itself fails or (fix) if JSON serialization
/// fails — the original called `.unwrap()` here, which would panic instead
/// of reporting the error through this binary's normal `Result<(), String>`
/// channel.
fn run_audit(path: PathBuf, format: &str) -> Result<(), String> {
    let unsafe_blocks =
        horkos::find_unsafe_blocks(&path).map_err(|e| format!("audit failed: {}", e))?;
    if unsafe_blocks.is_empty() {
        println!("{} No unsafe blocks found", "✓".green());
        return Ok(());
    }
    match format {
        "json" => {
            // Propagate serialization errors instead of panicking.
            let json = serde_json::to_string_pretty(&unsafe_blocks)
                .map_err(|e| format!("failed to serialize audit report: {}", e))?;
            println!("{}", json);
        }
        _ => {
            println!(
                "{} Found {} unsafe block(s):\n",
                "!".yellow().bold(),
                unsafe_blocks.len()
            );
            for block in &unsafe_blocks {
                println!(
                    " {}:{}:{}\n Reason: {}\n",
                    block.file.cyan(),
                    block.line,
                    block.column,
                    block.reason.yellow()
                );
            }
        }
    }
    Ok(())
}